Features:

| feature | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |

Record:

| field | value |
|---|---|
| hexsha | 4ac34a1ae96f54d39aec3c77c977b49d09a2095f |
| size | 63,395 |
| ext | py |
| lang | Python |
| max_stars_repo_path | src/algebraicaxioms/python/src/Test.py |
| max_stars_repo_name | unsat/d |
| max_stars_repo_head_hexsha | c986b5f486eaf2394fbf659dba09c6f522291386 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 14 |
| max_stars_repo_stars_event_min_datetime | 2020-06-17T21:05:01.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-12-20T03:44:42.000Z |
| max_issues_repo_path | src/algebraicaxioms/python/src/Test.py |
| max_issues_repo_name | dynaroars/dig |
| max_issues_repo_head_hexsha | a6a4d045182d627d53c9970f8a77340f6f6b7afb |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 31 |
| max_issues_repo_issues_event_min_datetime | 2020-03-23T14:16:45.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-01-10T02:49:00.000Z |
| max_forks_repo_path | src/algebraicaxioms/python/src/Test.py |
| max_forks_repo_name | dynaroars/dig |
| max_forks_repo_head_hexsha | a6a4d045182d627d53c9970f8a77340f6f6b7afb |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 9 |
| max_forks_repo_forks_event_min_datetime | 2020-06-22T11:22:29.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-12-10T13:28:48.000Z |
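The `content` field holds the Python source reproduced below: an auto-generated test file that probes candidate algebraic axioms over Python lists (`lappend`, `linsert`, `lextend`, `lpop`). Each `t_<axiom>_<variant>_0_<trial>` function runs one trial on fixed pseudo-random inputs, and each `f_<axiom>_<variant>_0` wrapper conjoins ten trials, converting an `IndexError` from popping an empty list into `False`. As a minimal sketch of the pattern (the helper name and sampling ranges here are illustrative, not part of the record), the first axiom `Any_0 = lpop(lappend(Any_List_0, Any_0))` amounts to:

```python
import random

def holds_lpop_lappend(trials: int = 10) -> bool:
    """Mirror of the t_0_0_0_* / f_0_0_0 pattern: check the candidate axiom
    Any_0 == lpop(lappend(Any_List_0, Any_0)) on random instances."""
    try:
        for _ in range(trials):
            x = random.randint(-50, 50)  # Any_0
            xs = [random.randint(-50, 50) for _ in range(random.randint(0, 5))]  # Any_List_0
            xs.append(x)       # lappend(Any_List_0, Any_0)
            if xs.pop() != x:  # lpop(...) must hand back Any_0
                return False
        return True
    except IndexError:         # popping an empty list refutes the axiom
        return False

print(holds_lpop_lappend())  # True: append followed by pop always returns the appended value
```

The generated file uses the unbound forms `list.append(xs, x)` and `list.pop(xs)`, which are equivalent to `xs.append(x)` and `xs.pop()`.

content: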
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_0():
Any_00 = -11
Any_List_00 = []
Any_01 = -11
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_1():
Any_00 = -41
Any_List_00 = [-12, 45]
Any_01 = -41
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_2():
Any_00 = -30
Any_List_00 = [22, -18, -34]
Any_01 = -30
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_3():
Any_00 = -49
Any_List_00 = [-46, 25, -23, 22]
Any_01 = -49
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_4():
Any_00 = 8
Any_List_00 = [49]
Any_01 = 8
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_5():
Any_00 = 40
Any_List_00 = [15, -46, -2, -25]
Any_01 = 40
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_6():
Any_00 = -6
Any_List_00 = []
Any_01 = -6
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_7():
Any_00 = -24
Any_List_00 = [36, 5, 25, -26]
Any_01 = -24
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_8():
Any_00 = 13
Any_List_00 = []
Any_01 = 13
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_0))
def t_0_0_0_9():
Any_00 = 35
Any_List_00 = [-13, 14, 13]
Any_01 = 35
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_0_0_0():
try:
return (t_0_0_0_0() and t_0_0_0_1() and t_0_0_0_2() and t_0_0_0_3() and t_0_0_0_4() and t_0_0_0_5() and t_0_0_0_6() and t_0_0_0_7() and t_0_0_0_8() and t_0_0_0_9())
except IndexError:
return False
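# Note: the t_0_1_* group below tests the variant Any_0 = lpop(lappend(Any_List_0, Any_1)),
# where the appended value Any_1 is drawn independently of Any_0. The popped value then
# matches Any_0 only by coincidence, so f_0_1_0 evaluates to False on these sampled inputs.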
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_0():
Any_00 = -48
Any_List_00 = []
Any_10 = -9
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_1():
Any_00 = -14
Any_List_00 = [-25]
Any_10 = -48
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_2():
Any_00 = -9
Any_List_00 = [-7]
Any_10 = 22
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_3():
Any_00 = 4
Any_List_00 = [36, -38]
Any_10 = -23
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_4():
Any_00 = -2
Any_List_00 = [37, 18]
Any_10 = 20
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_5():
Any_00 = 12
Any_List_00 = [-20, -42, 42, -45]
Any_10 = 48
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_6():
Any_00 = -40
Any_List_00 = [-29]
Any_10 = -33
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_7():
Any_00 = 18
Any_List_00 = [47, -8]
Any_10 = -23
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_8():
Any_00 = 26
Any_List_00 = [-3, -7]
Any_10 = 14
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(Any_List_0, Any_1))
def t_0_1_0_9():
Any_00 = -7
Any_List_00 = [-20, 27]
Any_10 = -36
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_0_1_0():
try:
return (t_0_1_0_0() and t_0_1_0_1() and t_0_1_0_2() and t_0_1_0_3() and t_0_1_0_4() and t_0_1_0_5() and t_0_1_0_6() and t_0_1_0_7() and t_0_1_0_8() and t_0_1_0_9())
except IndexError:
return False
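# Note: in the t_1_*_* groups, list.insert clamps out-of-range indices, so the inserted
# element comes back from the final pop only when it lands at the tail (e.g. insertion
# into an empty list). In t_1_0_0_1 the value 24 is inserted at index 2 of a four-element
# list and the pop returns -45, so f_1_0_0 evaluates to False even though Any_0 is re-inserted.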
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_0():
Any_00 = 49
Any_List_00 = []
int_00 = -33
Any_01 = 49
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_1():
Any_00 = 24
Any_List_00 = [48, -37, -9, -45]
int_00 = 2
Any_01 = 24
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_2():
Any_00 = -41
Any_List_00 = [50, -32, -34]
int_00 = -7
Any_01 = -41
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_3():
Any_00 = -36
Any_List_00 = [25, 50, -2, -41]
int_00 = 23
Any_01 = -36
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_4():
Any_00 = 20
Any_List_00 = [22]
int_00 = -40
Any_01 = 20
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_5():
Any_00 = -16
Any_List_00 = [-13, 22]
int_00 = 18
Any_01 = -16
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_6():
Any_00 = -36
Any_List_00 = [-15, -37, 50]
int_00 = -45
Any_01 = -36
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_7():
Any_00 = -13
Any_List_00 = []
int_00 = 28
Any_01 = -13
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_8():
Any_00 = 35
Any_List_00 = []
int_00 = -39
Any_01 = 35
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_0))
def t_1_0_0_9():
Any_00 = 2
Any_List_00 = []
int_00 = -45
Any_01 = 2
list.insert(Any_List_00, int_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_1_0_0():
try:
return (t_1_0_0_0() and t_1_0_0_1() and t_1_0_0_2() and t_1_0_0_3() and t_1_0_0_4() and t_1_0_0_5() and t_1_0_0_6() and t_1_0_0_7() and t_1_0_0_8() and t_1_0_0_9())
except IndexError:
return False
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_0():
Any_00 = -26
Any_List_00 = []
int_00 = -30
Any_10 = -20
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_1():
Any_00 = -36
Any_List_00 = [37]
int_00 = -20
Any_10 = 7
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_2():
Any_00 = -30
Any_List_00 = []
int_00 = 5
Any_10 = 45
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_3():
Any_00 = -2
Any_List_00 = [20, -18]
int_00 = 41
Any_10 = 19
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_4():
Any_00 = 11
Any_List_00 = []
int_00 = -24
Any_10 = -10
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_5():
Any_00 = 33
Any_List_00 = []
int_00 = -47
Any_10 = -10
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_6():
Any_00 = -49
Any_List_00 = [42, 26]
int_00 = -10
Any_10 = 50
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_7():
Any_00 = 7
Any_List_00 = [1, -42]
int_00 = -42
Any_10 = 0
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_8():
Any_00 = -10
Any_List_00 = [-36, -18, -23]
int_00 = 50
Any_10 = 26
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(linsert(Any_List_0, int_0, Any_1))
def t_1_1_0_9():
Any_00 = 29
Any_List_00 = [38, 10, 34, -5]
int_00 = -17
Any_10 = 49
list.insert(Any_List_00, int_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_1_1_0():
try:
return (t_1_1_0_0() and t_1_1_0_1() and t_1_1_0_2() and t_1_1_0_3() and t_1_1_0_4() and t_1_1_0_5() and t_1_1_0_6() and t_1_1_0_7() and t_1_1_0_8() and t_1_1_0_9())
except IndexError:
return False
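# Note: the t_2_*_* groups never put Any_0 into the list: they only extend Any_List_0
# (with a copy of itself, or with an independent Any_List_1) before popping. When both
# lists are empty the pop raises IndexError, which the f_* wrappers convert to False.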
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_0():
Any_00 = -27
Any_List_00 = []
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_1():
Any_00 = -11
Any_List_00 = [-19]
Any_List_01 = [-19]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_2():
Any_00 = -4
Any_List_00 = []
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_3():
Any_00 = -15
Any_List_00 = []
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_4():
Any_00 = 46
Any_List_00 = [-39, 33, 23]
Any_List_01 = [-39, 33, 23]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_5():
Any_00 = 32
Any_List_00 = [-21, -1]
Any_List_01 = [-21, -1]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_6():
Any_00 = -11
Any_List_00 = []
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_7():
Any_00 = -9
Any_List_00 = [-10]
Any_List_01 = [-10]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_8():
Any_00 = 24
Any_List_00 = [-19, -8]
Any_List_01 = [-19, -8]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_0))
def t_2_0_0_9():
Any_00 = -38
Any_List_00 = [28, 24, 26, -39]
Any_List_01 = [28, 24, 26, -39]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_2_0_0():
try:
return (t_2_0_0_0() and t_2_0_0_1() and t_2_0_0_2() and t_2_0_0_3() and t_2_0_0_4() and t_2_0_0_5() and t_2_0_0_6() and t_2_0_0_7() and t_2_0_0_8() and t_2_0_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_0():
Any_00 = -19
Any_List_00 = []
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_1():
Any_00 = -19
Any_List_00 = [-41, -16, 20]
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_2():
Any_00 = 43
Any_List_00 = []
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_3():
Any_00 = 31
Any_List_00 = []
Any_List_10 = [46, -5]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_4():
Any_00 = 13
Any_List_00 = [-31, -38, 14]
Any_List_10 = [-41, 15]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_5():
Any_00 = 35
Any_List_00 = [-28]
Any_List_10 = [-32]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_6():
Any_00 = -10
Any_List_00 = [-37, 40]
Any_List_10 = [27, -13, -34, -24]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_7():
Any_00 = -32
Any_List_00 = [42, -46, 49, -10]
Any_List_10 = [36, 20, 45, 38]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_8():
Any_00 = -24
Any_List_00 = [-12]
Any_List_10 = [18, -30, -44]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, Any_List_1))
def t_2_1_0_9():
Any_00 = 41
Any_List_00 = [-19, -18, 49, -42, 37]
Any_List_10 = [5, 20, -18]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_2_1_0():
try:
return (t_2_1_0_0() and t_2_1_0_1() and t_2_1_0_2() and t_2_1_0_3() and t_2_1_0_4() and t_2_1_0_5() and t_2_1_0_6() and t_2_1_0_7() and t_2_1_0_8() and t_2_1_0_9())
except IndexError:
return False
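# Note: the t_3_*_* groups extend first and append last. Variants that append Any_0
# itself (t_3_0_*, t_3_1_*) always pop it back, while variants that append an
# independent Any_1 (t_3_2_*, t_3_3_*) match Any_0 only by coincidence.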
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_0():
Any_00 = 19
Any_List_00 = []
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
Any_01 = 19
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_1():
Any_00 = 18
Any_List_00 = [-49, 0, -7]
Any_List_01 = [-49, 0, -7]
list.extend(Any_List_00, Any_List_01)
Any_01 = 18
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_2():
Any_00 = -29
Any_List_00 = [12, -47]
Any_List_01 = [12, -47]
list.extend(Any_List_00, Any_List_01)
Any_01 = -29
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_3():
Any_00 = 32
Any_List_00 = [23, -48, -43]
Any_List_01 = [23, -48, -43]
list.extend(Any_List_00, Any_List_01)
Any_01 = 32
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_4():
Any_00 = 38
Any_List_00 = [24, -33]
Any_List_01 = [24, -33]
list.extend(Any_List_00, Any_List_01)
Any_01 = 38
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_5():
Any_00 = 25
Any_List_00 = [-33]
Any_List_01 = [-33]
list.extend(Any_List_00, Any_List_01)
Any_01 = 25
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_6():
Any_00 = -17
Any_List_00 = [0, 22]
Any_List_01 = [0, 22]
list.extend(Any_List_00, Any_List_01)
Any_01 = -17
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_7():
Any_00 = 1
Any_List_00 = [28]
Any_List_01 = [28]
list.extend(Any_List_00, Any_List_01)
Any_01 = 1
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_8():
Any_00 = -39
Any_List_00 = [12]
Any_List_01 = [12]
list.extend(Any_List_00, Any_List_01)
Any_01 = -39
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_0))
def t_3_0_0_9():
Any_00 = -50
Any_List_00 = [17]
Any_List_01 = [17]
list.extend(Any_List_00, Any_List_01)
Any_01 = -50
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_3_0_0():
try:
return (t_3_0_0_0() and t_3_0_0_1() and t_3_0_0_2() and t_3_0_0_3() and t_3_0_0_4() and t_3_0_0_5() and t_3_0_0_6() and t_3_0_0_7() and t_3_0_0_8() and t_3_0_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_0():
Any_00 = -10
Any_List_00 = []
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
Any_01 = -10
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_1():
Any_00 = -20
Any_List_00 = [13, 37]
Any_List_10 = [-22, 41, 2]
list.extend(Any_List_00, Any_List_10)
Any_01 = -20
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_2():
Any_00 = -7
Any_List_00 = [28, 43, 33, -15]
Any_List_10 = [-22, -44, -41, 47, 15]
list.extend(Any_List_00, Any_List_10)
Any_01 = -7
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_3():
Any_00 = 32
Any_List_00 = [-30, 15]
Any_List_10 = [-11]
list.extend(Any_List_00, Any_List_10)
Any_01 = 32
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_4():
Any_00 = -12
Any_List_00 = [-12, 20, -3, -29, 39]
Any_List_10 = [44, 9, 26, -40, -35]
list.extend(Any_List_00, Any_List_10)
Any_01 = -12
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_5():
Any_00 = 27
Any_List_00 = [23, -2, -28, -31]
Any_List_10 = [4, -23]
list.extend(Any_List_00, Any_List_10)
Any_01 = 27
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_6():
Any_00 = 22
Any_List_00 = [46, 50, -44, 13, 37]
Any_List_10 = [41, 31, -6]
list.extend(Any_List_00, Any_List_10)
Any_01 = 22
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_7():
Any_00 = -1
Any_List_00 = [-29, 19, 43, -45]
Any_List_10 = [-39, -18, 30, -38]
list.extend(Any_List_00, Any_List_10)
Any_01 = -1
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_8():
Any_00 = -16
Any_List_00 = [-40, -33, 49, 28, 34]
Any_List_10 = [39, -40, 6, -20, -2]
list.extend(Any_List_00, Any_List_10)
Any_01 = -16
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_0))
def t_3_1_0_9():
Any_00 = 5
Any_List_00 = [-29, -9, 6]
Any_List_10 = [29]
list.extend(Any_List_00, Any_List_10)
Any_01 = 5
list.append(Any_List_00, Any_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_3_1_0():
try:
return (t_3_1_0_0() and t_3_1_0_1() and t_3_1_0_2() and t_3_1_0_3() and t_3_1_0_4() and t_3_1_0_5() and t_3_1_0_6() and t_3_1_0_7() and t_3_1_0_8() and t_3_1_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_0():
Any_00 = 12
Any_List_00 = []
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
Any_10 = -23
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_1():
Any_00 = 5
Any_List_00 = [2, -35, 34, -13]
Any_List_01 = [2, -35, 34, -13]
list.extend(Any_List_00, Any_List_01)
Any_10 = 26
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_2():
Any_00 = -15
Any_List_00 = [45, 21, -50]
Any_List_01 = [45, 21, -50]
list.extend(Any_List_00, Any_List_01)
Any_10 = -19
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_3():
Any_00 = -26
Any_List_00 = [24, -48, -47]
Any_List_01 = [24, -48, -47]
list.extend(Any_List_00, Any_List_01)
Any_10 = 17
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_4():
Any_00 = 30
Any_List_00 = [-17]
Any_List_01 = [-17]
list.extend(Any_List_00, Any_List_01)
Any_10 = 27
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_5():
Any_00 = -24
Any_List_00 = [-32, 19]
Any_List_01 = [-32, 19]
list.extend(Any_List_00, Any_List_01)
Any_10 = -28
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_6():
Any_00 = -25
Any_List_00 = [24, 46]
Any_List_01 = [24, 46]
list.extend(Any_List_00, Any_List_01)
Any_10 = -16
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_7():
Any_00 = -18
Any_List_00 = [-29, 19, -5]
Any_List_01 = [-29, 19, -5]
list.extend(Any_List_00, Any_List_01)
Any_10 = 37
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_8():
Any_00 = 12
Any_List_00 = []
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
Any_10 = 3
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_0), Any_1))
def t_3_2_0_9():
Any_00 = 48
Any_List_00 = [-1, -24, -14, -37]
Any_List_01 = [-1, -24, -14, -37]
list.extend(Any_List_00, Any_List_01)
Any_10 = -24
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_3_2_0():
try:
return (t_3_2_0_0() and t_3_2_0_1() and t_3_2_0_2() and t_3_2_0_3() and t_3_2_0_4() and t_3_2_0_5() and t_3_2_0_6() and t_3_2_0_7() and t_3_2_0_8() and t_3_2_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_0():
Any_00 = -47
Any_List_00 = []
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
Any_10 = -35
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_1():
Any_00 = 36
Any_List_00 = [33, -33, -41, 14, -3]
Any_List_10 = [-11, 5, 14, 36]
list.extend(Any_List_00, Any_List_10)
Any_10 = 47
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_2():
Any_00 = -5
Any_List_00 = [-9, -50, -35, 6]
Any_List_10 = [7, -6, -11, 19, 1]
list.extend(Any_List_00, Any_List_10)
Any_10 = 47
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_3():
Any_00 = -7
Any_List_00 = [37, 23, 13, -36, 32]
Any_List_10 = [-2, -24, 21]
list.extend(Any_List_00, Any_List_10)
Any_10 = 50
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_4():
Any_00 = -50
Any_List_00 = [26, 42, 44, 43, 15]
Any_List_10 = [9]
list.extend(Any_List_00, Any_List_10)
Any_10 = -15
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_5():
Any_00 = 26
Any_List_00 = [45, 41, -11]
Any_List_10 = [-29, 7, 29, 35, 17]
list.extend(Any_List_00, Any_List_10)
Any_10 = 16
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_6():
Any_00 = -25
Any_List_00 = [-50, 36, -1, 24]
Any_List_10 = [1, -7, 29]
list.extend(Any_List_00, Any_List_10)
Any_10 = -4
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_7():
Any_00 = 24
Any_List_00 = [45, -42, 13, 45, -19]
Any_List_10 = [33, -13, 30, -48, 2]
list.extend(Any_List_00, Any_List_10)
Any_10 = 43
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_8():
Any_00 = 42
Any_List_00 = [31]
Any_List_10 = [50, -16, -28]
list.extend(Any_List_00, Any_List_10)
Any_10 = 30
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lappend(lextend(Any_List_0, Any_List_1), Any_1))
def t_3_3_0_9():
Any_00 = 48
Any_List_00 = [-49, -6, -17, 40]
Any_List_10 = [37, 19, -12]
list.extend(Any_List_00, Any_List_10)
Any_10 = -41
list.append(Any_List_00, Any_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_3_3_0():
try:
return (t_3_3_0_0() and t_3_3_0_1() and t_3_3_0_2() and t_3_3_0_3() and t_3_3_0_4() and t_3_3_0_5() and t_3_3_0_6() and t_3_3_0_7() and t_3_3_0_8() and t_3_3_0_9())
except IndexError:
return False
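# Note: the t_4_*_* groups append to the second list first and then extend Any_List_0
# with it, so the appended element ends up last and the pop recovers it whenever the
# appended value is Any_0 itself (t_4_0_*, t_4_1_*); t_4_2_* and t_4_3_* append an
# independent Any_1 instead.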
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_0():
Any_00 = -31
Any_List_00 = []
Any_List_01 = []
Any_01 = -31
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_1():
Any_00 = -17
Any_List_00 = [-29, 9, 15]
Any_List_01 = [-29, 9, 15]
Any_01 = -17
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_2():
Any_00 = -45
Any_List_00 = [15, -38]
Any_List_01 = [15, -38]
Any_01 = -45
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_3():
Any_00 = 45
Any_List_00 = [4, -42, -5, -42]
Any_List_01 = [4, -42, -5, -42]
Any_01 = 45
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_4():
Any_00 = 34
Any_List_00 = [-48, -29, 14]
Any_List_01 = [-48, -29, 14]
Any_01 = 34
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_5():
Any_00 = 40
Any_List_00 = [38]
Any_List_01 = [38]
Any_01 = 40
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_6():
Any_00 = -39
Any_List_00 = [31, 38, -15]
Any_List_01 = [31, 38, -15]
Any_01 = -39
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_7():
Any_00 = 27
Any_List_00 = [-24, 17]
Any_List_01 = [-24, 17]
Any_01 = 27
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_8():
Any_00 = -24
Any_List_00 = [-8]
Any_List_01 = [-8]
Any_01 = -24
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_0)))
def t_4_0_0_9():
Any_00 = -16
Any_List_00 = []
Any_List_01 = []
Any_01 = -16
list.append(Any_List_01, Any_01)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_4_0_0():
try:
return (t_4_0_0_0() and t_4_0_0_1() and t_4_0_0_2() and t_4_0_0_3() and t_4_0_0_4() and t_4_0_0_5() and t_4_0_0_6() and t_4_0_0_7() and t_4_0_0_8() and t_4_0_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_0():
Any_00 = -41
Any_List_00 = []
Any_List_10 = []
Any_01 = -41
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_1():
Any_00 = 15
Any_List_00 = [44, -44, -29, -12]
Any_List_10 = [44, 41, 21, -16, -5]
Any_01 = 15
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_2():
Any_00 = 28
Any_List_00 = [-21, 0, 21, 1, -28]
Any_List_10 = [-17, 28, -8]
Any_01 = 28
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_3():
Any_00 = 41
Any_List_00 = [-17]
Any_List_10 = [40, -19, 34, -47]
Any_01 = 41
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_4():
Any_00 = 29
Any_List_00 = [-10, 5, 47]
Any_List_10 = [50]
Any_01 = 29
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_5():
Any_00 = -16
Any_List_00 = [-41]
Any_List_10 = [43, -29, 24, 6, 24]
Any_01 = -16
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_6():
Any_00 = 43
Any_List_00 = [27]
Any_List_10 = [8, 17]
Any_01 = 43
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_7():
Any_00 = -30
Any_List_00 = [49]
Any_List_10 = [41]
Any_01 = -30
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_8():
Any_00 = 6
Any_List_00 = [-11, 46]
Any_List_10 = [-20, -36, 41]
Any_01 = 6
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_0)))
def t_4_1_0_9():
Any_00 = -24
Any_List_00 = [37, -11, -42, -37, -21]
Any_List_10 = [-9, 13, -38]
Any_01 = -24
list.append(Any_List_10, Any_01)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_4_1_0():
try:
return (t_4_1_0_0() and t_4_1_0_1() and t_4_1_0_2() and t_4_1_0_3() and t_4_1_0_4() and t_4_1_0_5() and t_4_1_0_6() and t_4_1_0_7() and t_4_1_0_8() and t_4_1_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_0():
Any_00 = -27
Any_List_00 = []
Any_List_01 = []
Any_10 = -45
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_1():
Any_00 = 26
Any_List_00 = [37]
Any_List_01 = [37]
Any_10 = -48
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_2():
Any_00 = -46
Any_List_00 = [17, 42, 28, 6, -7]
Any_List_01 = [17, 42, 28, 6, -7]
Any_10 = 13
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_3():
Any_00 = 34
Any_List_00 = []
Any_List_01 = []
Any_10 = -15
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_4():
Any_00 = 28
Any_List_00 = [-38]
Any_List_01 = [-38]
Any_10 = 38
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_5():
Any_00 = -22
Any_List_00 = [13]
Any_List_01 = [13]
Any_10 = 1
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_6():
Any_00 = 7
Any_List_00 = [-21]
Any_List_01 = [-21]
Any_10 = -2
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_7():
Any_00 = -20
Any_List_00 = [20, 24, -1]
Any_List_01 = [20, 24, -1]
Any_10 = -14
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_8():
Any_00 = -23
Any_List_00 = [-17, -8, 13, 25, -36]
Any_List_01 = [-17, -8, 13, 25, -36]
Any_10 = 7
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_0, Any_1)))
def t_4_2_0_9():
Any_00 = -23
Any_List_00 = []
Any_List_01 = []
Any_10 = -40
list.append(Any_List_01, Any_10)
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_4_2_0():
try:
return (t_4_2_0_0() and t_4_2_0_1() and t_4_2_0_2() and t_4_2_0_3() and t_4_2_0_4() and t_4_2_0_5() and t_4_2_0_6() and t_4_2_0_7() and t_4_2_0_8() and t_4_2_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_0():
Any_00 = -49
Any_List_00 = []
Any_List_10 = []
Any_10 = -50
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_1():
Any_00 = -1
Any_List_00 = [-25, 1]
Any_List_10 = [47]
Any_10 = 24
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_2():
Any_00 = 32
Any_List_00 = []
Any_List_10 = []
Any_10 = -31
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_3():
Any_00 = -1
Any_List_00 = [19, -43, 22, -2, -18]
Any_List_10 = [-40]
Any_10 = -32
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_4():
Any_00 = 9
Any_List_00 = [-49, -46]
Any_List_10 = [-43, 17, -34, -45]
Any_10 = 33
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_5():
Any_00 = -15
Any_List_00 = []
Any_List_10 = [-39, -26, -47]
Any_10 = 49
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_6():
Any_00 = 13
Any_List_00 = [45]
Any_List_10 = [37, -26]
Any_10 = 31
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_7():
Any_00 = 34
Any_List_00 = [-8, 30, -16]
Any_List_10 = [32, 31]
Any_10 = 7
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_8():
Any_00 = -19
Any_List_00 = []
Any_List_10 = [50, 25, -28, -6]
Any_10 = -19
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(Any_List_0, lappend(Any_List_1, Any_1)))
def t_4_3_0_9():
Any_00 = 4
Any_List_00 = [21, 31, 16, -43, -5]
Any_List_10 = [2, 18, -25, 41]
Any_10 = 27
list.append(Any_List_10, Any_10)
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_4_3_0():
try:
return (t_4_3_0_0() and t_4_3_0_1() and t_4_3_0_2() and t_4_3_0_3() and t_4_3_0_4() and t_4_3_0_5() and t_4_3_0_6() and t_4_3_0_7() and t_4_3_0_8() and t_4_3_0_9())
except IndexError:
return False
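# Note: the t_5_*_* groups append before extending, so the final pop returns the last
# element of the extension list unless that list is empty. A trial recovers the appended
# value only when the extension is empty, and even then t_5_2_* and t_5_3_* append an
# independent Any_1 rather than Any_0.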
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_0():
Any_00 = 18
Any_List_00 = []
Any_01 = 18
list.append(Any_List_00, Any_01)
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_1():
Any_00 = 34
Any_List_00 = []
Any_01 = 34
list.append(Any_List_00, Any_01)
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_2():
Any_00 = 41
Any_List_00 = [45, 28]
Any_01 = 41
list.append(Any_List_00, Any_01)
Any_List_01 = [45, 28]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_3():
Any_00 = 42
Any_List_00 = []
Any_01 = 42
list.append(Any_List_00, Any_01)
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_4():
Any_00 = -18
Any_List_00 = [-38]
Any_01 = -18
list.append(Any_List_00, Any_01)
Any_List_01 = [-38]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_5():
Any_00 = -31
Any_List_00 = []
Any_01 = -31
list.append(Any_List_00, Any_01)
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_6():
Any_00 = -24
Any_List_00 = [-45, -44, 31]
Any_01 = -24
list.append(Any_List_00, Any_01)
Any_List_01 = [-45, -44, 31]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_7():
Any_00 = -39
Any_List_00 = [10, 14, -3, -38]
Any_01 = -39
list.append(Any_List_00, Any_01)
Any_List_01 = [10, 14, -3, -38]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_8():
Any_00 = -10
Any_List_00 = []
Any_01 = -10
list.append(Any_List_00, Any_01)
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_0))
def t_5_0_0_9():
Any_00 = -34
Any_List_00 = [-46, 6, 35, -34]
Any_01 = -34
list.append(Any_List_00, Any_01)
Any_List_01 = [-46, 6, 35, -34]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_5_0_0():
try:
return (t_5_0_0_0() and t_5_0_0_1() and t_5_0_0_2() and t_5_0_0_3() and t_5_0_0_4() and t_5_0_0_5() and t_5_0_0_6() and t_5_0_0_7() and t_5_0_0_8() and t_5_0_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_0():
Any_00 = 0
Any_List_00 = []
Any_01 = 0
list.append(Any_List_00, Any_01)
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_1():
Any_00 = 44
Any_List_00 = [-16, -39, -18, -9]
Any_01 = 44
list.append(Any_List_00, Any_01)
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_2():
Any_00 = -12
Any_List_00 = []
Any_01 = -12
list.append(Any_List_00, Any_01)
Any_List_10 = [-43, 43, -17]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_3():
Any_00 = -10
Any_List_00 = [-34, -17, -2, -36, 36]
Any_01 = -10
list.append(Any_List_00, Any_01)
Any_List_10 = [-38, 4]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_4():
Any_00 = -19
Any_List_00 = [21, -24, -8, -7]
Any_01 = -19
list.append(Any_List_00, Any_01)
Any_List_10 = [50, 0, 24, 11]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_5():
Any_00 = -37
Any_List_00 = [33]
Any_01 = -37
list.append(Any_List_00, Any_01)
Any_List_10 = [17, 21, 42]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_6():
Any_00 = 24
Any_List_00 = [16, 18, -47, -13, 45]
Any_01 = 24
list.append(Any_List_00, Any_01)
Any_List_10 = [-25]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_7():
Any_00 = -3
Any_List_00 = [16, -9, -38]
Any_01 = -3
list.append(Any_List_00, Any_01)
Any_List_10 = [-6, -34, 23]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_8():
Any_00 = -42
Any_List_00 = []
Any_01 = -42
list.append(Any_List_00, Any_01)
Any_List_10 = [33, 18]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_0), Any_List_1))
def t_5_1_0_9():
Any_00 = -10
Any_List_00 = [-12, -10, -5]
Any_01 = -10
list.append(Any_List_00, Any_01)
Any_List_10 = [-9, 45]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_5_1_0():
try:
return (t_5_1_0_0() and t_5_1_0_1() and t_5_1_0_2() and t_5_1_0_3() and t_5_1_0_4() and t_5_1_0_5() and t_5_1_0_6() and t_5_1_0_7() and t_5_1_0_8() and t_5_1_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_0():
Any_00 = 45
Any_List_00 = []
Any_10 = 16
list.append(Any_List_00, Any_10)
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_1():
Any_00 = 17
Any_List_00 = [-10]
Any_10 = -35
list.append(Any_List_00, Any_10)
Any_List_01 = [-10]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_2():
Any_00 = 43
Any_List_00 = [23, -42]
Any_10 = -9
list.append(Any_List_00, Any_10)
Any_List_01 = [23, -42]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_3():
Any_00 = 7
Any_List_00 = [8, -4, 44]
Any_10 = -15
list.append(Any_List_00, Any_10)
Any_List_01 = [8, -4, 44]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_4():
Any_00 = -2
Any_List_00 = [-43, -33, -44, 17]
Any_10 = -40
list.append(Any_List_00, Any_10)
Any_List_01 = [-43, -33, -44, 17]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_5():
Any_00 = 12
Any_List_00 = [50, -19]
Any_10 = 23
list.append(Any_List_00, Any_10)
Any_List_01 = [50, -19]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_6():
Any_00 = 39
Any_List_00 = [-7, -4, 32, -3, 1]
Any_10 = 23
list.append(Any_List_00, Any_10)
Any_List_01 = [-7, -4, 32, -3, 1]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_7():
Any_00 = -11
Any_List_00 = [-7, 18, 14, -29]
Any_10 = 9
list.append(Any_List_00, Any_10)
Any_List_01 = [-7, 18, 14, -29]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_8():
Any_00 = -47
Any_List_00 = [37, -22]
Any_10 = -32
list.append(Any_List_00, Any_10)
Any_List_01 = [37, -22]
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_0))
def t_5_2_0_9():
Any_00 = 22
Any_List_00 = []
Any_10 = -33
list.append(Any_List_00, Any_10)
Any_List_01 = []
list.extend(Any_List_00, Any_List_01)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_5_2_0():
try:
return (t_5_2_0_0() and t_5_2_0_1() and t_5_2_0_2() and t_5_2_0_3() and t_5_2_0_4() and t_5_2_0_5() and t_5_2_0_6() and t_5_2_0_7() and t_5_2_0_8() and t_5_2_0_9())
except IndexError:
return False
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_0():
Any_00 = -27
Any_List_00 = []
Any_10 = 48
list.append(Any_List_00, Any_10)
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_1():
Any_00 = -38
Any_List_00 = [-16, 41, -37, -24, -17]
Any_10 = 19
list.append(Any_List_00, Any_10)
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_2():
Any_00 = 30
Any_List_00 = [32, -40, -41, -23]
Any_10 = 23
list.append(Any_List_00, Any_10)
Any_List_10 = [-28, 15, 5, -48, 25]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_3():
Any_00 = -3
Any_List_00 = [-14, -22, -25, 26, 13]
Any_10 = 12
list.append(Any_List_00, Any_10)
Any_List_10 = [4]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_4():
Any_00 = 7
Any_List_00 = [19, -26]
Any_10 = 36
list.append(Any_List_00, Any_10)
Any_List_10 = [42, -41, -18]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_5():
Any_00 = 2
Any_List_00 = []
Any_10 = -25
list.append(Any_List_00, Any_10)
Any_List_10 = [18, 48, -2, 15, 12]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_6():
Any_00 = -41
Any_List_00 = [15, 24, 24, 4]
Any_10 = 1
list.append(Any_List_00, Any_10)
Any_List_10 = []
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_7():
Any_00 = -5
Any_List_00 = []
Any_10 = 8
list.append(Any_List_00, Any_10)
Any_List_10 = [-12]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_8():
Any_00 = 39
Any_List_00 = [-50, 19, -35, -12, 15]
Any_10 = 38
list.append(Any_List_00, Any_10)
Any_List_10 = [-10, 49, 19, 32, 23]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
# testing Any_0 = lpop(lextend(lappend(Any_List_0, Any_1), Any_List_1))
def t_5_3_0_9():
Any_00 = 20
Any_List_00 = [2, 19, 16, 2]
Any_10 = -14
list.append(Any_List_00, Any_10)
Any_List_10 = [30, 24, -11, 7]
list.extend(Any_List_00, Any_List_10)
lpop_ret0 = list.pop(Any_List_00)
eq_ret0 = Any_00 == lpop_ret0
return eq_ret0
def f_5_3_0():
try:
return (t_5_3_0_0() and t_5_3_0_1() and t_5_3_0_2() and t_5_3_0_3() and t_5_3_0_4() and t_5_3_0_5() and t_5_3_0_6() and t_5_3_0_7() and t_5_3_0_8() and t_5_3_0_9())
except IndexError:
return False
results = [f_0_0_0(), f_0_1_0(), f_1_0_0(), f_1_1_0(), f_2_0_0(), f_2_1_0(), f_3_0_0(), f_3_1_0(), f_3_2_0(), f_3_3_0(), f_4_0_0(), f_4_1_0(), f_4_2_0(), f_4_3_0(), f_5_0_0(), f_5_1_0(), f_5_2_0(), f_5_3_0()]
print('\n'.join(map(str, results)))
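# The t_* / f_* functions above are generated property checks: each `# testing`
# comment states an identity over a tiny list DSL, and the body replays it with
# Python's built-in list methods (lappend -> list.append, lextend -> list.extend,
# lpop -> list.pop). A minimal sketch of that mapping, assuming the DSL names
# used in the comments (they are not defined anywhere in this file):
def lappend(xs, x):
    xs.append(x)   # append in place, as the generated bodies do
    return xs      # return the list so the calls can nest

def lextend(xs, ys):
    xs.extend(ys)
    return xs

def lpop(xs):
    return xs.pop()  # pops and returns the last element; IndexError when empty

# For example, the t_5_1_0_* cases ask whether
#     lpop(lextend(lappend(xs, x), ys)) == x
# which holds in general only when ys is empty, since extending pushes x away
# from the tail.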
43c8452cefd9f6c421c933e79a18ed81683740a7 | 41 | py | Python | external/models/TransH_USE_h2/__init__.py | swapUniba/Elliot_refactor-tesi-Ventrella | 3ddffc041696c90a6f6d3e8906c212fc4f55f842 | ["Apache-2.0"] | null | null | null
from .TransH_USE_h2 import TransH_USE_h2
78dbe4dc9267a12ecbb7ee982fa69d7766681b09 | 21,331 | py | Python | models/resunet.py | MEIXuYan/SpatioTemporalSegmentation | 153f12286e3ca6818d63fd6f67108314351bfb6e | ["MIT"] | null | null | null
from models.resnet import ResNetBase, get_norm
from models.modules.common import ConvType, NormType, conv, conv_tr
from models.modules.resnet_block import BasicBlock, BasicBlockINBN, Bottleneck
import torch.nn as nn
import MinkowskiEngine as ME
from MinkowskiEngine import MinkowskiReLU
import MinkowskiEngine.MinkowskiOps as me
class MinkUNetBase(ResNetBase):
BLOCK = None
PLANES = (64, 128, 256, 512, 256, 128, 128)
DILATIONS = (1, 1, 1, 1, 1, 1)
LAYERS = (2, 2, 2, 2, 2, 2)
INIT_DIM = 64
OUT_PIXEL_DIST = 1
NORM_TYPE = NormType.BATCH_NORM
NON_BLOCK_CONV_TYPE = ConvType.SPATIAL_HYPERCUBE
CONV_TYPE = ConvType.SPATIAL_HYPERCUBE_TEMPORAL_HYPERCROSS
    # To use the model, initialize_coords must be called before the forward pass.
    # Once the data is processed, call clear to reset the model before calling initialize_coords again.
def __init__(self, in_channels, out_channels, config, D=3, **kwargs):
super(MinkUNetBase, self).__init__(in_channels, out_channels, config,
D)
def network_initialization(self, in_channels, out_channels, config, D):
# Setup net_metadata
dilations = self.DILATIONS
bn_momentum = config.bn_momentum
def space_n_time_m(n, m):
return n if D == 3 else [n, n, n, m]
if D == 4:
self.OUT_PIXEL_DIST = space_n_time_m(self.OUT_PIXEL_DIST, 1)
        # Output of the first conv is concatenated to conv6
self.inplanes = self.INIT_DIM
self.conv1p1s1 = conv(in_channels,
self.inplanes,
kernel_size=space_n_time_m(
config.conv1_kernel_size, 1),
stride=1,
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn1 = get_norm(self.NORM_TYPE,
self.PLANES[0],
D,
bn_momentum=bn_momentum)
self.block1 = self._make_layer(self.BLOCK,
self.PLANES[0],
self.LAYERS[0],
dilation=dilations[0],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.conv2p1s2 = conv(self.inplanes,
self.inplanes,
kernel_size=space_n_time_m(2, 1),
stride=space_n_time_m(2, 1),
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn2 = get_norm(self.NORM_TYPE,
self.inplanes,
D,
bn_momentum=bn_momentum)
self.block2 = self._make_layer(self.BLOCK,
self.PLANES[1],
self.LAYERS[1],
dilation=dilations[1],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.conv3p2s2 = conv(self.inplanes,
self.inplanes,
kernel_size=space_n_time_m(2, 1),
stride=space_n_time_m(2, 1),
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn3 = get_norm(self.NORM_TYPE,
self.inplanes,
D,
bn_momentum=bn_momentum)
self.block3 = self._make_layer(self.BLOCK,
self.PLANES[2],
self.LAYERS[2],
dilation=dilations[2],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.conv4p4s2 = conv(self.inplanes,
self.inplanes,
kernel_size=space_n_time_m(2, 1),
stride=space_n_time_m(2, 1),
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn4 = get_norm(self.NORM_TYPE,
self.inplanes,
D,
bn_momentum=bn_momentum)
self.block4 = self._make_layer(self.BLOCK,
self.PLANES[3],
self.LAYERS[3],
dilation=dilations[3],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.convtr4p8s2 = conv_tr(self.inplanes,
self.PLANES[4],
kernel_size=space_n_time_m(2, 1),
upsample_stride=space_n_time_m(2, 1),
dilation=1,
bias=False,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bntr4 = get_norm(self.NORM_TYPE,
self.PLANES[4],
D,
bn_momentum=bn_momentum)
self.inplanes = self.PLANES[4] + self.PLANES[2] * self.BLOCK.expansion
self.block5 = self._make_layer(self.BLOCK,
self.PLANES[4],
self.LAYERS[4],
dilation=dilations[4],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.convtr5p4s2 = conv_tr(self.inplanes,
self.PLANES[5],
kernel_size=space_n_time_m(2, 1),
upsample_stride=space_n_time_m(2, 1),
dilation=1,
bias=False,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bntr5 = get_norm(self.NORM_TYPE,
self.PLANES[5],
D,
bn_momentum=bn_momentum)
self.inplanes = self.PLANES[5] + self.PLANES[1] * self.BLOCK.expansion
self.block6 = self._make_layer(self.BLOCK,
self.PLANES[5],
self.LAYERS[5],
dilation=dilations[5],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.convtr6p2s2 = conv_tr(self.inplanes,
self.PLANES[6],
kernel_size=space_n_time_m(2, 1),
upsample_stride=space_n_time_m(2, 1),
dilation=1,
bias=False,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bntr6 = get_norm(self.NORM_TYPE,
self.PLANES[6],
D,
bn_momentum=bn_momentum)
self.relu = MinkowskiReLU(inplace=True)
self.final = nn.Sequential(
conv(self.PLANES[6] + self.PLANES[0] * self.BLOCK.expansion,
512,
kernel_size=1,
stride=1,
dilation=1,
bias=False,
D=D), ME.MinkowskiBatchNorm(512), ME.MinkowskiReLU(),
conv(512,
out_channels,
kernel_size=1,
stride=1,
dilation=1,
bias=True,
D=D))
def forward(self, x):
out = self.conv1p1s1(x)
out = self.bn1(out)
out = self.relu(out)
out_b1p1 = self.block1(out)
out = self.conv2p1s2(out_b1p1)
out = self.bn2(out)
out = self.relu(out)
out_b2p2 = self.block2(out)
out = self.conv3p2s2(out_b2p2)
out = self.bn3(out)
out = self.relu(out)
out_b3p4 = self.block3(out)
out = self.conv4p4s2(out_b3p4)
out = self.bn4(out)
out = self.relu(out)
# pixel_dist=8
out = self.block4(out)
out = self.convtr4p8s2(out)
out = self.bntr4(out)
out = self.relu(out)
out = me.cat(out, out_b3p4)
out = self.block5(out)
out = self.convtr5p4s2(out)
out = self.bntr5(out)
out = self.relu(out)
out = me.cat(out, out_b2p2)
out = self.block6(out)
out = self.convtr6p2s2(out)
out = self.bntr6(out)
out = self.relu(out)
out = me.cat(out, out_b1p1)
return self.final(out)
class ResUNet14(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (1, 1, 1, 1, 1, 1)
class ResUNet18(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (2, 2, 2, 2, 2, 2)
class ResUNet18INBN(ResUNet18):
NORM_TYPE = NormType.INSTANCE_BATCH_NORM
BLOCK = BasicBlockINBN
class ResUNet34(MinkUNetBase):
BLOCK = BasicBlock
LAYERS = (3, 4, 6, 3, 2, 2)
class ResUNet50(MinkUNetBase):
BLOCK = Bottleneck
LAYERS = (3, 4, 6, 3, 2, 2)
class ResUNet101(MinkUNetBase):
BLOCK = Bottleneck
LAYERS = (3, 4, 23, 3, 2, 2)
class ResUNet14D(ResUNet14):
PLANES = (64, 128, 256, 512, 512, 512, 512)
class ResUNet18D(ResUNet18):
PLANES = (64, 128, 256, 512, 512, 512, 512)
class ResUNet34D(ResUNet34):
PLANES = (64, 128, 256, 512, 512, 512, 512)
class ResUNet34E(ResUNet34):
INIT_DIM = 32
PLANES = (32, 64, 128, 256, 128, 64, 64)
class ResUNet34F(ResUNet34):
INIT_DIM = 32
PLANES = (32, 64, 128, 256, 128, 64, 32)
class MinkUNetHyper(MinkUNetBase):
BLOCK = None
PLANES = (64, 128, 256, 512, 256, 128, 128)
DILATIONS = (1, 1, 1, 1, 1, 1)
LAYERS = (2, 2, 2, 2, 2, 2)
INIT_DIM = 64
OUT_PIXEL_DIST = 1
NORM_TYPE = NormType.BATCH_NORM
NON_BLOCK_CONV_TYPE = ConvType.SPATIAL_HYPERCUBE
CONV_TYPE = ConvType.SPATIAL_HYPERCUBE_TEMPORAL_HYPERCROSS
    # To use the model, initialize_coords must be called before the forward pass.
    # Once the data is processed, call clear to reset the model before calling initialize_coords again.
def __init__(self, in_channels, out_channels, config, D=3, **kwargs):
        super(MinkUNetHyper, self).__init__(in_channels, out_channels, config,
                                            D)
def network_initialization(self, in_channels, out_channels, config, D):
# Setup net_metadata
dilations = self.DILATIONS
bn_momentum = config.bn_momentum
def space_n_time_m(n, m):
return n if D == 3 else [n, n, n, m]
if D == 4:
self.OUT_PIXEL_DIST = space_n_time_m(self.OUT_PIXEL_DIST, 1)
        # Output of the first conv is concatenated to conv6
self.inplanes = self.INIT_DIM
self.conv1p1s1 = conv(in_channels,
self.inplanes,
kernel_size=space_n_time_m(
config.conv1_kernel_size, 1),
stride=1,
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn1 = get_norm(self.NORM_TYPE,
self.PLANES[0],
D,
bn_momentum=bn_momentum)
self.block1 = self._make_layer(self.BLOCK,
self.PLANES[0],
self.LAYERS[0],
dilation=dilations[0],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.conv2p1s2 = conv(self.inplanes,
self.inplanes,
kernel_size=space_n_time_m(2, 1),
stride=space_n_time_m(2, 1),
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn2 = get_norm(self.NORM_TYPE,
self.inplanes,
D,
bn_momentum=bn_momentum)
self.block2 = self._make_layer(self.BLOCK,
self.PLANES[1],
self.LAYERS[1],
dilation=dilations[1],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.conv3p2s2 = conv(self.inplanes,
self.inplanes,
kernel_size=space_n_time_m(2, 1),
stride=space_n_time_m(2, 1),
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn3 = get_norm(self.NORM_TYPE,
self.inplanes,
D,
bn_momentum=bn_momentum)
self.block3 = self._make_layer(self.BLOCK,
self.PLANES[2],
self.LAYERS[2],
dilation=dilations[2],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.conv4p4s2 = conv(self.inplanes,
self.inplanes,
kernel_size=space_n_time_m(2, 1),
stride=space_n_time_m(2, 1),
dilation=1,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bn4 = get_norm(self.NORM_TYPE,
self.inplanes,
D,
bn_momentum=bn_momentum)
self.block4 = self._make_layer(self.BLOCK,
self.PLANES[3],
self.LAYERS[3],
dilation=dilations[3],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.pool_tr4 = ME.MinkowskiPoolingTranspose(kernel_size=8,
stride=8,
dimension=D)
out_pool4 = self.inplanes
self.convtr4p8s2 = conv_tr(self.inplanes,
self.PLANES[4],
kernel_size=space_n_time_m(2, 1),
upsample_stride=space_n_time_m(2, 1),
dilation=1,
bias=False,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bntr4 = get_norm(self.NORM_TYPE,
self.PLANES[4],
D,
bn_momentum=bn_momentum)
self.inplanes = self.PLANES[4] + self.PLANES[2] * self.BLOCK.expansion
self.block5 = self._make_layer(self.BLOCK,
self.PLANES[4],
self.LAYERS[4],
dilation=dilations[4],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.pool_tr5 = ME.MinkowskiPoolingTranspose(kernel_size=4,
stride=4,
dimension=D)
out_pool5 = self.inplanes
self.convtr5p4s2 = conv_tr(self.inplanes,
self.PLANES[5],
kernel_size=space_n_time_m(2, 1),
upsample_stride=space_n_time_m(2, 1),
dilation=1,
bias=False,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bntr5 = get_norm(self.NORM_TYPE,
self.PLANES[5],
D,
bn_momentum=bn_momentum)
self.inplanes = self.PLANES[5] + self.PLANES[1] * self.BLOCK.expansion
self.block6 = self._make_layer(self.BLOCK,
self.PLANES[5],
self.LAYERS[5],
dilation=dilations[5],
norm_type=self.NORM_TYPE,
bn_momentum=bn_momentum)
self.pool_tr6 = ME.MinkowskiPoolingTranspose(kernel_size=2,
stride=2,
dimension=D)
out_pool6 = self.inplanes
self.convtr6p2s2 = conv_tr(self.inplanes,
self.PLANES[6],
kernel_size=space_n_time_m(2, 1),
upsample_stride=space_n_time_m(2, 1),
dilation=1,
bias=False,
conv_type=self.NON_BLOCK_CONV_TYPE,
D=D)
self.bntr6 = get_norm(self.NORM_TYPE,
self.PLANES[6],
D,
bn_momentum=bn_momentum)
self.relu = MinkowskiReLU(inplace=True)
self.final = nn.Sequential(
conv(out_pool5 + out_pool6 + self.PLANES[6] +
self.PLANES[0] * self.BLOCK.expansion,
512,
kernel_size=1,
bias=False,
D=D), ME.MinkowskiBatchNorm(512), ME.MinkowskiReLU(),
conv(512, out_channels, kernel_size=1, bias=True, D=D))
def forward(self, x):
out = self.conv1p1s1(x)
out = self.bn1(out)
out = self.relu(out)
out_b1p1 = self.block1(out)
out = self.conv2p1s2(out_b1p1)
out = self.bn2(out)
out = self.relu(out)
out_b2p2 = self.block2(out)
out = self.conv3p2s2(out_b2p2)
out = self.bn3(out)
out = self.relu(out)
out_b3p4 = self.block3(out)
out = self.conv4p4s2(out_b3p4)
out = self.bn4(out)
out = self.relu(out)
# pixel_dist=8
out = self.block4(out)
out = self.convtr4p8s2(out)
out = self.bntr4(out)
out = self.relu(out)
out = me.cat(out, out_b3p4)
out = self.block5(out)
out_5 = self.pool_tr5(out)
out = self.convtr5p4s2(out)
out = self.bntr5(out)
out = self.relu(out)
out = me.cat(out, out_b2p2)
out = self.block6(out)
out_6 = self.pool_tr6(out)
out = self.convtr6p2s2(out)
out = self.bntr6(out)
out = self.relu(out)
out = me.cat(out, out_b1p1, out_6, out_5)
return self.final(out)
class MinkUNetHyper14INBN(MinkUNetHyper):
NORM_TYPE = NormType.INSTANCE_BATCH_NORM
BLOCK = BasicBlockINBN
class STMinkUNetBase(MinkUNetBase):
CONV_TYPE = ConvType.SPATIAL_HYPERCUBE_TEMPORAL_HYPERCROSS
def __init__(self, in_channels, out_channels, config, D=4, **kwargs):
super(STMinkUNetBase, self).__init__(in_channels, out_channels, config,
D, **kwargs)
class STResUNet14(STMinkUNetBase, ResUNet14):
pass
class STResUNet18(STMinkUNetBase, ResUNet18):
pass
class STResUNet34(STMinkUNetBase, ResUNet34):
pass
class STResUNet50(STMinkUNetBase, ResUNet50):
pass
class STResUNet101(STMinkUNetBase, ResUNet101):
pass
class STResTesseractUNetBase(STMinkUNetBase):
CONV_TYPE = ConvType.HYPERCUBE
class STResTesseractUNet14(STResTesseractUNetBase, ResUNet14):
pass
class STResTesseractUNet18(STResTesseractUNetBase, ResUNet18):
pass
class STResTesseractUNet34(STResTesseractUNetBase, ResUNet34):
pass
class STResTesseractUNet50(STResTesseractUNetBase, ResUNet50):
pass
class STResTesseractUNet101(STResTesseractUNetBase, ResUNet101):
pass
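# A minimal construction sketch for the classes above (not part of the original
# file). The config fields below are the only ones this file itself reads
# (bn_momentum and conv1_kernel_size); whether ResNetBase reads further fields
# is not verified here, so treat this as an illustrative assumption.
from types import SimpleNamespace

_example_config = SimpleNamespace(bn_momentum=0.1,      # read by get_norm
                                  conv1_kernel_size=3)  # read by conv1p1s1
_example_net = ResUNet14(in_channels=3, out_channels=20, config=_example_config, D=3)
# The forward pass expects a MinkowskiEngine sparse tensor, e.g.:
#     out = _example_net(ME.SparseTensor(features, coordinates))
# and, per the comments in MinkUNetBase, initialize_coords must be called
# before the forward pass when the surrounding pipeline requires it.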
78e5c1d6120c53f31d92f8592e4a2a563f1951d3 | 3,287 | py | Python | numpy/typing/tests/data/reveal/numeric.py | deepyaman/numpy | b7e75442b03153c7d94fc99e8026d1f09ec17f7f | ["BSD-3-Clause"] | 603 | 2020-12-23T13:49:32.000Z | 2022-03-31T23:38:03.000Z | 387 | 2020-12-15T14:54:04.000Z | 2022-03-31T07:00:21.000Z | 35 | 2021-03-26T03:12:04.000Z | 2022-03-23T10:15:10.000Z
"""
Tests for :mod:`numpy.core.numeric`.
Does not include tests which fall under ``array_constructors``.
"""
from typing import List
import numpy as np
class SubClass(np.ndarray):
...
i8: np.int64
A: np.ndarray
B: List[int]
C: SubClass
reveal_type(np.count_nonzero(i8)) # E: int
reveal_type(np.count_nonzero(A)) # E: int
reveal_type(np.count_nonzero(B)) # E: int
reveal_type(np.count_nonzero(A, keepdims=True)) # E: Union[numpy.signedinteger[Any], numpy.ndarray[Any, Any]]
reveal_type(np.count_nonzero(A, axis=0)) # E: Union[numpy.signedinteger[Any], numpy.ndarray[Any, Any]]
reveal_type(np.isfortran(i8)) # E: bool
reveal_type(np.isfortran(A)) # E: bool
reveal_type(np.argwhere(i8)) # E: numpy.ndarray[Any, Any]
reveal_type(np.argwhere(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.flatnonzero(i8)) # E: numpy.ndarray[Any, Any]
reveal_type(np.flatnonzero(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.correlate(B, A, mode="valid")) # E: numpy.ndarray[Any, Any]
reveal_type(np.correlate(A, A, mode="same")) # E: numpy.ndarray[Any, Any]
reveal_type(np.convolve(B, A, mode="valid")) # E: numpy.ndarray[Any, Any]
reveal_type(np.convolve(A, A, mode="same")) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(i8, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(B, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(A, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(A, A, out=C)) # E: SubClass
reveal_type(np.tensordot(B, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.tensordot(A, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.tensordot(A, A, axes=0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.tensordot(A, A, axes=(0, 1))) # E: numpy.ndarray[Any, Any]
reveal_type(np.isscalar(i8)) # E: bool
reveal_type(np.isscalar(A)) # E: bool
reveal_type(np.isscalar(B)) # E: bool
reveal_type(np.roll(A, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.roll(A, (1, 2))) # E: numpy.ndarray[Any, Any]
reveal_type(np.roll(B, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.rollaxis(A, 0, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.moveaxis(A, 0, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.moveaxis(A, (0, 1), (1, 2))) # E: numpy.ndarray[Any, Any]
reveal_type(np.cross(B, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cross(A, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.indices([0, 1, 2])) # E: numpy.ndarray[Any, Any]
reveal_type(np.indices([0, 1, 2], sparse=False)) # E: numpy.ndarray[Any, Any]
reveal_type(np.indices([0, 1, 2], sparse=True)) # E: tuple[numpy.ndarray[Any, Any]]
reveal_type(np.binary_repr(1)) # E: str
reveal_type(np.base_repr(1)) # E: str
reveal_type(np.allclose(i8, A)) # E: bool
reveal_type(np.allclose(B, A)) # E: bool
reveal_type(np.allclose(A, A)) # E: bool
reveal_type(np.isclose(i8, A)) # E: Union[numpy.bool_, numpy.ndarray[Any, Any]]
reveal_type(np.isclose(B, A)) # E: Union[numpy.bool_, numpy.ndarray[Any, Any]]
reveal_type(np.isclose(A, A)) # E: Union[numpy.bool_, numpy.ndarray[Any, Any]]
reveal_type(np.array_equal(i8, A)) # E: bool
reveal_type(np.array_equal(B, A)) # E: bool
reveal_type(np.array_equal(A, A)) # E: bool
reveal_type(np.array_equiv(i8, A)) # E: bool
reveal_type(np.array_equiv(B, A)) # E: bool
reveal_type(np.array_equiv(A, A)) # E: bool
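# The lines above are a type-checking fixture rather than a runtime test: each
# `# E:` comment records the type a static checker (e.g. mypy) is expected to
# reveal for the expression, and the file is meant to be fed to the checker.
# A minimal sketch of the same pattern; note that typing.reveal_type exists at
# runtime only on Python 3.11+, while checkers treat reveal_type as a builtin.
from typing import reveal_type

import numpy as np

a = np.zeros(3)
reveal_type(np.isfortran(a))  # a checker reveals: bool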
78fbcc6d7b0d43a6be364830259c3c4062b0297c | 43 | py | Python | python/testData/postfix/isNotNone/complexExpression.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | ["Apache-2.0"] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z
python/testData/postfix/isNotNone/complexExpression.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | ["Apache-2.0"] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z
python/testData/postfix/isNotNone/complexExpression.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | ["Apache-2.0"] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z
def f(a, b, c):
(a + b) * c.ifnn<caret>
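# Postfix-template fixture: <caret> marks the editor caret, and the path
# (postfix/isNotNone) names the `.ifnn` postfix template under test, which
# would presumably expand the expression into an `if ... is not None:` guard.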
6020b5170ffea8a8471670156907d5e67daea7d0 | 13,054 | py | Python | yolo4/models/yolo4_mobilenetv3_large.py | JsonSadler/keras-YOLOv3-model-set | 378bee8853bc2c547f379f707098a39025a0ff83 | ["MIT"] | 1 | 2020-05-06T04:04:19.000Z | 2020-05-06T04:04:19.000Z | null | null | null | null | null | null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""YOLO_v4 MobileNetV3Large Model Defined in Keras."""
from tensorflow.keras.layers import ZeroPadding2D, UpSampling2D, Concatenate
from tensorflow.keras.models import Model
from common.backbones.mobilenet_v3 import MobileNetV3Large
from yolo4.models.layers import compose, DarknetConv2D, DarknetConv2D_BN_Leaky, Spp_Conv2D_BN_Leaky, Depthwise_Separable_Conv2D_BN_Leaky, Darknet_Depthwise_Separable_Conv2D_BN_Leaky, make_yolo_head, make_yolo_spp_head, make_yolo_depthwise_separable_head, make_yolo_spp_depthwise_separable_head
def yolo4_mobilenetv3large_body(inputs, num_anchors, num_classes, alpha=1.0):
"""Create YOLO_V4 MobileNetV3Large model CNN body in Keras."""
mobilenetv3large = MobileNetV3Large(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
# input: 416 x 416 x 3
# activation_38(layer 194, final feature map): 13 x 13 x (960*alpha)
# expanded_conv_14/Add(layer 191, end of block14): 13 x 13 x (160*alpha)
# activation_29(layer 146, middle in block12) : 26 x 26 x (672*alpha)
# expanded_conv_11/Add(layer 143, end of block11) : 26 x 26 x (112*alpha)
# activation_15(layer 79, middle in block6) : 52 x 52 x (240*alpha)
# expanded_conv_5/Add(layer 76, end of block5): 52 x 52 x (40*alpha)
# f1 :13 x 13 x (960*alpha)
    # NOTE: activation layer names may differ between TF 1.x and 2.x, so we
    # use the layer index to fetch the layer
f1 = mobilenetv3large.layers[194].output
#feature map 1 head (13 x 13 x (480*alpha) for 416 input)
x1 = make_yolo_spp_head(f1, int(480*alpha))
#upsample fpn merge for feature map 1 & 2
x1_upsample = compose(
DarknetConv2D_BN_Leaky(int(336*alpha), (1,1)),
UpSampling2D(2))(x1)
f2 = mobilenetv3large.layers[146].output
# f2: 26 x 26 x (672*alpha) for 416 input
x2 = DarknetConv2D_BN_Leaky(int(336*alpha), (1,1))(f2)
x2 = Concatenate()([x2, x1_upsample])
#feature map 2 head (26 x 26 x (336*alpha) for 416 input)
x2 = make_yolo_head(x2, int(336*alpha))
#upsample fpn merge for feature map 2 & 3
x2_upsample = compose(
DarknetConv2D_BN_Leaky(int(120*alpha), (1,1)),
UpSampling2D(2))(x2)
f3 = mobilenetv3large.layers[79].output
# f3 : 52 x 52 x (240*alpha) for 416 input
x3 = DarknetConv2D_BN_Leaky(int(120*alpha), (1,1))(f3)
x3 = Concatenate()([x3, x2_upsample])
#feature map 3 head & output (52 x 52 x (240*alpha) for 416 input)
#x3, y3 = make_last_layers(x3, int(120*alpha), num_anchors*(num_classes+5))
x3 = make_yolo_head(x3, int(120*alpha))
y3 = compose(
DarknetConv2D_BN_Leaky(int(240*alpha), (3,3)),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))(x3)
#downsample fpn merge for feature map 3 & 2
x3_downsample = compose(
ZeroPadding2D(((1,0),(1,0))),
DarknetConv2D_BN_Leaky(int(336*alpha), (3,3), strides=(2,2)))(x3)
x2 = Concatenate()([x3_downsample, x2])
#feature map 2 output (26 x 26 x (672*alpha) for 416 input)
#x2, y2 = make_last_layers(x2, int(336*alpha), num_anchors*(num_classes+5))
x2 = make_yolo_head(x2, int(336*alpha))
y2 = compose(
DarknetConv2D_BN_Leaky(int(672*alpha), (3,3)),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))(x2)
#downsample fpn merge for feature map 2 & 1
x2_downsample = compose(
ZeroPadding2D(((1,0),(1,0))),
DarknetConv2D_BN_Leaky(int(480*alpha), (3,3), strides=(2,2)))(x2)
x1 = Concatenate()([x2_downsample, x1])
#feature map 1 output (13 x 13 x (960*alpha) for 416 input)
#x1, y1 = make_last_layers(x1, int(480*alpha), num_anchors*(num_classes+5))
x1 = make_yolo_head(x1, int(480*alpha))
y1 = compose(
DarknetConv2D_BN_Leaky(int(960*alpha), (3,3)),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))(x1)
return Model(inputs, [y1, y2, y3])
def yolo4lite_mobilenetv3large_body(inputs, num_anchors, num_classes, alpha=1.0):
'''Create YOLO_v4 Lite MobileNetV3Large model CNN body in keras.'''
mobilenetv3large = MobileNetV3Large(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
# input: 416 x 416 x 3
# activation_38(layer 194, final feature map): 13 x 13 x (960*alpha)
# expanded_conv_14/Add(layer 191, end of block14): 13 x 13 x (160*alpha)
# activation_29(layer 146, middle in block12) : 26 x 26 x (672*alpha)
# expanded_conv_11/Add(layer 143, end of block11) : 26 x 26 x (112*alpha)
# activation_15(layer 79, middle in block6) : 52 x 52 x (240*alpha)
# expanded_conv_5/Add(layer 76, end of block5): 52 x 52 x (40*alpha)
# f1 :13 x 13 x (960*alpha)
    # NOTE: activation layer names may differ between TF 1.x and 2.x, so we
    # use the layer index to fetch the layer
f1 = mobilenetv3large.layers[194].output
#feature map 1 head (13 x 13 x (480*alpha) for 416 input)
x1 = make_yolo_spp_depthwise_separable_head(f1, int(480*alpha), block_id_str='15')
#upsample fpn merge for feature map 1 & 2
x1_upsample = compose(
DarknetConv2D_BN_Leaky(int(336*alpha), (1,1)),
UpSampling2D(2))(x1)
f2 = mobilenetv3large.layers[146].output
# f2: 26 x 26 x (672*alpha) for 416 input
x2 = DarknetConv2D_BN_Leaky(int(336*alpha), (1,1))(f2)
x2 = Concatenate()([x2, x1_upsample])
#feature map 2 head (26 x 26 x (336*alpha) for 416 input)
x2 = make_yolo_depthwise_separable_head(x2, int(336*alpha), block_id_str='16')
#upsample fpn merge for feature map 2 & 3
x2_upsample = compose(
DarknetConv2D_BN_Leaky(int(120*alpha), (1,1)),
UpSampling2D(2))(x2)
f3 = mobilenetv3large.layers[79].output
# f3 : 52 x 52 x (240*alpha) for 416 input
x3 = DarknetConv2D_BN_Leaky(int(120*alpha), (1,1))(f3)
x3 = Concatenate()([x3, x2_upsample])
#feature map 3 head & output (52 x 52 x (240*alpha) for 416 input)
#x3, y3 = make_depthwise_separable_last_layers(x3, int(120*alpha), num_anchors*(num_classes+5), block_id_str='17')
x3 = make_yolo_depthwise_separable_head(x3, int(120*alpha), block_id_str='17')
y3 = compose(
Depthwise_Separable_Conv2D_BN_Leaky(int(240*alpha), (3,3), block_id_str='17_3'),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))(x3)
#downsample fpn merge for feature map 3 & 2
x3_downsample = compose(
ZeroPadding2D(((1,0),(1,0))),
Darknet_Depthwise_Separable_Conv2D_BN_Leaky(int(336*alpha), (3,3), strides=(2,2), block_id_str='17_4'))(x3)
x2 = Concatenate()([x3_downsample, x2])
#feature map 2 output (26 x 26 x (672*alpha) for 416 input)
#x2, y2 = make_depthwise_separable_last_layers(x2, int(336*alpha), num_anchors*(num_classes+5), block_id_str='18')
x2 = make_yolo_depthwise_separable_head(x2, int(336*alpha), block_id_str='18')
y2 = compose(
Depthwise_Separable_Conv2D_BN_Leaky(int(672*alpha), (3,3), block_id_str='18_3'),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))(x2)
#downsample fpn merge for feature map 2 & 1
x2_downsample = compose(
ZeroPadding2D(((1,0),(1,0))),
Darknet_Depthwise_Separable_Conv2D_BN_Leaky(int(480*alpha), (3,3), strides=(2,2), block_id_str='18_4'))(x2)
x1 = Concatenate()([x2_downsample, x1])
#feature map 1 output (13 x 13 x (960*alpha) for 416 input)
#x1, y1 = make_depthwise_separable_last_layers(x1, int(480*alpha), num_anchors*(num_classes+5))
x1 = make_yolo_depthwise_separable_head(x1, int(480*alpha), block_id_str='19')
y1 = compose(
Depthwise_Separable_Conv2D_BN_Leaky(int(960*alpha), (3,3), block_id_str='19_3'),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))(x1)
return Model(inputs, [y1, y2, y3])
def tiny_yolo4_mobilenetv3large_body(inputs, num_anchors, num_classes, alpha=1.0, use_spp=True):
'''Create Tiny YOLO_v4 MobileNetV3Large model CNN body in keras.'''
mobilenetv3large = MobileNetV3Large(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
# input: 416 x 416 x 3
# activation_38(layer 194, final feature map): 13 x 13 x (960*alpha)
# expanded_conv_14/Add(layer 191, end of block14): 13 x 13 x (160*alpha)
# activation_29(layer 146, middle in block12) : 26 x 26 x (672*alpha)
# expanded_conv_11/Add(layer 143, end of block11) : 26 x 26 x (112*alpha)
# activation_15(layer 79, middle in block6) : 52 x 52 x (240*alpha)
# expanded_conv_5/Add(layer 76, end of block5): 52 x 52 x (40*alpha)
# f1 :13 x 13 x (960*alpha)
    # NOTE: activation layer names may differ between TF 1.x and 2.x, so we
    # use the layer index to fetch the layer
f1 = mobilenetv3large.layers[194].output
# f2: 26 x 26 x (672*alpha) for 416 input
f2 = mobilenetv3large.layers[146].output
#feature map 1 head (13 x 13 x (480*alpha) for 416 input)
x1 = DarknetConv2D_BN_Leaky(int(480*alpha), (1,1))(f1)
if use_spp:
x1 = Spp_Conv2D_BN_Leaky(x1, int(480*alpha))
#upsample fpn merge for feature map 1 & 2
x1_upsample = compose(
DarknetConv2D_BN_Leaky(int(336*alpha), (1,1)),
UpSampling2D(2))(x1)
x2 = compose(
Concatenate(),
#Depthwise_Separable_Conv2D_BN_Leaky(filters=int(672*alpha), kernel_size=(3, 3), block_id_str='15'),
DarknetConv2D_BN_Leaky(int(672*alpha), (3,3)))([x1_upsample, f2])
#feature map 2 output (26 x 26 x (672*alpha) for 416 input)
y2 = DarknetConv2D(num_anchors*(num_classes+5), (1,1))(x2)
#downsample fpn merge for feature map 2 & 1
x2_downsample = compose(
ZeroPadding2D(((1,0),(1,0))),
#Darknet_Depthwise_Separable_Conv2D_BN_Leaky(int(480*alpha), (3,3), strides=(2,2), block_id_str='16'),
DarknetConv2D_BN_Leaky(int(480*alpha), (3,3), strides=(2,2)))(x2)
x1 = compose(
Concatenate(),
#Depthwise_Separable_Conv2D_BN_Leaky(filters=int(960*alpha), kernel_size=(3, 3), block_id_str='17'),
DarknetConv2D_BN_Leaky(int(960*alpha), (3,3)))([x2_downsample, x1])
#feature map 1 output (13 x 13 x (960*alpha) for 416 input)
y1 = DarknetConv2D(num_anchors*(num_classes+5), (1,1))(x1)
return Model(inputs, [y1,y2])
def tiny_yolo4lite_mobilenetv3large_body(inputs, num_anchors, num_classes, alpha=1.0, use_spp=True):
'''Create Tiny YOLO_v4 Lite MobileNetV3Large model CNN body in keras.'''
mobilenetv3large = MobileNetV3Large(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
# input: 416 x 416 x 3
# activation_38(layer 194, final feature map): 13 x 13 x (960*alpha)
# expanded_conv_14/Add(layer 191, end of block14): 13 x 13 x (160*alpha)
# activation_29(layer 146, middle in block12) : 26 x 26 x (672*alpha)
# expanded_conv_11/Add(layer 143, end of block11) : 26 x 26 x (112*alpha)
# activation_15(layer 79, middle in block6) : 52 x 52 x (240*alpha)
# expanded_conv_5/Add(layer 76, end of block5): 52 x 52 x (40*alpha)
# f1 :13 x 13 x (960*alpha)
    # NOTE: activation layer names may differ between TF 1.x and 2.x, so we
    # use the layer index to fetch the layer
f1 = mobilenetv3large.layers[194].output
# f2: 26 x 26 x (672*alpha) for 416 input
f2 = mobilenetv3large.layers[146].output
#feature map 1 head (13 x 13 x (480*alpha) for 416 input)
x1 = DarknetConv2D_BN_Leaky(int(480*alpha), (1,1))(f1)
if use_spp:
x1 = Spp_Conv2D_BN_Leaky(x1, int(480*alpha))
#upsample fpn merge for feature map 1 & 2
x1_upsample = compose(
DarknetConv2D_BN_Leaky(int(336*alpha), (1,1)),
UpSampling2D(2))(x1)
x2 = compose(
Concatenate(),
#DarknetConv2D_BN_Leaky(int(672*alpha), (3,3)),
Depthwise_Separable_Conv2D_BN_Leaky(filters=int(672*alpha), kernel_size=(3, 3), block_id_str='15'))([x1_upsample, f2])
#feature map 2 output (26 x 26 x (672*alpha) for 416 input)
y2 = DarknetConv2D(num_anchors*(num_classes+5), (1,1))(x2)
#downsample fpn merge for feature map 2 & 1
x2_downsample = compose(
ZeroPadding2D(((1,0),(1,0))),
#DarknetConv2D_BN_Leaky(int(480*alpha), (3,3), strides=(2,2)),
Darknet_Depthwise_Separable_Conv2D_BN_Leaky(int(480*alpha), (3,3), strides=(2,2), block_id_str='16'))(x2)
x1 = compose(
Concatenate(),
#DarknetConv2D_BN_Leaky(int(960*alpha), (3,3)),
Depthwise_Separable_Conv2D_BN_Leaky(filters=int(960*alpha), kernel_size=(3, 3), block_id_str='17'))([x2_downsample, x1])
#feature map 1 output (13 x 13 x (960*alpha) for 416 input)
y1 = DarknetConv2D(num_anchors*(num_classes+5), (1,1))(x1)
return Model(inputs, [y1,y2])
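# A minimal construction sketch for the bodies above (not part of the original
# file; the anchor and class counts are placeholder assumptions, while the
# 416 x 416 input size matches the comments).
from tensorflow.keras.layers import Input

_example_inputs = Input(shape=(416, 416, 3))
_example_model = tiny_yolo4_mobilenetv3large_body(_example_inputs, num_anchors=3, num_classes=20)
_example_model.summary()  # two output scales (13x13 and 26x26), per the comments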
60eabf1457c87bd05d11022a57128fa8bed01c0a | 30,290 | py | Python | tests/unit/test_policies.py | vijay-khanna/cassandra-python-driver | efc854ebe8d718cfea59e124eb4778ff18e4bf3c | ["Apache-2.0"] | 12 | 2016-02-05T21:14:21.000Z | 2022-03-06T11:22:51.000Z | null | null | null | 19 | 2016-05-03T18:07:07.000Z | 2020-09-09T15:38:14.000Z
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from itertools import islice, cycle
from mock import Mock
import struct
from threading import Thread
from cassandra import ConsistencyLevel
from cassandra.cluster import Cluster
from cassandra.metadata import Metadata
from cassandra.policies import (RoundRobinPolicy, DCAwareRoundRobinPolicy,
TokenAwarePolicy, SimpleConvictionPolicy,
HostDistance, ExponentialReconnectionPolicy,
RetryPolicy, WriteType,
DowngradingConsistencyRetryPolicy, ConstantReconnectionPolicy,
LoadBalancingPolicy, ConvictionPolicy, ReconnectionPolicy, FallthroughRetryPolicy)
from cassandra.pool import Host
from cassandra.query import Statement
class TestLoadBalancingPolicy(unittest.TestCase):
def test_non_implemented(self):
"""
Code coverage for interface-style base class
"""
policy = LoadBalancingPolicy()
host = Host("ip1", SimpleConvictionPolicy)
host.set_location_info("dc1", "rack1")
self.assertRaises(NotImplementedError, policy.distance, host)
self.assertRaises(NotImplementedError, policy.populate, None, host)
self.assertRaises(NotImplementedError, policy.make_query_plan)
self.assertRaises(NotImplementedError, policy.on_up, host)
self.assertRaises(NotImplementedError, policy.on_down, host)
self.assertRaises(NotImplementedError, policy.on_add, host)
self.assertRaises(NotImplementedError, policy.on_remove, host)
class TestRoundRobinPolicy(unittest.TestCase):
def test_basic(self):
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), hosts)
def test_multiple_query_plans(self):
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
        for i in range(20):
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), hosts)
def test_single_host(self):
policy = RoundRobinPolicy()
policy.populate(None, [0])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [0])
def test_status_updates(self):
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
policy.on_down(0)
policy.on_remove(1)
policy.on_up(4)
policy.on_add(5)
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), [2, 3, 4, 5])
def test_thread_safety(self):
        hosts = list(range(100))
policy = RoundRobinPolicy()
policy.populate(None, hosts)
def check_query_plan():
for i in range(100):
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), hosts)
        threads = [Thread(target=check_query_plan) for i in range(4)]
        # map() is lazy on Python 3, so the threads would never start; iterate explicitly
        for t in threads:
            t.start()
        for t in threads:
            t.join()
def test_no_live_nodes(self):
"""
Ensure query plan for a downed cluster will execute without errors
"""
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
for i in range(4):
policy.on_down(i)
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
class DCAwareRoundRobinPolicyTest(unittest.TestCase):
def test_no_remote(self):
hosts = []
for i in range(4):
h = Host(i, SimpleConvictionPolicy)
h.set_location_info("dc1", "rack1")
hosts.append(h)
policy = DCAwareRoundRobinPolicy("dc1")
policy.populate(None, hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), sorted(hosts))
def test_with_remotes(self):
hosts = [Host(i, SimpleConvictionPolicy) for i in range(4)]
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
local_hosts = set(h for h in hosts if h.datacenter == "dc1")
remote_hosts = set(h for h in hosts if h.datacenter != "dc1")
# allow all of the remote hosts to be used
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=2)
policy.populate(None, hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), local_hosts)
self.assertEqual(set(qplan[2:]), remote_hosts)
# allow only one of the remote hosts to be used
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(None, hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), local_hosts)
used_remotes = set(qplan[2:])
self.assertEqual(1, len(used_remotes))
self.assertIn(qplan[2], remote_hosts)
# allow no remote hosts to be used
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=0)
policy.populate(None, hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(2, len(qplan))
self.assertEqual(local_hosts, set(qplan))
def test_get_distance(self):
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=0)
host = Host("ip1", SimpleConvictionPolicy)
host.set_location_info("dc1", "rack1")
policy.populate(None, [host])
self.assertEqual(policy.distance(host), HostDistance.LOCAL)
# used_hosts_per_remote_dc is set to 0, so ignore it
remote_host = Host("ip2", SimpleConvictionPolicy)
remote_host.set_location_info("dc2", "rack1")
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# dc2 isn't registered in the policy's live_hosts dict
policy.used_hosts_per_remote_dc = 1
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# make sure the policy has both dcs registered
policy.populate(None, [host, remote_host])
self.assertEqual(policy.distance(remote_host), HostDistance.REMOTE)
# since used_hosts_per_remote_dc is set to 1, only the first
# remote host in dc2 will be REMOTE, the rest are IGNORED
second_remote_host = Host("ip3", SimpleConvictionPolicy)
second_remote_host.set_location_info("dc2", "rack1")
policy.populate(None, [host, remote_host, second_remote_host])
distances = set([policy.distance(remote_host), policy.distance(second_remote_host)])
self.assertEqual(distances, set([HostDistance.REMOTE, HostDistance.IGNORED]))
def test_status_updates(self):
hosts = [Host(i, SimpleConvictionPolicy) for i in range(4)]
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(None, hosts)
policy.on_down(hosts[0])
policy.on_remove(hosts[2])
new_local_host = Host(4, SimpleConvictionPolicy)
new_local_host.set_location_info("dc1", "rack1")
policy.on_up(new_local_host)
new_remote_host = Host(5, SimpleConvictionPolicy)
new_remote_host.set_location_info("dc9000", "rack1")
policy.on_add(new_remote_host)
# we now have two local hosts and two remote hosts in separate dcs
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), set([hosts[1], new_local_host]))
self.assertEqual(set(qplan[2:]), set([hosts[3], new_remote_host]))
# since we have hosts in dc9000, the distance shouldn't be IGNORED
self.assertEqual(policy.distance(new_remote_host), HostDistance.REMOTE)
policy.on_down(new_local_host)
policy.on_down(hosts[1])
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan), set([hosts[3], new_remote_host]))
policy.on_down(new_remote_host)
policy.on_down(hosts[3])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
def test_no_live_nodes(self):
"""
Ensure query plan for a downed cluster will execute without errors
"""
hosts = []
for i in range(4):
h = Host(i, SimpleConvictionPolicy)
h.set_location_info("dc1", "rack1")
hosts.append(h)
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(None, hosts)
for host in hosts:
policy.on_down(host)
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
def test_no_nodes(self):
"""
Ensure query plan for an empty cluster will execute without errors
"""
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(None, [])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
class TokenAwarePolicyTest(unittest.TestCase):
def test_wrap_round_robin(self):
cluster = Mock(spec=Cluster)
cluster.metadata = Mock(spec=Metadata)
hosts = [Host(str(i), SimpleConvictionPolicy) for i in range(4)]
def get_replicas(keyspace, packed_key):
index = struct.unpack('>i', packed_key)[0]
return list(islice(cycle(hosts), index, index + 2))
cluster.metadata.get_replicas.side_effect = get_replicas
policy = TokenAwarePolicy(RoundRobinPolicy())
policy.populate(cluster, hosts)
for i in range(4):
query = Statement(routing_key=struct.pack('>i', i))
qplan = list(policy.make_query_plan(None, query))
replicas = get_replicas(None, struct.pack('>i', i))
other = set(h for h in hosts if h not in replicas)
            self.assertEqual(replicas, qplan[:2])
            self.assertEqual(other, set(qplan[2:]))
# Should use the secondary policy
for i in range(4):
qplan = list(policy.make_query_plan())
            self.assertEqual(set(qplan), set(hosts))
def test_wrap_dc_aware(self):
cluster = Mock(spec=Cluster)
cluster.metadata = Mock(spec=Metadata)
hosts = [Host(str(i), SimpleConvictionPolicy) for i in range(4)]
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
def get_replicas(keyspace, packed_key):
index = struct.unpack('>i', packed_key)[0]
# return one node from each DC
if index % 2 == 0:
return [hosts[0], hosts[2]]
else:
return [hosts[1], hosts[3]]
cluster.metadata.get_replicas.side_effect = get_replicas
policy = TokenAwarePolicy(DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1))
policy.populate(cluster, hosts)
for i in range(4):
query = Statement(routing_key=struct.pack('>i', i))
qplan = list(policy.make_query_plan(None, query))
replicas = get_replicas(None, struct.pack('>i', i))
# first should be the only local replica
self.assertIn(qplan[0], replicas)
            self.assertEqual(qplan[0].datacenter, "dc1")
# then the local non-replica
self.assertNotIn(qplan[1], replicas)
            self.assertEqual(qplan[1].datacenter, "dc1")
# then one of the remotes (used_hosts_per_remote_dc is 1, so we
# shouldn't see two remotes)
            self.assertEqual(qplan[2].datacenter, "dc2")
            self.assertEqual(3, len(qplan))
class FakeCluster:
def __init__(self):
self.metadata = None
def test_get_distance(self):
"""
Same test as DCAwareRoundRobinPolicyTest.test_get_distance()
Except a FakeCluster is needed for the metadata variable and
policy.child_policy is needed to change child policy settings
"""
policy = TokenAwarePolicy(DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=0))
host = Host("ip1", SimpleConvictionPolicy)
host.set_location_info("dc1", "rack1")
policy.populate(self.FakeCluster(), [host])
self.assertEqual(policy.distance(host), HostDistance.LOCAL)
# used_hosts_per_remote_dc is set to 0, so ignore it
remote_host = Host("ip2", SimpleConvictionPolicy)
remote_host.set_location_info("dc2", "rack1")
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# dc2 isn't registered in the policy's live_hosts dict
policy.child_policy.used_hosts_per_remote_dc = 1
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# make sure the policy has both dcs registered
policy.populate(self.FakeCluster(), [host, remote_host])
self.assertEqual(policy.distance(remote_host), HostDistance.REMOTE)
# since used_hosts_per_remote_dc is set to 1, only the first
# remote host in dc2 will be REMOTE, the rest are IGNORED
second_remote_host = Host("ip3", SimpleConvictionPolicy)
second_remote_host.set_location_info("dc2", "rack1")
policy.populate(self.FakeCluster(), [host, remote_host, second_remote_host])
distances = set([policy.distance(remote_host), policy.distance(second_remote_host)])
self.assertEqual(distances, set([HostDistance.REMOTE, HostDistance.IGNORED]))
def test_status_updates(self):
"""
Same test as DCAwareRoundRobinPolicyTest.test_status_updates()
"""
hosts = [Host(i, SimpleConvictionPolicy) for i in range(4)]
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
policy = TokenAwarePolicy(DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1))
policy.populate(self.FakeCluster(), hosts)
policy.on_down(hosts[0])
policy.on_remove(hosts[2])
new_local_host = Host(4, SimpleConvictionPolicy)
new_local_host.set_location_info("dc1", "rack1")
policy.on_up(new_local_host)
new_remote_host = Host(5, SimpleConvictionPolicy)
new_remote_host.set_location_info("dc9000", "rack1")
policy.on_add(new_remote_host)
# we now have two local hosts and two remote hosts in separate dcs
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), set([hosts[1], new_local_host]))
self.assertEqual(set(qplan[2:]), set([hosts[3], new_remote_host]))
# since we have hosts in dc9000, the distance shouldn't be IGNORED
self.assertEqual(policy.distance(new_remote_host), HostDistance.REMOTE)
policy.on_down(new_local_host)
policy.on_down(hosts[1])
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan), set([hosts[3], new_remote_host]))
policy.on_down(new_remote_host)
policy.on_down(hosts[3])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
class ConvictionPolicyTest(unittest.TestCase):
def test_not_implemented(self):
"""
Code coverage for interface-style base class
"""
conviction_policy = ConvictionPolicy(1)
self.assertRaises(NotImplementedError, conviction_policy.add_failure, 1)
self.assertRaises(NotImplementedError, conviction_policy.reset)
class SimpleConvictionPolicyTest(unittest.TestCase):
def test_basic_responses(self):
"""
Code coverage for SimpleConvictionPolicy
"""
conviction_policy = SimpleConvictionPolicy(1)
self.assertEqual(conviction_policy.add_failure(1), True)
self.assertEqual(conviction_policy.reset(), None)
class ReconnectionPolicyTest(unittest.TestCase):
def test_basic_responses(self):
"""
Code coverage for interface-style base class
"""
policy = ReconnectionPolicy()
self.assertRaises(NotImplementedError, policy.new_schedule)
class ConstantReconnectionPolicyTest(unittest.TestCase):
def test_bad_vals(self):
"""
Test initialization values
"""
self.assertRaises(ValueError, ConstantReconnectionPolicy, -1, 0)
def test_schedule(self):
"""
Test ConstantReconnectionPolicy schedule
"""
delay = 2
max_attempts = 100
policy = ConstantReconnectionPolicy(delay=delay, max_attempts=max_attempts)
schedule = list(policy.new_schedule())
self.assertEqual(len(schedule), max_attempts)
        # don't shadow the configured delay, or the assertion checks nothing
        for i, actual_delay in enumerate(schedule):
            self.assertEqual(actual_delay, delay)
def test_schedule_negative_max_attempts(self):
"""
Test how negative max_attempts are handled
"""
delay = 2
max_attempts = -100
try:
policy = ConstantReconnectionPolicy(delay=delay, max_attempts=max_attempts)
self.fail('max_attempts should throw ValueError when negative')
except ValueError:
pass
class ExponentialReconnectionPolicyTest(unittest.TestCase):
def test_bad_vals(self):
self.assertRaises(ValueError, ExponentialReconnectionPolicy, -1, 0)
self.assertRaises(ValueError, ExponentialReconnectionPolicy, 0, -1)
self.assertRaises(ValueError, ExponentialReconnectionPolicy, 9000, 1)
def test_schedule(self):
policy = ExponentialReconnectionPolicy(base_delay=2, max_delay=100)
schedule = list(policy.new_schedule())
self.assertEqual(len(schedule), 64)
for i, delay in enumerate(schedule):
if i == 0:
self.assertEqual(delay, 2)
elif i < 6:
self.assertEqual(delay, schedule[i - 1] * 2)
else:
self.assertEqual(delay, 100)
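    # The assertions above pin down the schedule's shape: start at base_delay,
    # double each step, clamp at max_delay, for 64 entries. A standalone sketch
    # of that rule (an illustration of what the test expects, not the driver's
    # actual implementation):
    @staticmethod
    def _expected_schedule(base_delay, max_delay, length=64):
        delay = base_delay
        for _ in range(length):
            yield delay
            delay = min(delay * 2, max_delay)

    def test_schedule_shape_sketch(self):
        # hypothetical companion check mirroring test_schedule above
        self.assertEqual(list(self._expected_schedule(2, 100))[:8],
                         [2, 4, 8, 16, 32, 64, 100, 100])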
class RetryPolicyTest(unittest.TestCase):
def test_read_timeout(self):
policy = RetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=1, received_responses=2,
data_retrieved=True, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if we didn't get enough responses, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=1,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if we got enough responses, but also got a data response, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# we got enough responses but no data response, so retry
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=False, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, 'ONE')
def test_write_timeout(self):
policy = RetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if it's not a BATCH_LOG write, don't retry it
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# retry BATCH_LOG writes regardless of received responses
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.BATCH_LOG,
required_responses=10000, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, 'ONE')
def test_unavailable(self):
"""
        Use the same tests as test_write_timeout, but ensure they only RETHROW
"""
policy = RetryPolicy()
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=10000, alive_replicas=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
class FallthroughRetryPolicyTest(unittest.TestCase):
"""
Use the same tests for test_write_timeout, but ensure they only RETHROW
"""
def test_read_timeout(self):
policy = FallthroughRetryPolicy()
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=1, received_responses=2,
data_retrieved=True, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=1,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=False, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
def test_write_timeout(self):
policy = FallthroughRetryPolicy()
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.BATCH_LOG,
required_responses=10000, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
def test_unavailable(self):
policy = FallthroughRetryPolicy()
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=10000, alive_replicas=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
class DowngradingConsistencyRetryPolicyTest(unittest.TestCase):
def test_read_timeout(self):
policy = DowngradingConsistencyRetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=1, received_responses=2,
data_retrieved=True, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if we didn't get enough responses, retry at a lower consistency
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=4, received_responses=3,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.THREE)
# if we didn't get enough responses, retry at a lower consistency
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.TWO)
# the retry consistency level is chosen based on the number of received responses
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=1,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.ONE)
# if we got no responses, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=0,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if we got enough responses but no data, retry at the same consistency level
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=3,
data_retrieved=False, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, 'ONE')
# if we got enough responses, but also got a data response, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
def test_write_timeout(self):
policy = DowngradingConsistencyRetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# ignore failures on these types of writes
for write_type in (WriteType.SIMPLE, WriteType.BATCH, WriteType.COUNTER):
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=write_type,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.IGNORE)
# downgrade consistency level on unlogged batch writes
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.UNLOGGED_BATCH,
required_responses=3, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.ONE)
# retry batch log writes at the same consistency level
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.BATCH_LOG,
required_responses=3, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, "ONE")
# a timeout on an unknown write_type should be rethrown
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=None,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
def test_unavailable(self):
policy = DowngradingConsistencyRetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE", required_replicas=3, alive_replicas=1, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# downgrade consistency on unavailable exceptions
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE", required_replicas=3, alive_replicas=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.ONE)
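# Hypothetical convenience entry point (not in the original suite); test
# runners normally collect these cases directly, but this allows running the
# module as a script.
if __name__ == '__main__':
    unittest.main()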
| 39.802891 | 114 | 0.661803 | 3,483 | 30,290 | 5.585128 | 0.08441 | 0.08482 | 0.038452 | 0.041947 | 0.811083 | 0.784558 | 0.75649 | 0.745438 | 0.732021 | 0.71516 | 0 | 0.015504 | 0.241928 | 30,290 | 760 | 115 | 39.855263 | 0.831678 | 0.102047 | 0 | 0.711501 | 0 | 0 | 0.015621 | 0 | 0 | 0 | 0 | 0 | 0.261209 | 1 | 0.074074 | false | 0.001949 | 0.025341 | 0 | 0.130604 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
880e1672c218c5df99a823f315224fc545c4cdff | 133,037 | py | Python | sdk/python/pulumi_gcp/monitoring/outputs.py | dimpu47/pulumi-gcp | 38355de300a5768e11c49d344a8165ba0735deed | ["ECL-2.0", "Apache-2.0"] | null | null | null | sdk/python/pulumi_gcp/monitoring/outputs.py | dimpu47/pulumi-gcp | 38355de300a5768e11c49d344a8165ba0735deed | ["ECL-2.0", "Apache-2.0"] | null | null | null | sdk/python/pulumi_gcp/monitoring/outputs.py | dimpu47/pulumi-gcp | 38355de300a5768e11c49d344a8165ba0735deed | ["ECL-2.0", "Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'AlertPolicyCondition',
'AlertPolicyConditionConditionAbsent',
'AlertPolicyConditionConditionAbsentAggregation',
'AlertPolicyConditionConditionAbsentTrigger',
'AlertPolicyConditionConditionThreshold',
'AlertPolicyConditionConditionThresholdAggregation',
'AlertPolicyConditionConditionThresholdDenominatorAggregation',
'AlertPolicyConditionConditionThresholdTrigger',
'AlertPolicyCreationRecord',
'AlertPolicyDocumentation',
'CustomServiceTelemetry',
'MetricDescriptorLabel',
'MetricDescriptorMetadata',
'NotificationChannelSensitiveLabels',
'SloBasicSli',
'SloBasicSliLatency',
'SloRequestBasedSli',
'SloRequestBasedSliDistributionCut',
'SloRequestBasedSliDistributionCutRange',
'SloRequestBasedSliGoodTotalRatio',
'SloWindowsBasedSli',
'SloWindowsBasedSliGoodTotalRatioThreshold',
'SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformance',
'SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformanceLatency',
'SloWindowsBasedSliGoodTotalRatioThresholdPerformance',
'SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCut',
'SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCutRange',
'SloWindowsBasedSliGoodTotalRatioThresholdPerformanceGoodTotalRatio',
'SloWindowsBasedSliMetricMeanInRange',
'SloWindowsBasedSliMetricMeanInRangeRange',
'SloWindowsBasedSliMetricSumInRange',
'SloWindowsBasedSliMetricSumInRangeRange',
'UptimeCheckConfigContentMatcher',
'UptimeCheckConfigHttpCheck',
'UptimeCheckConfigHttpCheckAuthInfo',
'UptimeCheckConfigMonitoredResource',
'UptimeCheckConfigResourceGroup',
'UptimeCheckConfigTcpCheck',
'GetAppEngineServiceTelemetryResult',
'GetNotificationChannelSensitiveLabelResult',
'GetUptimeCheckIPsUptimeCheckIpResult',
]
@pulumi.output_type
class AlertPolicyCondition(dict):
def __init__(__self__, *,
display_name: str,
condition_absent: Optional['outputs.AlertPolicyConditionConditionAbsent'] = None,
condition_threshold: Optional['outputs.AlertPolicyConditionConditionThreshold'] = None,
name: Optional[str] = None):
"""
:param str display_name: A short name or phrase used to identify the
condition in dashboards, notifications, and
incidents. To avoid confusion, don't use the same
display name for multiple conditions in the same
policy.
:param 'AlertPolicyConditionConditionAbsentArgs' condition_absent: A condition that checks that a time series
continues to receive new data points.
Structure is documented below.
:param 'AlertPolicyConditionConditionThresholdArgs' condition_threshold: A condition that compares a time series against a
threshold.
Structure is documented below.
:param str name: -
The unique resource name for this condition.
Its syntax is:
projects/[PROJECT_ID]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID]
[CONDITION_ID] is assigned by Stackdriver Monitoring when
the condition is created as part of a new or updated alerting
policy.
"""
pulumi.set(__self__, "display_name", display_name)
if condition_absent is not None:
pulumi.set(__self__, "condition_absent", condition_absent)
if condition_threshold is not None:
pulumi.set(__self__, "condition_threshold", condition_threshold)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> str:
"""
A short name or phrase used to identify the
condition in dashboards, notifications, and
incidents. To avoid confusion, don't use the same
display name for multiple conditions in the same
policy.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="conditionAbsent")
def condition_absent(self) -> Optional['outputs.AlertPolicyConditionConditionAbsent']:
"""
A condition that checks that a time series
continues to receive new data points.
Structure is documented below.
"""
return pulumi.get(self, "condition_absent")
@property
@pulumi.getter(name="conditionThreshold")
def condition_threshold(self) -> Optional['outputs.AlertPolicyConditionConditionThreshold']:
"""
A condition that compares a time series against a
threshold.
Structure is documented below.
"""
return pulumi.get(self, "condition_threshold")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
-
The unique resource name for this condition.
Its syntax is:
projects/[PROJECT_ID]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID]
[CONDITION_ID] is assigned by Stackdriver Monitoring when
the condition is created as part of a new or updated alerting
policy.
"""
return pulumi.get(self, "name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
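# Illustrative usage (hypothetical project and policy IDs): output types such
# as AlertPolicyCondition are what resource reads return, e.g.
#
#     import pulumi_gcp as gcp
#     policy = gcp.monitoring.AlertPolicy.get(
#         "existing", "projects/my-project/alertPolicies/12345")
#     name = policy.conditions.apply(lambda cs: cs[0].display_name)
#
# The camelCase wire names resolve to these snake_case getters via
# _translate_property above.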
@pulumi.output_type
class AlertPolicyConditionConditionAbsent(dict):
def __init__(__self__, *,
duration: str,
aggregations: Optional[List['outputs.AlertPolicyConditionConditionAbsentAggregation']] = None,
filter: Optional[str] = None,
trigger: Optional['outputs.AlertPolicyConditionConditionAbsentTrigger'] = None):
"""
:param str duration: The amount of time that a time series must
violate the threshold to be considered
failing. Currently, only values that are a
multiple of a minute--e.g., 0, 60, 120, or
300 seconds--are supported. If an invalid
value is given, an error will be returned.
When choosing a duration, it is useful to
keep in mind the frequency of the underlying
time series data (which may also be affected
by any alignments specified in the
aggregations field); a good duration is long
enough so that a single outlier does not
generate spurious alerts, but short enough
that unhealthy states are detected and
alerted on quickly.
:param List['AlertPolicyConditionConditionAbsentAggregationArgs'] aggregations: Specifies the alignment of data points in
individual time series as well as how to
combine the retrieved time series together
(such as when aggregating multiple streams
on each resource to a single stream for each
resource or when aggregating streams across
all members of a group of resources).
Multiple aggregations are applied in the
order specified.This field is similar to the
one in the MetricService.ListTimeSeries
request. It is advisable to use the
ListTimeSeries method when debugging this
field.
Structure is documented below.
:param str filter: A filter that identifies which time series
should be compared with the threshold.The
filter is similar to the one that is
specified in the
MetricService.ListTimeSeries request (that
call is useful to verify the time series
that will be retrieved / processed) and must
specify the metric type and optionally may
contain restrictions on resource type,
resource labels, and metric labels. This
field may not exceed 2048 Unicode characters
in length.
:param 'AlertPolicyConditionConditionAbsentTriggerArgs' trigger: The number/percent of time series for which
the comparison must hold in order for the
condition to trigger. If unspecified, then
the condition will trigger if the comparison
is true for any of the time series that have
been identified by filter and aggregations,
or by the ratio, if denominator_filter and
denominator_aggregations are specified.
Structure is documented below.
"""
pulumi.set(__self__, "duration", duration)
if aggregations is not None:
pulumi.set(__self__, "aggregations", aggregations)
if filter is not None:
pulumi.set(__self__, "filter", filter)
if trigger is not None:
pulumi.set(__self__, "trigger", trigger)
@property
@pulumi.getter
def duration(self) -> str:
"""
The amount of time that a time series must
violate the threshold to be considered
failing. Currently, only values that are a
multiple of a minute--e.g., 0, 60, 120, or
300 seconds--are supported. If an invalid
value is given, an error will be returned.
When choosing a duration, it is useful to
keep in mind the frequency of the underlying
time series data (which may also be affected
by any alignments specified in the
aggregations field); a good duration is long
enough so that a single outlier does not
generate spurious alerts, but short enough
that unhealthy states are detected and
alerted on quickly.
"""
return pulumi.get(self, "duration")
@property
@pulumi.getter
def aggregations(self) -> Optional[List['outputs.AlertPolicyConditionConditionAbsentAggregation']]:
"""
Specifies the alignment of data points in
individual time series as well as how to
combine the retrieved time series together
(such as when aggregating multiple streams
on each resource to a single stream for each
resource or when aggregating streams across
all members of a group of resources).
Multiple aggregations are applied in the
order specified.This field is similar to the
one in the MetricService.ListTimeSeries
request. It is advisable to use the
ListTimeSeries method when debugging this
field.
Structure is documented below.
"""
return pulumi.get(self, "aggregations")
@property
@pulumi.getter
def filter(self) -> Optional[str]:
"""
A filter that identifies which time series
should be compared with the threshold.The
filter is similar to the one that is
specified in the
MetricService.ListTimeSeries request (that
call is useful to verify the time series
that will be retrieved / processed) and must
specify the metric type and optionally may
contain restrictions on resource type,
resource labels, and metric labels. This
field may not exceed 2048 Unicode characters
in length.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter
def trigger(self) -> Optional['outputs.AlertPolicyConditionConditionAbsentTrigger']:
"""
The number/percent of time series for which
the comparison must hold in order for the
condition to trigger. If unspecified, then
the condition will trigger if the comparison
is true for any of the time series that have
been identified by filter and aggregations,
or by the ratio, if denominator_filter and
denominator_aggregations are specified.
Structure is documented below.
"""
return pulumi.get(self, "trigger")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
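# Sketch (hypothetical metric filter): an absence condition that fires when a
# series stops reporting for five minutes. Per the docstrings above, duration
# must be a multiple of 60 seconds:
#
#     condition_absent={
#         "duration": "300s",
#         "filter": 'metric.type="custom.googleapis.com/heartbeat" AND resource.type="global"',
#     }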
@pulumi.output_type
class AlertPolicyConditionConditionAbsentAggregation(dict):
def __init__(__self__, *,
alignment_period: Optional[str] = None,
cross_series_reducer: Optional[str] = None,
group_by_fields: Optional[List[str]] = None,
per_series_aligner: Optional[str] = None):
"""
:param str alignment_period: The alignment period for per-time
series alignment. If present,
alignmentPeriod must be at least
60 seconds. After per-time series
alignment, each time series will
contain data points only on the
period boundaries. If
perSeriesAligner is not specified
or equals ALIGN_NONE, then this
field is ignored. If
perSeriesAligner is specified and
does not equal ALIGN_NONE, then
this field must be defined;
otherwise an error is returned.
:param str cross_series_reducer: The approach to be used to combine
time series. Not all reducer
functions may be applied to all
time series, depending on the
metric type and the value type of
the original time series.
Reduction may change the metric
type or value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `REDUCE_NONE`, `REDUCE_MEAN`, `REDUCE_MIN`, `REDUCE_MAX`, `REDUCE_SUM`, `REDUCE_STDDEV`, `REDUCE_COUNT`, `REDUCE_COUNT_TRUE`, `REDUCE_COUNT_FALSE`, `REDUCE_FRACTION_TRUE`, `REDUCE_PERCENTILE_99`, `REDUCE_PERCENTILE_95`, `REDUCE_PERCENTILE_50`, and `REDUCE_PERCENTILE_05`.
:param List[str] group_by_fields: The set of fields to preserve when
crossSeriesReducer is specified.
The groupByFields determine how
the time series are partitioned
into subsets prior to applying the
aggregation function. Each subset
contains time series that have the
same value for each of the
grouping fields. Each individual
time series is a member of exactly
one subset. The crossSeriesReducer
is applied to each subset of time
series. It is not possible to
reduce across different resource
types, so this field implicitly
contains resource.type. Fields not
specified in groupByFields are
aggregated away. If groupByFields
is not specified and all the time
series have the same resource
type, then the time series are
aggregated into a single output
time series. If crossSeriesReducer
is not defined, this field is
ignored.
:param str per_series_aligner: The approach to be used to align
individual time series. Not all
alignment functions may be applied
to all time series, depending on
the metric type and value type of
the original time series.
Alignment may change the metric
type or the value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `ALIGN_NONE`, `ALIGN_DELTA`, `ALIGN_RATE`, `ALIGN_INTERPOLATE`, `ALIGN_NEXT_OLDER`, `ALIGN_MIN`, `ALIGN_MAX`, `ALIGN_MEAN`, `ALIGN_COUNT`, `ALIGN_SUM`, `ALIGN_STDDEV`, `ALIGN_COUNT_TRUE`, `ALIGN_COUNT_FALSE`, `ALIGN_FRACTION_TRUE`, `ALIGN_PERCENTILE_99`, `ALIGN_PERCENTILE_95`, `ALIGN_PERCENTILE_50`, `ALIGN_PERCENTILE_05`, and `ALIGN_PERCENT_CHANGE`.
"""
if alignment_period is not None:
pulumi.set(__self__, "alignment_period", alignment_period)
if cross_series_reducer is not None:
pulumi.set(__self__, "cross_series_reducer", cross_series_reducer)
if group_by_fields is not None:
pulumi.set(__self__, "group_by_fields", group_by_fields)
if per_series_aligner is not None:
pulumi.set(__self__, "per_series_aligner", per_series_aligner)
@property
@pulumi.getter(name="alignmentPeriod")
def alignment_period(self) -> Optional[str]:
"""
The alignment period for per-time
series alignment. If present,
alignmentPeriod must be at least
60 seconds. After per-time series
alignment, each time series will
contain data points only on the
period boundaries. If
perSeriesAligner is not specified
or equals ALIGN_NONE, then this
field is ignored. If
perSeriesAligner is specified and
does not equal ALIGN_NONE, then
this field must be defined;
otherwise an error is returned.
"""
return pulumi.get(self, "alignment_period")
@property
@pulumi.getter(name="crossSeriesReducer")
def cross_series_reducer(self) -> Optional[str]:
"""
The approach to be used to combine
time series. Not all reducer
functions may be applied to all
time series, depending on the
metric type and the value type of
the original time series.
Reduction may change the metric
type or value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `REDUCE_NONE`, `REDUCE_MEAN`, `REDUCE_MIN`, `REDUCE_MAX`, `REDUCE_SUM`, `REDUCE_STDDEV`, `REDUCE_COUNT`, `REDUCE_COUNT_TRUE`, `REDUCE_COUNT_FALSE`, `REDUCE_FRACTION_TRUE`, `REDUCE_PERCENTILE_99`, `REDUCE_PERCENTILE_95`, `REDUCE_PERCENTILE_50`, and `REDUCE_PERCENTILE_05`.
"""
return pulumi.get(self, "cross_series_reducer")
@property
@pulumi.getter(name="groupByFields")
def group_by_fields(self) -> Optional[List[str]]:
"""
The set of fields to preserve when
crossSeriesReducer is specified.
The groupByFields determine how
the time series are partitioned
into subsets prior to applying the
aggregation function. Each subset
contains time series that have the
same value for each of the
grouping fields. Each individual
time series is a member of exactly
one subset. The crossSeriesReducer
is applied to each subset of time
series. It is not possible to
reduce across different resource
types, so this field implicitly
contains resource.type. Fields not
specified in groupByFields are
aggregated away. If groupByFields
is not specified and all the time
series have the same resource
type, then the time series are
aggregated into a single output
time series. If crossSeriesReducer
is not defined, this field is
ignored.
"""
return pulumi.get(self, "group_by_fields")
@property
@pulumi.getter(name="perSeriesAligner")
def per_series_aligner(self) -> Optional[str]:
"""
The approach to be used to align
individual time series. Not all
alignment functions may be applied
to all time series, depending on
the metric type and value type of
the original time series.
Alignment may change the metric
type or the value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `ALIGN_NONE`, `ALIGN_DELTA`, `ALIGN_RATE`, `ALIGN_INTERPOLATE`, `ALIGN_NEXT_OLDER`, `ALIGN_MIN`, `ALIGN_MAX`, `ALIGN_MEAN`, `ALIGN_COUNT`, `ALIGN_SUM`, `ALIGN_STDDEV`, `ALIGN_COUNT_TRUE`, `ALIGN_COUNT_FALSE`, `ALIGN_FRACTION_TRUE`, `ALIGN_PERCENTILE_99`, `ALIGN_PERCENTILE_95`, `ALIGN_PERCENTILE_50`, `ALIGN_PERCENTILE_05`, and `ALIGN_PERCENT_CHANGE`.
"""
return pulumi.get(self, "per_series_aligner")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
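# Sketch (assumed values): an aggregation satisfying the constraints documented
# above -- when cross_series_reducer is set, per_series_aligner (not ALIGN_NONE)
# and alignment_period must also be set:
#
#     aggregations=[{
#         "alignment_period": "60s",
#         "per_series_aligner": "ALIGN_RATE",
#         "cross_series_reducer": "REDUCE_MEAN",
#         "group_by_fields": ["resource.label.zone"],
#     }]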
@pulumi.output_type
class AlertPolicyConditionConditionAbsentTrigger(dict):
def __init__(__self__, *,
count: Optional[float] = None,
percent: Optional[float] = None):
"""
:param float count: The absolute number of time series
that must fail the predicate for the
condition to be triggered.
:param float percent: The percentage of time series that
must fail the predicate for the
condition to be triggered.
"""
if count is not None:
pulumi.set(__self__, "count", count)
if percent is not None:
pulumi.set(__self__, "percent", percent)
@property
@pulumi.getter
def count(self) -> Optional[float]:
"""
The absolute number of time series
that must fail the predicate for the
condition to be triggered.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter
def percent(self) -> Optional[float]:
"""
The percentage of time series that
must fail the predicate for the
condition to be triggered.
"""
return pulumi.get(self, "percent")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
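# Sketch: count and percent are alternatives. A trigger of {"count": 1} fires
# when any single series fails the predicate; {"percent": 50} requires half of
# the identified series to fail. (Values here are illustrative, not defaults.)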
@pulumi.output_type
class AlertPolicyConditionConditionThreshold(dict):
def __init__(__self__, *,
comparison: str,
duration: str,
aggregations: Optional[List['outputs.AlertPolicyConditionConditionThresholdAggregation']] = None,
denominator_aggregations: Optional[List['outputs.AlertPolicyConditionConditionThresholdDenominatorAggregation']] = None,
denominator_filter: Optional[str] = None,
filter: Optional[str] = None,
threshold_value: Optional[float] = None,
trigger: Optional['outputs.AlertPolicyConditionConditionThresholdTrigger'] = None):
"""
:param str comparison: The comparison to apply between the time
series (indicated by filter and aggregation)
and the threshold (indicated by
threshold_value). The comparison is applied
on each time series, with the time series on
the left-hand side and the threshold on the
right-hand side. Only COMPARISON_LT and
COMPARISON_GT are supported currently.
Possible values are `COMPARISON_GT`, `COMPARISON_GE`, `COMPARISON_LT`, `COMPARISON_LE`, `COMPARISON_EQ`, and `COMPARISON_NE`.
:param str duration: The amount of time that a time series must
violate the threshold to be considered
failing. Currently, only values that are a
multiple of a minute--e.g., 0, 60, 120, or
300 seconds--are supported. If an invalid
value is given, an error will be returned.
When choosing a duration, it is useful to
keep in mind the frequency of the underlying
time series data (which may also be affected
by any alignments specified in the
aggregations field); a good duration is long
enough so that a single outlier does not
generate spurious alerts, but short enough
that unhealthy states are detected and
alerted on quickly.
:param List['AlertPolicyConditionConditionThresholdAggregationArgs'] aggregations: Specifies the alignment of data points in
individual time series as well as how to
combine the retrieved time series together
(such as when aggregating multiple streams
on each resource to a single stream for each
resource or when aggregating streams across
all members of a group of resources).
Multiple aggregations are applied in the
order specified.This field is similar to the
one in the MetricService.ListTimeSeries
request. It is advisable to use the
ListTimeSeries method when debugging this
field.
Structure is documented below.
:param List['AlertPolicyConditionConditionThresholdDenominatorAggregationArgs'] denominator_aggregations: Specifies the alignment of data points in
individual time series selected by
denominatorFilter as well as how to combine
the retrieved time series together (such as
when aggregating multiple streams on each
resource to a single stream for each
resource or when aggregating streams across
all members of a group of resources).When
computing ratios, the aggregations and
denominator_aggregations fields must use the
same alignment period and produce time
series that have the same periodicity and
labels.This field is similar to the one in
the MetricService.ListTimeSeries request. It
is advisable to use the ListTimeSeries
method when debugging this field.
Structure is documented below.
:param str denominator_filter: A filter that identifies a time series that
should be used as the denominator of a ratio
that will be compared with the threshold. If
a denominator_filter is specified, the time
series specified by the filter field will be
used as the numerator.The filter is similar
to the one that is specified in the
MetricService.ListTimeSeries request (that
call is useful to verify the time series
that will be retrieved / processed) and must
specify the metric type and optionally may
contain restrictions on resource type,
resource labels, and metric labels. This
field may not exceed 2048 Unicode characters
in length.
:param str filter: A filter that identifies which time series
should be compared with the threshold.The
filter is similar to the one that is
specified in the
MetricService.ListTimeSeries request (that
call is useful to verify the time series
that will be retrieved / processed) and must
specify the metric type and optionally may
contain restrictions on resource type,
resource labels, and metric labels. This
field may not exceed 2048 Unicode characters
in length.
:param float threshold_value: A value against which to compare the time
series.
:param 'AlertPolicyConditionConditionThresholdTriggerArgs' trigger: The number/percent of time series for which
the comparison must hold in order for the
condition to trigger. If unspecified, then
the condition will trigger if the comparison
is true for any of the time series that have
been identified by filter and aggregations,
or by the ratio, if denominator_filter and
denominator_aggregations are specified.
Structure is documented below.
"""
pulumi.set(__self__, "comparison", comparison)
pulumi.set(__self__, "duration", duration)
if aggregations is not None:
pulumi.set(__self__, "aggregations", aggregations)
if denominator_aggregations is not None:
pulumi.set(__self__, "denominator_aggregations", denominator_aggregations)
if denominator_filter is not None:
pulumi.set(__self__, "denominator_filter", denominator_filter)
if filter is not None:
pulumi.set(__self__, "filter", filter)
if threshold_value is not None:
pulumi.set(__self__, "threshold_value", threshold_value)
if trigger is not None:
pulumi.set(__self__, "trigger", trigger)
@property
@pulumi.getter
def comparison(self) -> str:
"""
The comparison to apply between the time
series (indicated by filter and aggregation)
and the threshold (indicated by
threshold_value). The comparison is applied
on each time series, with the time series on
the left-hand side and the threshold on the
right-hand side. Only COMPARISON_LT and
COMPARISON_GT are supported currently.
Possible values are `COMPARISON_GT`, `COMPARISON_GE`, `COMPARISON_LT`, `COMPARISON_LE`, `COMPARISON_EQ`, and `COMPARISON_NE`.
"""
return pulumi.get(self, "comparison")
@property
@pulumi.getter
def duration(self) -> str:
"""
The amount of time that a time series must
violate the threshold to be considered
failing. Currently, only values that are a
multiple of a minute--e.g., 0, 60, 120, or
300 seconds--are supported. If an invalid
value is given, an error will be returned.
When choosing a duration, it is useful to
keep in mind the frequency of the underlying
time series data (which may also be affected
by any alignments specified in the
aggregations field); a good duration is long
enough so that a single outlier does not
generate spurious alerts, but short enough
that unhealthy states are detected and
alerted on quickly.
"""
return pulumi.get(self, "duration")
@property
@pulumi.getter
def aggregations(self) -> Optional[List['outputs.AlertPolicyConditionConditionThresholdAggregation']]:
"""
Specifies the alignment of data points in
individual time series as well as how to
combine the retrieved time series together
(such as when aggregating multiple streams
on each resource to a single stream for each
resource or when aggregating streams across
all members of a group of resources).
Multiple aggregations are applied in the
order specified.This field is similar to the
one in the MetricService.ListTimeSeries
request. It is advisable to use the
ListTimeSeries method when debugging this
field.
Structure is documented below.
"""
return pulumi.get(self, "aggregations")
@property
@pulumi.getter(name="denominatorAggregations")
def denominator_aggregations(self) -> Optional[List['outputs.AlertPolicyConditionConditionThresholdDenominatorAggregation']]:
"""
Specifies the alignment of data points in
individual time series selected by
denominatorFilter as well as how to combine
the retrieved time series together (such as
when aggregating multiple streams on each
resource to a single stream for each
resource or when aggregating streams across
all members of a group of resources).When
computing ratios, the aggregations and
denominator_aggregations fields must use the
same alignment period and produce time
series that have the same periodicity and
labels.This field is similar to the one in
the MetricService.ListTimeSeries request. It
is advisable to use the ListTimeSeries
method when debugging this field.
Structure is documented below.
"""
return pulumi.get(self, "denominator_aggregations")
@property
@pulumi.getter(name="denominatorFilter")
def denominator_filter(self) -> Optional[str]:
"""
A filter that identifies a time series that
should be used as the denominator of a ratio
that will be compared with the threshold. If
a denominator_filter is specified, the time
series specified by the filter field will be
used as the numerator.The filter is similar
to the one that is specified in the
MetricService.ListTimeSeries request (that
call is useful to verify the time series
that will be retrieved / processed) and must
specify the metric type and optionally may
contain restrictions on resource type,
resource labels, and metric labels. This
field may not exceed 2048 Unicode characters
in length.
"""
return pulumi.get(self, "denominator_filter")
@property
@pulumi.getter
def filter(self) -> Optional[str]:
"""
A filter that identifies which time series
should be compared with the threshold.The
filter is similar to the one that is
specified in the
MetricService.ListTimeSeries request (that
call is useful to verify the time series
that will be retrieved / processed) and must
specify the metric type and optionally may
contain restrictions on resource type,
resource labels, and metric labels. This
field may not exceed 2048 Unicode characters
in length.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter(name="thresholdValue")
def threshold_value(self) -> Optional[float]:
"""
A value against which to compare the time
series.
"""
return pulumi.get(self, "threshold_value")
@property
@pulumi.getter
def trigger(self) -> Optional['outputs.AlertPolicyConditionConditionThresholdTrigger']:
"""
The number/percent of time series for which
the comparison must hold in order for the
condition to trigger. If unspecified, then
the condition will trigger if the comparison
is true for any of the time series that have
been identified by filter and aggregations,
or by the ratio, if denominator_filter and
denominator_aggregations are specified.
Structure is documented below.
"""
return pulumi.get(self, "trigger")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
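# Sketch (hypothetical filter): a threshold condition comparing each aligned
# series against threshold_value with the given comparison, per the fields
# documented above:
#
#     condition_threshold={
#         "comparison": "COMPARISON_GT",
#         "threshold_value": 0.9,
#         "duration": "300s",
#         "filter": 'metric.type="compute.googleapis.com/instance/cpu/utilization" AND resource.type="gce_instance"',
#         "aggregations": [{"alignment_period": "60s", "per_series_aligner": "ALIGN_MEAN"}],
#     }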
@pulumi.output_type
class AlertPolicyConditionConditionThresholdAggregation(dict):
def __init__(__self__, *,
alignment_period: Optional[str] = None,
cross_series_reducer: Optional[str] = None,
group_by_fields: Optional[List[str]] = None,
per_series_aligner: Optional[str] = None):
"""
:param str alignment_period: The alignment period for per-time
series alignment. If present,
alignmentPeriod must be at least
60 seconds. After per-time series
alignment, each time series will
contain data points only on the
period boundaries. If
perSeriesAligner is not specified
or equals ALIGN_NONE, then this
field is ignored. If
perSeriesAligner is specified and
does not equal ALIGN_NONE, then
this field must be defined;
otherwise an error is returned.
:param str cross_series_reducer: The approach to be used to combine
time series. Not all reducer
functions may be applied to all
time series, depending on the
metric type and the value type of
the original time series.
Reduction may change the metric
type or value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `REDUCE_NONE`, `REDUCE_MEAN`, `REDUCE_MIN`, `REDUCE_MAX`, `REDUCE_SUM`, `REDUCE_STDDEV`, `REDUCE_COUNT`, `REDUCE_COUNT_TRUE`, `REDUCE_COUNT_FALSE`, `REDUCE_FRACTION_TRUE`, `REDUCE_PERCENTILE_99`, `REDUCE_PERCENTILE_95`, `REDUCE_PERCENTILE_50`, and `REDUCE_PERCENTILE_05`.
:param List[str] group_by_fields: The set of fields to preserve when
crossSeriesReducer is specified.
The groupByFields determine how
the time series are partitioned
into subsets prior to applying the
aggregation function. Each subset
contains time series that have the
same value for each of the
grouping fields. Each individual
time series is a member of exactly
one subset. The crossSeriesReducer
is applied to each subset of time
series. It is not possible to
reduce across different resource
types, so this field implicitly
contains resource.type. Fields not
specified in groupByFields are
aggregated away. If groupByFields
is not specified and all the time
series have the same resource
type, then the time series are
aggregated into a single output
time series. If crossSeriesReducer
is not defined, this field is
ignored.
:param str per_series_aligner: The approach to be used to align
individual time series. Not all
alignment functions may be applied
to all time series, depending on
the metric type and value type of
the original time series.
Alignment may change the metric
type or the value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `ALIGN_NONE`, `ALIGN_DELTA`, `ALIGN_RATE`, `ALIGN_INTERPOLATE`, `ALIGN_NEXT_OLDER`, `ALIGN_MIN`, `ALIGN_MAX`, `ALIGN_MEAN`, `ALIGN_COUNT`, `ALIGN_SUM`, `ALIGN_STDDEV`, `ALIGN_COUNT_TRUE`, `ALIGN_COUNT_FALSE`, `ALIGN_FRACTION_TRUE`, `ALIGN_PERCENTILE_99`, `ALIGN_PERCENTILE_95`, `ALIGN_PERCENTILE_50`, `ALIGN_PERCENTILE_05`, and `ALIGN_PERCENT_CHANGE`.
"""
if alignment_period is not None:
pulumi.set(__self__, "alignment_period", alignment_period)
if cross_series_reducer is not None:
pulumi.set(__self__, "cross_series_reducer", cross_series_reducer)
if group_by_fields is not None:
pulumi.set(__self__, "group_by_fields", group_by_fields)
if per_series_aligner is not None:
pulumi.set(__self__, "per_series_aligner", per_series_aligner)
@property
@pulumi.getter(name="alignmentPeriod")
def alignment_period(self) -> Optional[str]:
"""
The alignment period for per-time
series alignment. If present,
alignmentPeriod must be at least
60 seconds. After per-time series
alignment, each time series will
contain data points only on the
period boundaries. If
perSeriesAligner is not specified
or equals ALIGN_NONE, then this
field is ignored. If
perSeriesAligner is specified and
does not equal ALIGN_NONE, then
this field must be defined;
otherwise an error is returned.
"""
return pulumi.get(self, "alignment_period")
@property
@pulumi.getter(name="crossSeriesReducer")
def cross_series_reducer(self) -> Optional[str]:
"""
The approach to be used to combine
time series. Not all reducer
functions may be applied to all
time series, depending on the
metric type and the value type of
the original time series.
Reduction may change the metric
type or value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `REDUCE_NONE`, `REDUCE_MEAN`, `REDUCE_MIN`, `REDUCE_MAX`, `REDUCE_SUM`, `REDUCE_STDDEV`, `REDUCE_COUNT`, `REDUCE_COUNT_TRUE`, `REDUCE_COUNT_FALSE`, `REDUCE_FRACTION_TRUE`, `REDUCE_PERCENTILE_99`, `REDUCE_PERCENTILE_95`, `REDUCE_PERCENTILE_50`, and `REDUCE_PERCENTILE_05`.
"""
return pulumi.get(self, "cross_series_reducer")
@property
@pulumi.getter(name="groupByFields")
def group_by_fields(self) -> Optional[List[str]]:
"""
The set of fields to preserve when
crossSeriesReducer is specified.
The groupByFields determine how
the time series are partitioned
into subsets prior to applying the
aggregation function. Each subset
contains time series that have the
same value for each of the
grouping fields. Each individual
time series is a member of exactly
one subset. The crossSeriesReducer
is applied to each subset of time
series. It is not possible to
reduce across different resource
types, so this field implicitly
contains resource.type. Fields not
specified in groupByFields are
aggregated away. If groupByFields
is not specified and all the time
series have the same resource
type, then the time series are
aggregated into a single output
time series. If crossSeriesReducer
is not defined, this field is
ignored.
"""
return pulumi.get(self, "group_by_fields")
@property
@pulumi.getter(name="perSeriesAligner")
def per_series_aligner(self) -> Optional[str]:
"""
The approach to be used to align
individual time series. Not all
alignment functions may be applied
to all time series, depending on
the metric type and value type of
the original time series.
Alignment may change the metric
type or the value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `ALIGN_NONE`, `ALIGN_DELTA`, `ALIGN_RATE`, `ALIGN_INTERPOLATE`, `ALIGN_NEXT_OLDER`, `ALIGN_MIN`, `ALIGN_MAX`, `ALIGN_MEAN`, `ALIGN_COUNT`, `ALIGN_SUM`, `ALIGN_STDDEV`, `ALIGN_COUNT_TRUE`, `ALIGN_COUNT_FALSE`, `ALIGN_FRACTION_TRUE`, `ALIGN_PERCENTILE_99`, `ALIGN_PERCENTILE_95`, `ALIGN_PERCENTILE_50`, `ALIGN_PERCENTILE_05`, and `ALIGN_PERCENT_CHANGE`.
"""
return pulumi.get(self, "per_series_aligner")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertPolicyConditionConditionThresholdDenominatorAggregation(dict):
def __init__(__self__, *,
alignment_period: Optional[str] = None,
cross_series_reducer: Optional[str] = None,
group_by_fields: Optional[List[str]] = None,
per_series_aligner: Optional[str] = None):
"""
:param str alignment_period: The alignment period for per-time
series alignment. If present,
alignmentPeriod must be at least
60 seconds. After per-time series
alignment, each time series will
contain data points only on the
period boundaries. If
perSeriesAligner is not specified
or equals ALIGN_NONE, then this
field is ignored. If
perSeriesAligner is specified and
does not equal ALIGN_NONE, then
this field must be defined;
otherwise an error is returned.
:param str cross_series_reducer: The approach to be used to combine
time series. Not all reducer
functions may be applied to all
time series, depending on the
metric type and the value type of
the original time series.
Reduction may change the metric
type or value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `REDUCE_NONE`, `REDUCE_MEAN`, `REDUCE_MIN`, `REDUCE_MAX`, `REDUCE_SUM`, `REDUCE_STDDEV`, `REDUCE_COUNT`, `REDUCE_COUNT_TRUE`, `REDUCE_COUNT_FALSE`, `REDUCE_FRACTION_TRUE`, `REDUCE_PERCENTILE_99`, `REDUCE_PERCENTILE_95`, `REDUCE_PERCENTILE_50`, and `REDUCE_PERCENTILE_05`.
:param List[str] group_by_fields: The set of fields to preserve when
crossSeriesReducer is specified.
The groupByFields determine how
the time series are partitioned
into subsets prior to applying the
aggregation function. Each subset
contains time series that have the
same value for each of the
grouping fields. Each individual
time series is a member of exactly
one subset. The crossSeriesReducer
is applied to each subset of time
series. It is not possible to
reduce across different resource
types, so this field implicitly
contains resource.type. Fields not
specified in groupByFields are
aggregated away. If groupByFields
is not specified and all the time
series have the same resource
type, then the time series are
aggregated into a single output
time series. If crossSeriesReducer
is not defined, this field is
ignored.
:param str per_series_aligner: The approach to be used to align
individual time series. Not all
alignment functions may be applied
to all time series, depending on
the metric type and value type of
the original time series.
Alignment may change the metric
type or the value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `ALIGN_NONE`, `ALIGN_DELTA`, `ALIGN_RATE`, `ALIGN_INTERPOLATE`, `ALIGN_NEXT_OLDER`, `ALIGN_MIN`, `ALIGN_MAX`, `ALIGN_MEAN`, `ALIGN_COUNT`, `ALIGN_SUM`, `ALIGN_STDDEV`, `ALIGN_COUNT_TRUE`, `ALIGN_COUNT_FALSE`, `ALIGN_FRACTION_TRUE`, `ALIGN_PERCENTILE_99`, `ALIGN_PERCENTILE_95`, `ALIGN_PERCENTILE_50`, `ALIGN_PERCENTILE_05`, and `ALIGN_PERCENT_CHANGE`.
"""
if alignment_period is not None:
pulumi.set(__self__, "alignment_period", alignment_period)
if cross_series_reducer is not None:
pulumi.set(__self__, "cross_series_reducer", cross_series_reducer)
if group_by_fields is not None:
pulumi.set(__self__, "group_by_fields", group_by_fields)
if per_series_aligner is not None:
pulumi.set(__self__, "per_series_aligner", per_series_aligner)
@property
@pulumi.getter(name="alignmentPeriod")
def alignment_period(self) -> Optional[str]:
"""
The alignment period for per-time
series alignment. If present,
alignmentPeriod must be at least
60 seconds. After per-time series
alignment, each time series will
contain data points only on the
period boundaries. If
perSeriesAligner is not specified
or equals ALIGN_NONE, then this
field is ignored. If
perSeriesAligner is specified and
does not equal ALIGN_NONE, then
this field must be defined;
otherwise an error is returned.
"""
return pulumi.get(self, "alignment_period")
@property
@pulumi.getter(name="crossSeriesReducer")
def cross_series_reducer(self) -> Optional[str]:
"""
The approach to be used to combine
time series. Not all reducer
functions may be applied to all
time series, depending on the
metric type and the value type of
the original time series.
Reduction may change the metric
type or value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `REDUCE_NONE`, `REDUCE_MEAN`, `REDUCE_MIN`, `REDUCE_MAX`, `REDUCE_SUM`, `REDUCE_STDDEV`, `REDUCE_COUNT`, `REDUCE_COUNT_TRUE`, `REDUCE_COUNT_FALSE`, `REDUCE_FRACTION_TRUE`, `REDUCE_PERCENTILE_99`, `REDUCE_PERCENTILE_95`, `REDUCE_PERCENTILE_50`, and `REDUCE_PERCENTILE_05`.
"""
return pulumi.get(self, "cross_series_reducer")
@property
@pulumi.getter(name="groupByFields")
def group_by_fields(self) -> Optional[List[str]]:
"""
The set of fields to preserve when
crossSeriesReducer is specified.
The groupByFields determine how
the time series are partitioned
into subsets prior to applying the
aggregation function. Each subset
contains time series that have the
same value for each of the
grouping fields. Each individual
time series is a member of exactly
one subset. The crossSeriesReducer
is applied to each subset of time
series. It is not possible to
reduce across different resource
types, so this field implicitly
contains resource.type. Fields not
specified in groupByFields are
aggregated away. If groupByFields
is not specified and all the time
series have the same resource
type, then the time series are
aggregated into a single output
time series. If crossSeriesReducer
is not defined, this field is
ignored.
"""
return pulumi.get(self, "group_by_fields")
@property
@pulumi.getter(name="perSeriesAligner")
def per_series_aligner(self) -> Optional[str]:
"""
The approach to be used to align
individual time series. Not all
alignment functions may be applied
to all time series, depending on
the metric type and value type of
the original time series.
Alignment may change the metric
type or the value type of the time
series.Time series data must be
aligned in order to perform cross-
time series reduction. If
crossSeriesReducer is specified,
then perSeriesAligner must be
specified and not equal ALIGN_NONE
and alignmentPeriod must be
specified; otherwise, an error is
returned.
Possible values are `ALIGN_NONE`, `ALIGN_DELTA`, `ALIGN_RATE`, `ALIGN_INTERPOLATE`, `ALIGN_NEXT_OLDER`, `ALIGN_MIN`, `ALIGN_MAX`, `ALIGN_MEAN`, `ALIGN_COUNT`, `ALIGN_SUM`, `ALIGN_STDDEV`, `ALIGN_COUNT_TRUE`, `ALIGN_COUNT_FALSE`, `ALIGN_FRACTION_TRUE`, `ALIGN_PERCENTILE_99`, `ALIGN_PERCENTILE_95`, `ALIGN_PERCENTILE_50`, `ALIGN_PERCENTILE_05`, and `ALIGN_PERCENT_CHANGE`.
"""
return pulumi.get(self, "per_series_aligner")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertPolicyConditionConditionThresholdTrigger(dict):
def __init__(__self__, *,
count: Optional[float] = None,
percent: Optional[float] = None):
"""
:param float count: The absolute number of time series
that must fail the predicate for the
condition to be triggered.
:param float percent: The percentage of time series that
must fail the predicate for the
condition to be triggered.
"""
if count is not None:
pulumi.set(__self__, "count", count)
if percent is not None:
pulumi.set(__self__, "percent", percent)
@property
@pulumi.getter
def count(self) -> Optional[float]:
"""
The absolute number of time series
that must fail the predicate for the
condition to be triggered.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter
def percent(self) -> Optional[float]:
"""
The percentage of time series that
must fail the predicate for the
condition to be triggered.
"""
return pulumi.get(self, "percent")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertPolicyCreationRecord(dict):
def __init__(__self__, *,
mutate_time: Optional[str] = None,
mutated_by: Optional[str] = None):
if mutate_time is not None:
pulumi.set(__self__, "mutate_time", mutate_time)
if mutated_by is not None:
pulumi.set(__self__, "mutated_by", mutated_by)
@property
@pulumi.getter(name="mutateTime")
def mutate_time(self) -> Optional[str]:
return pulumi.get(self, "mutate_time")
@property
@pulumi.getter(name="mutatedBy")
def mutated_by(self) -> Optional[str]:
return pulumi.get(self, "mutated_by")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertPolicyDocumentation(dict):
def __init__(__self__, *,
content: Optional[str] = None,
mime_type: Optional[str] = None):
"""
:param str content: The text of the documentation, interpreted according to mimeType.
The content may not exceed 8,192 Unicode characters and may not
exceed 10,240 bytes when encoded in UTF-8 format,
whichever is smaller.
:param str mime_type: The format of the content field. Presently, only the value
"text/markdown" is supported.
"""
if content is not None:
pulumi.set(__self__, "content", content)
if mime_type is not None:
pulumi.set(__self__, "mime_type", mime_type)
@property
@pulumi.getter
def content(self) -> Optional[str]:
"""
The text of the documentation, interpreted according to mimeType.
The content may not exceed 8,192 Unicode characters and may not
exceed 10,240 bytes when encoded in UTF-8 format,
whichever is smaller.
"""
return pulumi.get(self, "content")
@property
@pulumi.getter(name="mimeType")
def mime_type(self) -> Optional[str]:
"""
The format of the content field. Presently, only the value
"text/markdown" is supported.
"""
return pulumi.get(self, "mime_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
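# Sketch: only "text/markdown" is currently accepted for mime_type, e.g.
#
#     documentation={
#         "content": "CPU utilization is high; see the runbook for mitigation.",
#         "mime_type": "text/markdown",
#     }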
@pulumi.output_type
class CustomServiceTelemetry(dict):
def __init__(__self__, *,
resource_name: Optional[str] = None):
"""
:param str resource_name: The full name of the resource that defines this service.
Formatted as described in
https://cloud.google.com/apis/design/resource_names.
"""
if resource_name is not None:
pulumi.set(__self__, "resource_name", resource_name)
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> Optional[str]:
"""
The full name of the resource that defines this service.
Formatted as described in
https://cloud.google.com/apis/design/resource_names.
"""
return pulumi.get(self, "resource_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class MetricDescriptorLabel(dict):
def __init__(__self__, *,
key: str,
description: Optional[str] = None,
value_type: Optional[str] = None):
"""
:param str key: The key for this label. The key must not exceed 100 characters. The first character of the key must be an upper- or lower-case letter, the remaining characters must be letters, digits or underscores, and the key must match the regular expression [a-zA-Z][a-zA-Z0-9_]*
:param str description: A human-readable description for the label.
:param str value_type: The type of data that can be assigned to the label.
Default value is `STRING`.
Possible values are `STRING`, `BOOL`, and `INT64`.
"""
pulumi.set(__self__, "key", key)
if description is not None:
pulumi.set(__self__, "description", description)
if value_type is not None:
pulumi.set(__self__, "value_type", value_type)
@property
@pulumi.getter
def key(self) -> str:
"""
The key for this label. The key must not exceed 100 characters. The first character of the key must be an upper- or lower-case letter, the remaining characters must be letters, digits or underscores, and the key must match the regular expression [a-zA-Z][a-zA-Z0-9_]*
"""
return pulumi.get(self, "key")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
A human-readable description for the label.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="valueType")
def value_type(self) -> Optional[str]:
"""
The type of data that can be assigned to the label.
Default value is `STRING`.
Possible values are `STRING`, `BOOL`, and `INT64`.
"""
return pulumi.get(self, "value_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
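# Illustrative sketch with hypothetical values: `key` is the only required field
# and must match `[a-zA-Z][a-zA-Z0-9_]*`; `description` and `value_type` may be
# omitted (value_type then falls back to STRING on the API side):
#
#     label = MetricDescriptorLabel(key="store_id",
#                                   description="The ID of the store.",
#                                   value_type="STRING")
#     assert label.value_type == "STRING"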
@pulumi.output_type
class MetricDescriptorMetadata(dict):
def __init__(__self__, *,
ingest_delay: Optional[str] = None,
sample_period: Optional[str] = None):
"""
:param str ingest_delay: The delay of data points caused by ingestion. Data points older than this age are guaranteed to be ingested and available to be read, excluding data loss due to errors. In [duration format](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Duration).
:param str sample_period: The sampling period of metric data points. For metrics which are written periodically, consecutive data points are stored at this time interval, excluding data loss due to errors. Metrics with a higher granularity have a smaller sampling period. In [duration format](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Duration).
"""
if ingest_delay is not None:
pulumi.set(__self__, "ingest_delay", ingest_delay)
if sample_period is not None:
pulumi.set(__self__, "sample_period", sample_period)
@property
@pulumi.getter(name="ingestDelay")
def ingest_delay(self) -> Optional[str]:
"""
The delay of data points caused by ingestion. Data points older than this age are guaranteed to be ingested and available to be read, excluding data loss due to errors. In [duration format](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Duration).
"""
return pulumi.get(self, "ingest_delay")
@property
@pulumi.getter(name="samplePeriod")
def sample_period(self) -> Optional[str]:
"""
The sampling period of metric data points. For metrics which are written periodically, consecutive data points are stored at this time interval, excluding data loss due to errors. Metrics with a higher granularity have a smaller sampling period. In [duration format](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Duration).
"""
return pulumi.get(self, "sample_period")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class NotificationChannelSensitiveLabels(dict):
def __init__(__self__, *,
auth_token: Optional[str] = None,
password: Optional[str] = None,
service_key: Optional[str] = None):
"""
:param str auth_token: An authorization token for a notification channel. Channel types that support this field include: slack
**Note**: This property is sensitive and will not be displayed in the plan.
:param str password: A password for a notification channel. Channel types that support this field include: webhook_basicauth
**Note**: This property is sensitive and will not be displayed in the plan.
:param str service_key: A service key for a notification channel. Channel types that support this field include: pagerduty
**Note**: This property is sensitive and will not be displayed in the plan.
"""
if auth_token is not None:
pulumi.set(__self__, "auth_token", auth_token)
if password is not None:
pulumi.set(__self__, "password", password)
if service_key is not None:
pulumi.set(__self__, "service_key", service_key)
@property
@pulumi.getter(name="authToken")
def auth_token(self) -> Optional[str]:
"""
An authorization token for a notification channel. Channel types that support this field include: slack
**Note**: This property is sensitive and will not be displayed in the plan.
"""
return pulumi.get(self, "auth_token")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
A password for a notification channel. Channel types that support this field include: webhook_basicauth
**Note**: This property is sensitive and will not be displayed in the plan.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="serviceKey")
def service_key(self) -> Optional[str]:
"""
A service key for a notification channel. Channel types that support this field include: pagerduty
**Note**: This property is sensitive and will not be displayed in the plan.
"""
return pulumi.get(self, "service_key")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
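# Illustrative sketch: each field corresponds to one channel type (auth_token for
# slack, password for webhook_basicauth, service_key for pagerduty), so normally
# only the field matching the channel's type is set. The value is hypothetical:
#
#     labels = NotificationChannelSensitiveLabels(service_key="example-pd-key")
#     assert labels.service_key == "example-pd-key"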
@pulumi.output_type
class SloBasicSli(dict):
def __init__(__self__, *,
latency: 'outputs.SloBasicSliLatency',
locations: Optional[List[str]] = None,
methods: Optional[List[str]] = None,
versions: Optional[List[str]] = None):
"""
:param 'SloBasicSliLatencyArgs' latency: Parameters for a latency threshold SLI.
Structure is documented below.
:param List[str] locations: An optional set of locations to which this SLI is relevant.
Telemetry from other locations will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
locations in which the Service has activity. For service types
that don't support breaking down by location, setting this
field will result in an error.
:param List[str] methods: An optional set of RPCs to which this SLI is relevant.
Telemetry from other methods will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
the Service's methods. For service types that don't support
breaking down by method, setting this field will result in an
error.
:param List[str] versions: The set of API versions to which this SLI is relevant.
Telemetry from other API versions will not be used to
calculate performance for this SLI. If omitted,
this SLI applies to all API versions. For service types
that don't support breaking down by version, setting this
field will result in an error.
"""
pulumi.set(__self__, "latency", latency)
if locations is not None:
pulumi.set(__self__, "locations", locations)
if methods is not None:
pulumi.set(__self__, "methods", methods)
if versions is not None:
pulumi.set(__self__, "versions", versions)
@property
@pulumi.getter
def latency(self) -> 'outputs.SloBasicSliLatency':
"""
Parameters for a latency threshold SLI.
Structure is documented below.
"""
return pulumi.get(self, "latency")
@property
@pulumi.getter
def locations(self) -> Optional[List[str]]:
"""
An optional set of locations to which this SLI is relevant.
Telemetry from other locations will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
locations in which the Service has activity. For service types
that don't support breaking down by location, setting this
field will result in an error.
"""
return pulumi.get(self, "locations")
@property
@pulumi.getter
def methods(self) -> Optional[List[str]]:
"""
An optional set of RPCs to which this SLI is relevant.
Telemetry from other methods will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
the Service's methods. For service types that don't support
breaking down by method, setting this field will result in an
error.
"""
return pulumi.get(self, "methods")
@property
@pulumi.getter
def versions(self) -> Optional[List[str]]:
"""
The set of API versions to which this SLI is relevant.
Telemetry from other API versions will not be used to
calculate performance for this SLI. If omitted,
this SLI applies to all API versions. For service types
that don't support breaking down by version, setting this
field will result in an error.
"""
return pulumi.get(self, "versions")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloBasicSliLatency(dict):
def __init__(__self__, *,
threshold: str):
"""
:param str threshold: A duration string, e.g. 10s.
Good service is defined to be the count of requests made to
this service that return in no more than the given threshold.
"""
pulumi.set(__self__, "threshold", threshold)
@property
@pulumi.getter
def threshold(self) -> str:
"""
A duration string, e.g. 10s.
Good service is defined to be the count of requests made to
this service that return in no more than the given threshold.
"""
return pulumi.get(self, "threshold")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
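# Illustrative sketch: SloBasicSli nests the SloBasicSliLatency defined above; the
# optional list-valued scopes (locations/methods/versions) default to "all" when
# omitted. The RPC names below are hypothetical:
#
#     sli = SloBasicSli(latency=SloBasicSliLatency(threshold="1s"),
#                       methods=["GetShelf", "ListBooks"])
#     assert sli.latency.threshold == "1s"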
@pulumi.output_type
class SloRequestBasedSli(dict):
def __init__(__self__, *,
distribution_cut: Optional['outputs.SloRequestBasedSliDistributionCut'] = None,
good_total_ratio: Optional['outputs.SloRequestBasedSliGoodTotalRatio'] = None):
"""
:param 'SloRequestBasedSliDistributionCutArgs' distribution_cut: Used when good_service is defined by a count of values aggregated in a
Distribution that fall into a good range. The total_service is the
total count of all values aggregated in the Distribution.
Defines a distribution TimeSeries filter and thresholds used for
measuring good service and total service.
Structure is documented below.
:param 'SloRequestBasedSliGoodTotalRatioArgs' good_total_ratio: A means to compute a ratio of `good_service` to `total_service`.
Defines computing this ratio with two TimeSeries [monitoring filters](https://cloud.google.com/monitoring/api/v3/filters).
Must specify exactly two of good, bad, and total service filters.
The relationship good_service + bad_service = total_service
will be assumed.
Structure is documented below.
"""
if distribution_cut is not None:
pulumi.set(__self__, "distribution_cut", distribution_cut)
if good_total_ratio is not None:
pulumi.set(__self__, "good_total_ratio", good_total_ratio)
@property
@pulumi.getter(name="distributionCut")
def distribution_cut(self) -> Optional['outputs.SloRequestBasedSliDistributionCut']:
"""
Used when good_service is defined by a count of values aggregated in a
Distribution that fall into a good range. The total_service is the
total count of all values aggregated in the Distribution.
Defines a distribution TimeSeries filter and thresholds used for
measuring good service and total service.
Structure is documented below.
"""
return pulumi.get(self, "distribution_cut")
@property
@pulumi.getter(name="goodTotalRatio")
def good_total_ratio(self) -> Optional['outputs.SloRequestBasedSliGoodTotalRatio']:
"""
A means to compute a ratio of `good_service` to `total_service`.
Defines computing this ratio with two TimeSeries [monitoring filters](https://cloud.google.com/monitoring/api/v3/filters).
Must specify exactly two of good, bad, and total service filters.
The relationship good_service + bad_service = total_service
will be assumed.
Structure is documented below.
"""
return pulumi.get(self, "good_total_ratio")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloRequestBasedSliDistributionCut(dict):
def __init__(__self__, *,
distribution_filter: str,
range: 'outputs.SloRequestBasedSliDistributionCutRange'):
"""
:param str distribution_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
aggregating values to quantify the good service provided.
Must have ValueType = DISTRIBUTION and
MetricKind = DELTA or MetricKind = CUMULATIVE.
:param 'SloRequestBasedSliDistributionCutRangeArgs' range: Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
"""
pulumi.set(__self__, "distribution_filter", distribution_filter)
pulumi.set(__self__, "range", range)
@property
@pulumi.getter(name="distributionFilter")
def distribution_filter(self) -> str:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
aggregating values to quantify the good service provided.
Must have ValueType = DISTRIBUTION and
MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "distribution_filter")
@property
@pulumi.getter
def range(self) -> 'outputs.SloRequestBasedSliDistributionCutRange':
"""
Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
"""
return pulumi.get(self, "range")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloRequestBasedSliDistributionCutRange(dict):
def __init__(__self__, *,
max: Optional[float] = None,
min: Optional[float] = None):
"""
:param float max: Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
:param float min: Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
if max is not None:
pulumi.set(__self__, "max", max)
if min is not None:
pulumi.set(__self__, "min", min)
@property
@pulumi.getter
def max(self) -> Optional[float]:
"""
Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
"""
return pulumi.get(self, "max")
@property
@pulumi.getter
def min(self) -> Optional[float]:
"""
Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
return pulumi.get(self, "min")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloRequestBasedSliGoodTotalRatio(dict):
def __init__(__self__, *,
bad_service_filter: Optional[str] = None,
good_service_filter: Optional[str] = None,
total_service_filter: Optional[str] = None):
"""
:param str bad_service_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying bad service provided, either demanded service that
was not provided or demanded service that was of inadequate
quality. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
:param str good_service_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying good service provided. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
:param str total_service_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying total demanded service. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
if bad_service_filter is not None:
pulumi.set(__self__, "bad_service_filter", bad_service_filter)
if good_service_filter is not None:
pulumi.set(__self__, "good_service_filter", good_service_filter)
if total_service_filter is not None:
pulumi.set(__self__, "total_service_filter", total_service_filter)
@property
@pulumi.getter(name="badServiceFilter")
def bad_service_filter(self) -> Optional[str]:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying bad service provided, either demanded service that
was not provided or demanded service that was of inadequate
quality. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "bad_service_filter")
@property
@pulumi.getter(name="goodServiceFilter")
def good_service_filter(self) -> Optional[str]:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying good service provided. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "good_service_filter")
@property
@pulumi.getter(name="totalServiceFilter")
def total_service_filter(self) -> Optional[str]:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying total demanded service. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "total_service_filter")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
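# Illustrative sketch: exactly two of the three filters are supplied and the API
# infers the third via good + bad = total. The metric type below is hypothetical:
#
#     ratio = SloRequestBasedSliGoodTotalRatio(
#         good_service_filter='metric.type="example.com/request_count" metric.label.code="200"',
#         total_service_filter='metric.type="example.com/request_count"')
#     # bad_service_filter is left unset; good + bad = total supplies it.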
@pulumi.output_type
class SloWindowsBasedSli(dict):
def __init__(__self__, *,
good_bad_metric_filter: Optional[str] = None,
good_total_ratio_threshold: Optional['outputs.SloWindowsBasedSliGoodTotalRatioThreshold'] = None,
metric_mean_in_range: Optional['outputs.SloWindowsBasedSliMetricMeanInRange'] = None,
metric_sum_in_range: Optional['outputs.SloWindowsBasedSliMetricSumInRange'] = None,
window_period: Optional[str] = None):
"""
:param str good_bad_metric_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
with ValueType = BOOL. The window is good if any true values
appear in the window. One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
:param 'SloWindowsBasedSliGoodTotalRatioThresholdArgs' good_total_ratio_threshold: Criterion that describes a window as good if its performance is
high enough. One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
Structure is documented below.
:param 'SloWindowsBasedSliMetricMeanInRangeArgs' metric_mean_in_range: Criterion that describes a window as good if the metric's value
is in a good range, *averaged* across returned streams.
One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
Average value X of `time_series` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
:param 'SloWindowsBasedSliMetricSumInRangeArgs' metric_sum_in_range: Criterion that describes a window as good if the metric's value
is in a good range, *summed* across returned streams.
Summed value `X` of `time_series` should satisfy
`range.min <= X < range.max` for a good window.
One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
Structure is documented below.
:param str window_period: Duration over which window quality is evaluated, given as a
duration string "{X}s" representing X seconds. Must be an
integer fraction of a day and at least 60s.
"""
if good_bad_metric_filter is not None:
pulumi.set(__self__, "good_bad_metric_filter", good_bad_metric_filter)
if good_total_ratio_threshold is not None:
pulumi.set(__self__, "good_total_ratio_threshold", good_total_ratio_threshold)
if metric_mean_in_range is not None:
pulumi.set(__self__, "metric_mean_in_range", metric_mean_in_range)
if metric_sum_in_range is not None:
pulumi.set(__self__, "metric_sum_in_range", metric_sum_in_range)
if window_period is not None:
pulumi.set(__self__, "window_period", window_period)
@property
@pulumi.getter(name="goodBadMetricFilter")
def good_bad_metric_filter(self) -> Optional[str]:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
with ValueType = BOOL. The window is good if any true values
appear in the window. One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
"""
return pulumi.get(self, "good_bad_metric_filter")
@property
@pulumi.getter(name="goodTotalRatioThreshold")
def good_total_ratio_threshold(self) -> Optional['outputs.SloWindowsBasedSliGoodTotalRatioThreshold']:
"""
Criterion that describes a window as good if its performance is
high enough. One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
Structure is documented below.
"""
return pulumi.get(self, "good_total_ratio_threshold")
@property
@pulumi.getter(name="metricMeanInRange")
def metric_mean_in_range(self) -> Optional['outputs.SloWindowsBasedSliMetricMeanInRange']:
"""
Criterion that describes a window as good if the metric's value
is in a good range, *averaged* across returned streams.
One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
Average value X of `time_series` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
"""
return pulumi.get(self, "metric_mean_in_range")
@property
@pulumi.getter(name="metricSumInRange")
def metric_sum_in_range(self) -> Optional['outputs.SloWindowsBasedSliMetricSumInRange']:
"""
Criterion that describes a window as good if the metric's value
is in a good range, *summed* across returned streams.
Summed value `X` of `time_series` should satisfy
`range.min <= X < range.max` for a good window.
One of `good_bad_metric_filter`,
`good_total_ratio_threshold`, `metric_mean_in_range`,
`metric_sum_in_range` must be set for `windows_based_sli`.
Structure is documented below.
"""
return pulumi.get(self, "metric_sum_in_range")
@property
@pulumi.getter(name="windowPeriod")
def window_period(self) -> Optional[str]:
"""
Duration over which window quality is evaluated, given as a
duration string "{X}s" representing X seconds. Must be an
integer fraction of a day and at least 60s.
"""
return pulumi.get(self, "window_period")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
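# Illustrative sketch: exactly one of the four window criteria should be set, and
# window_period must be an integer fraction of a day of at least 60s. The metric
# type below is hypothetical:
#
#     sli = SloWindowsBasedSli(
#         good_bad_metric_filter='metric.type="example.com/window_is_good"',
#         window_period="300s")
#     assert sli.window_period == "300s"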
@pulumi.output_type
class SloWindowsBasedSliGoodTotalRatioThreshold(dict):
def __init__(__self__, *,
basic_sli_performance: Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformance'] = None,
performance: Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformance'] = None,
threshold: Optional[float] = None):
"""
:param 'SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformanceArgs' basic_sli_performance: Basic SLI to evaluate to judge window quality.
Structure is documented below.
:param 'SloWindowsBasedSliGoodTotalRatioThresholdPerformanceArgs' performance: Request-based SLI to evaluate to judge window quality.
Structure is documented below.
:param float threshold: If window performance >= threshold, the window is
counted as good.
"""
if basic_sli_performance is not None:
pulumi.set(__self__, "basic_sli_performance", basic_sli_performance)
if performance is not None:
pulumi.set(__self__, "performance", performance)
if threshold is not None:
pulumi.set(__self__, "threshold", threshold)
@property
@pulumi.getter(name="basicSliPerformance")
def basic_sli_performance(self) -> Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformance']:
"""
Basic SLI to evaluate to judge window quality.
Structure is documented below.
"""
return pulumi.get(self, "basic_sli_performance")
@property
@pulumi.getter
def performance(self) -> Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformance']:
"""
Request-based SLI to evaluate to judge window quality.
Structure is documented below.
"""
return pulumi.get(self, "performance")
@property
@pulumi.getter
def threshold(self) -> Optional[float]:
"""
If window performance >= threshold, the window is
counted as good.
"""
return pulumi.get(self, "threshold")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformance(dict):
def __init__(__self__, *,
latency: 'outputs.SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformanceLatency',
locations: Optional[List[str]] = None,
methods: Optional[List[str]] = None,
versions: Optional[List[str]] = None):
"""
:param 'SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformanceLatencyArgs' latency: Parameters for a latency threshold SLI.
Structure is documented below.
:param List[str] locations: An optional set of locations to which this SLI is relevant.
Telemetry from other locations will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
locations in which the Service has activity. For service types
that don't support breaking down by location, setting this
field will result in an error.
:param List[str] methods: An optional set of RPCs to which this SLI is relevant.
Telemetry from other methods will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
the Service's methods. For service types that don't support
breaking down by method, setting this field will result in an
error.
:param List[str] versions: The set of API versions to which this SLI is relevant.
Telemetry from other API versions will not be used to
calculate performance for this SLI. If omitted,
this SLI applies to all API versions. For service types
that don't support breaking down by version, setting this
field will result in an error.
"""
pulumi.set(__self__, "latency", latency)
if locations is not None:
pulumi.set(__self__, "locations", locations)
if methods is not None:
pulumi.set(__self__, "methods", methods)
if versions is not None:
pulumi.set(__self__, "versions", versions)
@property
@pulumi.getter
def latency(self) -> 'outputs.SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformanceLatency':
"""
Parameters for a latency threshold SLI.
Structure is documented below.
"""
return pulumi.get(self, "latency")
@property
@pulumi.getter
def locations(self) -> Optional[List[str]]:
"""
An optional set of locations to which this SLI is relevant.
Telemetry from other locations will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
locations in which the Service has activity. For service types
that don't support breaking down by location, setting this
field will result in an error.
"""
return pulumi.get(self, "locations")
@property
@pulumi.getter
def methods(self) -> Optional[List[str]]:
"""
An optional set of RPCs to which this SLI is relevant.
Telemetry from other methods will not be used to calculate
performance for this SLI. If omitted, this SLI applies to all
the Service's methods. For service types that don't support
breaking down by method, setting this field will result in an
error.
"""
return pulumi.get(self, "methods")
@property
@pulumi.getter
def versions(self) -> Optional[List[str]]:
"""
The set of API versions to which this SLI is relevant.
Telemetry from other API versions will not be used to
calculate performance for this SLI. If omitted,
this SLI applies to all API versions. For service types
that don't support breaking down by version, setting this
field will result in an error.
"""
return pulumi.get(self, "versions")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliGoodTotalRatioThresholdBasicSliPerformanceLatency(dict):
def __init__(__self__, *,
threshold: str):
"""
:param str threshold: A duration string, e.g. 10s.
Good service is defined to be the count of requests made to
this service that return in no more than the given threshold.
"""
pulumi.set(__self__, "threshold", threshold)
@property
@pulumi.getter
def threshold(self) -> str:
"""
A duration string, e.g. 10s.
Good service is defined to be the count of requests made to
this service that return in no more than the given threshold.
"""
return pulumi.get(self, "threshold")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliGoodTotalRatioThresholdPerformance(dict):
def __init__(__self__, *,
distribution_cut: Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCut'] = None,
good_total_ratio: Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformanceGoodTotalRatio'] = None):
"""
:param 'SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCutArgs' distribution_cut: Used when good_service is defined by a count of values aggregated in a
Distribution that fall into a good range. The total_service is the
total count of all values aggregated in the Distribution.
Defines a distribution TimeSeries filter and thresholds used for
measuring good service and total service.
Structure is documented below.
:param 'SloWindowsBasedSliGoodTotalRatioThresholdPerformanceGoodTotalRatioArgs' good_total_ratio: A means to compute a ratio of `good_service` to `total_service`.
Defines computing this ratio with two TimeSeries [monitoring filters](https://cloud.google.com/monitoring/api/v3/filters).
Must specify exactly two of good, bad, and total service filters.
The relationship good_service + bad_service = total_service
will be assumed.
Structure is documented below.
"""
if distribution_cut is not None:
pulumi.set(__self__, "distribution_cut", distribution_cut)
if good_total_ratio is not None:
pulumi.set(__self__, "good_total_ratio", good_total_ratio)
@property
@pulumi.getter(name="distributionCut")
def distribution_cut(self) -> Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCut']:
"""
Used when good_service is defined by a count of values aggregated in a
Distribution that fall into a good range. The total_service is the
total count of all values aggregated in the Distribution.
Defines a distribution TimeSeries filter and thresholds used for
measuring good service and total service.
Structure is documented below.
"""
return pulumi.get(self, "distribution_cut")
@property
@pulumi.getter(name="goodTotalRatio")
def good_total_ratio(self) -> Optional['outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformanceGoodTotalRatio']:
"""
A means to compute a ratio of `good_service` to `total_service`.
Defines computing this ratio with two TimeSeries [monitoring filters](https://cloud.google.com/monitoring/api/v3/filters).
Must specify exactly two of good, bad, and total service filters.
The relationship good_service + bad_service = total_service
will be assumed.
Structure is documented below.
"""
return pulumi.get(self, "good_total_ratio")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCut(dict):
def __init__(__self__, *,
distribution_filter: str,
range: 'outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCutRange'):
"""
:param str distribution_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
aggregating values to quantify the good service provided.
Must have ValueType = DISTRIBUTION and
MetricKind = DELTA or MetricKind = CUMULATIVE.
:param 'SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCutRangeArgs' range: Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
"""
pulumi.set(__self__, "distribution_filter", distribution_filter)
pulumi.set(__self__, "range", range)
@property
@pulumi.getter(name="distributionFilter")
def distribution_filter(self) -> str:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
aggregating values to quantify the good service provided.
Must have ValueType = DISTRIBUTION and
MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "distribution_filter")
@property
@pulumi.getter
def range(self) -> 'outputs.SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCutRange':
"""
Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
"""
return pulumi.get(self, "range")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliGoodTotalRatioThresholdPerformanceDistributionCutRange(dict):
def __init__(__self__, *,
max: Optional[float] = None,
min: Optional[float] = None):
"""
:param float max: Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
:param float min: Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
if max is not None:
pulumi.set(__self__, "max", max)
if min is not None:
pulumi.set(__self__, "min", min)
@property
@pulumi.getter
def max(self) -> Optional[float]:
"""
Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
"""
return pulumi.get(self, "max")
@property
@pulumi.getter
def min(self) -> Optional[float]:
"""
Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
return pulumi.get(self, "min")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliGoodTotalRatioThresholdPerformanceGoodTotalRatio(dict):
def __init__(__self__, *,
bad_service_filter: Optional[str] = None,
good_service_filter: Optional[str] = None,
total_service_filter: Optional[str] = None):
"""
:param str bad_service_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying bad service provided, either demanded service that
was not provided or demanded service that was of inadequate
quality. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
:param str good_service_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying good service provided. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
:param str total_service_filter: A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying total demanded service. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
if bad_service_filter is not None:
pulumi.set(__self__, "bad_service_filter", bad_service_filter)
if good_service_filter is not None:
pulumi.set(__self__, "good_service_filter", good_service_filter)
if total_service_filter is not None:
pulumi.set(__self__, "total_service_filter", total_service_filter)
@property
@pulumi.getter(name="badServiceFilter")
def bad_service_filter(self) -> Optional[str]:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying bad service provided, either demanded service that
was not provided or demanded service that was of inadequate
quality. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "bad_service_filter")
@property
@pulumi.getter(name="goodServiceFilter")
def good_service_filter(self) -> Optional[str]:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying good service provided. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "good_service_filter")
@property
@pulumi.getter(name="totalServiceFilter")
def total_service_filter(self) -> Optional[str]:
"""
A TimeSeries [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
quantifying total demanded service. Exactly two of
good, bad, or total service filters must be defined (where
good + bad = total is assumed).
Must have ValueType = DOUBLE or ValueType = INT64 and
must have MetricKind = DELTA or MetricKind = CUMULATIVE.
"""
return pulumi.get(self, "total_service_filter")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliMetricMeanInRange(dict):
def __init__(__self__, *,
range: 'outputs.SloWindowsBasedSliMetricMeanInRangeRange',
time_series: str):
"""
:param 'SloWindowsBasedSliMetricMeanInRangeRangeArgs' range: Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
:param str time_series: A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
specifying the TimeSeries to use for evaluating window
quality. The provided TimeSeries must have
ValueType = INT64 or ValueType = DOUBLE and
MetricKind = GAUGE.
Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
"""
pulumi.set(__self__, "range", range)
pulumi.set(__self__, "time_series", time_series)
@property
@pulumi.getter
def range(self) -> 'outputs.SloWindowsBasedSliMetricMeanInRangeRange':
"""
Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
"""
return pulumi.get(self, "range")
@property
@pulumi.getter(name="timeSeries")
def time_series(self) -> str:
"""
A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
specifying the TimeSeries to use for evaluating window
quality. The provided TimeSeries must have
ValueType = INT64 or ValueType = DOUBLE and
MetricKind = GAUGE.
Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
"""
return pulumi.get(self, "time_series")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliMetricMeanInRangeRange(dict):
def __init__(__self__, *,
max: Optional[float] = None,
min: Optional[float] = None):
"""
:param float max: Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
:param float min: Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
if max is not None:
pulumi.set(__self__, "max", max)
if min is not None:
pulumi.set(__self__, "min", min)
@property
@pulumi.getter
def max(self) -> Optional[float]:
"""
Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
"""
return pulumi.get(self, "max")
@property
@pulumi.getter
def min(self) -> Optional[float]:
"""
Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
return pulumi.get(self, "min")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliMetricSumInRange(dict):
def __init__(__self__, *,
range: 'outputs.SloWindowsBasedSliMetricSumInRangeRange',
time_series: str):
"""
:param 'SloWindowsBasedSliMetricSumInRangeRangeArgs' range: Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
:param str time_series: A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
specifying the TimeSeries to use for evaluating window
quality. The provided TimeSeries must have
ValueType = INT64 or ValueType = DOUBLE and
MetricKind = GAUGE.
Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
"""
pulumi.set(__self__, "range", range)
pulumi.set(__self__, "time_series", time_series)
@property
@pulumi.getter
def range(self) -> 'outputs.SloWindowsBasedSliMetricSumInRangeRange':
"""
Range of numerical values. The computed good_service
will be the count of values x in the Distribution such
that `range.min <= x < range.max`, inclusive of min and
exclusive of max. Open ranges can be defined by setting
just one of min or max. Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
Structure is documented below.
"""
return pulumi.get(self, "range")
@property
@pulumi.getter(name="timeSeries")
def time_series(self) -> str:
"""
A [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters)
specifying the TimeSeries to use for evaluating window
quality. The provided TimeSeries must have
ValueType = INT64 or ValueType = DOUBLE and
MetricKind = GAUGE.
Summed value `X` should satisfy
`range.min <= X < range.max` for a good window.
"""
return pulumi.get(self, "time_series")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SloWindowsBasedSliMetricSumInRangeRange(dict):
def __init__(__self__, *,
max: Optional[float] = None,
min: Optional[float] = None):
"""
:param float max: Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
:param float min: Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
if max is not None:
pulumi.set(__self__, "max", max)
if min is not None:
pulumi.set(__self__, "min", min)
@property
@pulumi.getter
def max(self) -> Optional[float]:
"""
Max value for the range (inclusive). If not given,
will be set to "infinity", defining an open range
">= range.min"
"""
return pulumi.get(self, "max")
@property
@pulumi.getter
def min(self) -> Optional[float]:
"""
Min value for the range (inclusive). If not given,
will be set to "-infinity", defining an open range
"< range.max"
"""
return pulumi.get(self, "min")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class UptimeCheckConfigContentMatcher(dict):
def __init__(__self__, *,
content: str,
matcher: Optional[str] = None):
"""
:param str content: String or regex content to match (max 1024 bytes).
:param str matcher: The type of content matcher that will be applied to the server output, compared to the content string when the check is run.
Default value is `CONTAINS_STRING`.
Possible values are `CONTAINS_STRING`, `NOT_CONTAINS_STRING`, `MATCHES_REGEX`, and `NOT_MATCHES_REGEX`.
"""
pulumi.set(__self__, "content", content)
if matcher is not None:
pulumi.set(__self__, "matcher", matcher)
@property
@pulumi.getter
def content(self) -> str:
"""
String or regex content to match (max 1024 bytes).
"""
return pulumi.get(self, "content")
@property
@pulumi.getter
def matcher(self) -> Optional[str]:
"""
The type of content matcher that will be applied to the server output, compared to the content string when the check is run.
Default value is `CONTAINS_STRING`.
Possible values are `CONTAINS_STRING`, `NOT_CONTAINS_STRING`, `MATCHES_REGEX`, and `NOT_MATCHES_REGEX`.
"""
return pulumi.get(self, "matcher")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
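# Illustrative sketch: a plain substring match only needs `content`; leaving
# `matcher` unset lets the provider apply the CONTAINS_STRING default:
#
#     m = UptimeCheckConfigContentMatcher(content='"status": "ok"')
#     assert m.content == '"status": "ok"'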
@pulumi.output_type
class UptimeCheckConfigHttpCheck(dict):
def __init__(__self__, *,
auth_info: Optional['outputs.UptimeCheckConfigHttpCheckAuthInfo'] = None,
body: Optional[str] = None,
content_type: Optional[str] = None,
headers: Optional[Mapping[str, str]] = None,
mask_headers: Optional[bool] = None,
path: Optional[str] = None,
port: Optional[float] = None,
request_method: Optional[str] = None,
use_ssl: Optional[bool] = None,
validate_ssl: Optional[bool] = None):
"""
:param 'UptimeCheckConfigHttpCheckAuthInfoArgs' auth_info: The authentication information. Optional when creating an HTTP check; defaults to empty.
Structure is documented below.
:param str body: The request body associated with the HTTP POST request. If contentType is URL_ENCODED, the body passed in must be URL-encoded. Users can provide a Content-Length header via the headers field or the API will do so. If the requestMethod is GET and body is not empty, the API will return an error. The maximum byte size is 1 megabyte. Note - As with all bytes fields JSON representations are base64 encoded. e.g. "foo=bar" in URL-encoded form is "foo%3Dbar" and in base64 encoding is "Zm9vJTNEYmFy".
:param str content_type: The content type to use for the check.
Possible values are `TYPE_UNSPECIFIED` and `URL_ENCODED`.
:param Mapping[str, str] headers: The list of headers to send as part of the uptime check request. If two headers have the same key and different values, they should be entered as a single header, with the value being a comma-separated list of all the desired values as described at https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). Entering two separate headers with the same key in a Create call will cause the first to be overwritten by the second. The maximum number of headers allowed is 100.
:param bool mask_headers: Boolean specifying whether to encrypt the header information. Encryption should be specified for any headers related to authentication that you do not wish to be seen when retrieving the configuration. The server will be responsible for encrypting the headers. On Get/List calls, if mask_headers is set to True then the headers will be obscured with ******.
:param str path: The path to the page to run the check against. Will be combined with the host (specified within the MonitoredResource) and port to construct the full URL. Optional (defaults to "/").
:param float port: The port to run the check against. Will be combined with host (specified within the MonitoredResource) to construct the full URL.
:param str request_method: The HTTP request method to use for the check. If set to METHOD_UNSPECIFIED then requestMethod defaults to GET.
Default value is `GET`.
Possible values are `METHOD_UNSPECIFIED`, `GET`, and `POST`.
:param bool use_ssl: If true, use HTTPS instead of HTTP to run the check.
:param bool validate_ssl: Boolean specifying whether to include SSL certificate validation as a part of the Uptime check. Only applies to checks where monitoredResource is set to uptime_url. If useSsl is false, setting validateSsl to true has no effect.
"""
if auth_info is not None:
pulumi.set(__self__, "auth_info", auth_info)
if body is not None:
pulumi.set(__self__, "body", body)
if content_type is not None:
pulumi.set(__self__, "content_type", content_type)
if headers is not None:
pulumi.set(__self__, "headers", headers)
if mask_headers is not None:
pulumi.set(__self__, "mask_headers", mask_headers)
if path is not None:
pulumi.set(__self__, "path", path)
if port is not None:
pulumi.set(__self__, "port", port)
if request_method is not None:
pulumi.set(__self__, "request_method", request_method)
if use_ssl is not None:
pulumi.set(__self__, "use_ssl", use_ssl)
if validate_ssl is not None:
pulumi.set(__self__, "validate_ssl", validate_ssl)
@property
@pulumi.getter(name="authInfo")
def auth_info(self) -> Optional['outputs.UptimeCheckConfigHttpCheckAuthInfo']:
"""
The authentication information. Optional when creating an HTTP check; defaults to empty.
Structure is documented below.
"""
return pulumi.get(self, "auth_info")
@property
@pulumi.getter
def body(self) -> Optional[str]:
"""
The request body associated with the HTTP POST request. If contentType is URL_ENCODED, the body passed in must be URL-encoded. Users can provide a Content-Length header via the headers field or the API will do so. If the requestMethod is GET and body is not empty, the API will return an error. The maximum byte size is 1 megabyte. Note - As with all bytes fields JSON representations are base64 encoded. e.g. "foo=bar" in URL-encoded form is "foo%3Dbar" and in base64 encoding is "Zm9vJTNEYmFy".
"""
return pulumi.get(self, "body")
@property
@pulumi.getter(name="contentType")
def content_type(self) -> Optional[str]:
"""
The content type to use for the check.
Possible values are `TYPE_UNSPECIFIED` and `URL_ENCODED`.
"""
return pulumi.get(self, "content_type")
@property
@pulumi.getter
def headers(self) -> Optional[Mapping[str, str]]:
"""
The list of headers to send as part of the uptime check request. If two headers have the same key and different values, they should be entered as a single header, with the value being a comma-separated list of all the desired values as described at https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). Entering two separate headers with the same key in a Create call will cause the first to be overwritten by the second. The maximum number of headers allowed is 100.
"""
return pulumi.get(self, "headers")
@property
@pulumi.getter(name="maskHeaders")
def mask_headers(self) -> Optional[bool]:
"""
Boolean specifying whether to encrypt the header information. Encryption should be specified for any headers related to authentication that you do not wish to be seen when retrieving the configuration. The server will be responsible for encrypting the headers. On Get/List calls, if mask_headers is set to True then the headers will be obscured with ******.
"""
return pulumi.get(self, "mask_headers")
@property
@pulumi.getter
def path(self) -> Optional[str]:
"""
The path to the page to run the check against. Will be combined with the host (specified within the MonitoredResource) and port to construct the full URL. Optional (defaults to "/").
"""
return pulumi.get(self, "path")
@property
@pulumi.getter
def port(self) -> Optional[float]:
"""
The port to run the check against. Will be combined with host (specified within the MonitoredResource) to construct the full URL.
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="requestMethod")
def request_method(self) -> Optional[str]:
"""
The HTTP request method to use for the check. If set to METHOD_UNSPECIFIED then requestMethod defaults to GET.
Default value is `GET`.
Possible values are `METHOD_UNSPECIFIED`, `GET`, and `POST`.
"""
return pulumi.get(self, "request_method")
@property
@pulumi.getter(name="useSsl")
def use_ssl(self) -> Optional[bool]:
"""
If true, use HTTPS instead of HTTP to run the check.
"""
return pulumi.get(self, "use_ssl")
@property
@pulumi.getter(name="validateSsl")
def validate_ssl(self) -> Optional[bool]:
"""
Boolean specifying whether to include SSL certificate validation as a part of the Uptime check. Only applies to checks where monitoredResource is set to uptime_url. If useSsl is false, setting validateSsl to true has no effect.
"""
return pulumi.get(self, "validate_ssl")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
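# Illustrative sketch with hypothetical values: a POST check with a URL-encoded
# body. Per the docstrings above, the body is base64 encoded and combining a GET
# request with a non-empty body is an API error:
#
#     check = UptimeCheckConfigHttpCheck(path="/healthz", port=443, use_ssl=True,
#                                        request_method="POST",
#                                        content_type="URL_ENCODED",
#                                        body="Zm9vPWJhcg==")  # base64("foo=bar")
#     assert check.request_method == "POST"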
@pulumi.output_type
class UptimeCheckConfigHttpCheckAuthInfo(dict):
def __init__(__self__, *,
password: str,
username: str):
"""
:param str password: The password to authenticate.
**Note**: This property is sensitive and will not be displayed in the plan.
:param str username: The username to authenticate.
"""
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "username", username)
@property
@pulumi.getter
def password(self) -> str:
"""
The password to authenticate.
**Note**: This property is sensitive and will not be displayed in the plan.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def username(self) -> str:
"""
The username to authenticate.
"""
return pulumi.get(self, "username")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class UptimeCheckConfigMonitoredResource(dict):
def __init__(__self__, *,
labels: Mapping[str, str],
type: str):
"""
:param Mapping[str, str] labels: Values for all of the labels listed in the associated monitored resource descriptor. For example, Compute Engine VM instances use the labels "project_id", "instance_id", and "zone".
:param str type: The monitored resource type. This field must match the type field of a MonitoredResourceDescriptor (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.monitoredResourceDescriptors#MonitoredResourceDescriptor) object. For example, the type of a Compute Engine VM instance is gce_instance. For a list of types, see Monitoring resource types (https://cloud.google.com/monitoring/api/resources) and Logging resource types (https://cloud.google.com/logging/docs/api/v2/resource-list).
"""
pulumi.set(__self__, "labels", labels)
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
"""
Values for all of the labels listed in the associated monitored resource descriptor. For example, Compute Engine VM instances use the labels "project_id", "instance_id", and "zone".
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def type(self) -> str:
"""
The monitored resource type. This field must match the type field of a MonitoredResourceDescriptor (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.monitoredResourceDescriptors#MonitoredResourceDescriptor) object. For example, the type of a Compute Engine VM instance is gce_instance. For a list of types, see Monitoring resource types (https://cloud.google.com/monitoring/api/resources) and Logging resource types (https://cloud.google.com/logging/docs/api/v2/resource-list).
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
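# Illustrative sketch: `type` must name a monitored-resource descriptor and
# `labels` must cover every label that descriptor declares; for the uptime_url
# type those are project_id and host (values below are hypothetical):
#
#     res = UptimeCheckConfigMonitoredResource(
#         type="uptime_url",
#         labels={"project_id": "my-project", "host": "example.com"})
#     assert res.labels["host"] == "example.com"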
@pulumi.output_type
class UptimeCheckConfigResourceGroup(dict):
def __init__(__self__, *,
group_id: Optional[str] = None,
resource_type: Optional[str] = None):
"""
:param str group_id: The group of resources being monitored. Should be the `name` of a group
:param str resource_type: The resource type of the group members.
Possible values are `RESOURCE_TYPE_UNSPECIFIED`, `INSTANCE`, and `AWS_ELB_LOAD_BALANCER`.
"""
if group_id is not None:
pulumi.set(__self__, "group_id", group_id)
if resource_type is not None:
pulumi.set(__self__, "resource_type", resource_type)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[str]:
"""
The group of resources being monitored. Should be the `name` of a group
"""
return pulumi.get(self, "group_id")
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> Optional[str]:
"""
The resource type of the group members.
Possible values are `RESOURCE_TYPE_UNSPECIFIED`, `INSTANCE`, and `AWS_ELB_LOAD_BALANCER`.
"""
return pulumi.get(self, "resource_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class UptimeCheckConfigTcpCheck(dict):
def __init__(__self__, *,
port: float):
"""
:param float port: The port to run the check against. Will be combined with host (specified within the MonitoredResource) to construct the full URL.
"""
pulumi.set(__self__, "port", port)
@property
@pulumi.getter
def port(self) -> float:
"""
The port to run the check against. It will be combined with the host (specified within the MonitoredResource) to construct the full URL.
"""
return pulumi.get(self, "port")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class GetAppEngineServiceTelemetryResult(dict):
def __init__(__self__, *,
resource_name: str):
pulumi.set(__self__, "resource_name", resource_name)
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> str:
return pulumi.get(self, "resource_name")
@pulumi.output_type
class GetNotificationChannelSensitiveLabelResult(dict):
def __init__(__self__, *,
auth_token: str,
password: str,
service_key: str):
pulumi.set(__self__, "auth_token", auth_token)
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "service_key", service_key)
@property
@pulumi.getter(name="authToken")
def auth_token(self) -> str:
return pulumi.get(self, "auth_token")
@property
@pulumi.getter
def password(self) -> str:
return pulumi.get(self, "password")
@property
@pulumi.getter(name="serviceKey")
def service_key(self) -> str:
return pulumi.get(self, "service_key")
@pulumi.output_type
class GetUptimeCheckIPsUptimeCheckIpResult(dict):
def __init__(__self__, *,
ip_address: str,
location: str,
region: str):
"""
:param str ip_address: The IP address from which the Uptime check originates. This is a fully specified IP address
(not an IP address range). Most of these addresses are currently in IPv4 format, but callers should not rely on
that indefinitely and should be prepared to interpret this field as either IPv4 or IPv6.
:param str location: A more specific location within the region that typically encodes a particular city/town/metro
(and its containing state/province or country) within the broader umbrella region category.
:param str region: A broad region category in which the IP address is located.
"""
pulumi.set(__self__, "ip_address", ip_address)
pulumi.set(__self__, "location", location)
pulumi.set(__self__, "region", region)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> str:
"""
The IP address from which the Uptime check originates. This is a fully specified IP address
(not an IP address range). Most of these addresses are currently in IPv4 format, but callers should not rely on
that indefinitely and should be prepared to interpret this field as either IPv4 or IPv6.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter
def location(self) -> str:
"""
A more specific location within the region that typically encodes a particular city/town/metro
(and its containing state/province or country) within the broader umbrella region category.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def region(self) -> str:
"""
A broad region category in which the IP address is located.
"""
return pulumi.get(self, "region")
| 45.764362
| 525
| 0.650744
| 15,792
| 133,037
| 5.339856
| 0.049139
| 0.02514
| 0.017729
| 0.025911
| 0.858468
| 0.844427
| 0.829402
| 0.817935
| 0.812551
| 0.807476
| 0
| 0.005664
| 0.282042
| 133,037
| 2,906
| 526
| 45.78011
| 0.877203
| 0.561438
| 0
| 0.636364
| 1
| 0
| 0.161209
| 0.090825
| 0
| 0
| 0
| 0
| 0
| 1
| 0.189638
| false
| 0.012708
| 0.005865
| 0.043011
| 0.385142
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
716c87a6d61bfbee392796ad887f043a091b5bde
| 31
|
py
|
Python
|
tests/samples/issue-274-support-one-package-without-package-dir/hello/__init__.py
|
oiffrig/scikit-build
|
4e2928d93ba275f5cfc3837c174c25e6c4a73ac0
|
[
"MIT"
] | 1
|
2021-12-14T18:49:49.000Z
|
2021-12-14T18:49:49.000Z
|
tests/samples/issue-274-support-one-package-without-package-dir/hello/__init__.py
|
oiffrig/scikit-build
|
4e2928d93ba275f5cfc3837c174c25e6c4a73ac0
|
[
"MIT"
] | null | null | null |
tests/samples/issue-274-support-one-package-without-package-dir/hello/__init__.py
|
oiffrig/scikit-build
|
4e2928d93ba275f5cfc3837c174c25e6c4a73ac0
|
[
"MIT"
] | 1
|
2021-11-12T01:03:02.000Z
|
2021-11-12T01:03:02.000Z
|
def who():
return "world"
| 7.75
| 18
| 0.548387
| 4
| 31
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.290323
| 31
| 3
| 19
| 10.333333
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
71b01bb3c5131271edbf4be4e3dcecf4067eb3a6
| 12,130
|
py
|
Python
|
tests/base/test_transforms.py
|
jungr-ait/spatialmath-python
|
140d499e733ed9775762df90d36e4b2c4c2fc6eb
|
[
"MIT"
] | 183
|
2020-04-24T02:49:36.000Z
|
2022-03-31T16:13:38.000Z
|
tests/base/test_transforms.py
|
jungr-ait/spatialmath-python
|
140d499e733ed9775762df90d36e4b2c4c2fc6eb
|
[
"MIT"
] | 29
|
2020-05-21T04:13:33.000Z
|
2022-02-15T12:46:17.000Z
|
tests/base/test_transforms.py
|
jungr-ait/spatialmath-python
|
140d499e733ed9775762df90d36e4b2c4c2fc6eb
|
[
"MIT"
] | 39
|
2020-05-06T11:22:55.000Z
|
2022-03-21T14:15:16.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 10 14:19:04 2020
@author: corkep
"""
import numpy as np
import numpy.testing as nt
import unittest
from math import pi
import math
from scipy.linalg import logm, expm
from spatialmath.base import *
from spatialmath.base import sym
import matplotlib.pyplot as plt
class TestLie(unittest.TestCase):
def test_vex(self):
S = np.array([[0, -3], [3, 0]])
nt.assert_array_almost_equal(vex(S), np.array([3]))
nt.assert_array_almost_equal(vex(-S), np.array([-3]))
S = np.array([[0, -3, 2], [3, 0, -1], [-2, 1, 0]])
nt.assert_array_almost_equal(vex(S), np.array([1, 2, 3]))
nt.assert_array_almost_equal(vex(-S), -np.array([1, 2, 3]))
def test_skew(self):
R = skew(3)
nt.assert_equal(isrot2(R, check=False), True) # check size
nt.assert_array_almost_equal(np.linalg.norm(R.T + R), 0) # check is skew
nt.assert_array_almost_equal(
vex(R), np.array([3])
) # check contents, vex already verified
R = skew([1, 2, 3])
nt.assert_equal(isrot(R, check=False), True) # check size
nt.assert_array_almost_equal(np.linalg.norm(R.T + R), 0) # check is skew
nt.assert_array_almost_equal(
vex(R), np.array([1, 2, 3])
) # check contents, vex already verified
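# Hedged aside (not an original test): skew(v) is the matrix form of the
# cross product, so for any 3-vectors v and u, skew(v) @ u == np.cross(v, u).
# Quick sanity check under that identity:
# v, u = np.r_[1, 2, 3], np.r_[4, 5, 6]
# assert np.allclose(skew(v) @ u, np.cross(v, u))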
def test_vexa(self):
S = np.array([[0, -3, 1], [3, 0, 2], [0, 0, 0]])
nt.assert_array_almost_equal(vexa(S), np.array([1, 2, 3]))
S = np.array([[0, 3, -1], [-3, 0, 2], [0, 0, 0]])
nt.assert_array_almost_equal(vexa(S), np.array([-1, 2, -3]))
S = np.array([[0, -6, 5, 1], [6, 0, -4, 2], [-5, 4, 0, 3], [0, 0, 0, 0]])
nt.assert_array_almost_equal(vexa(S), np.array([1, 2, 3, 4, 5, 6]))
S = np.array([[0, 6, 5, 1], [-6, 0, 4, -2], [-5, -4, 0, 3], [0, 0, 0, 0]])
nt.assert_array_almost_equal(vexa(S), np.array([1, -2, 3, -4, 5, -6]))
def test_skewa(self):
T = skewa([3, 4, 5])
nt.assert_equal(ishom2(T, check=False), True) # check size
R = t2r(T)
nt.assert_equal(np.linalg.norm(R.T + R), 0) # check is skew
nt.assert_array_almost_equal(
vexa(T), np.array([3, 4, 5])
) # check contents, vexa already verified
T = skewa([1, 2, 3, 4, 5, 6])
nt.assert_equal(ishom(T, check=False), True) # check size
R = t2r(T)
nt.assert_equal(np.linalg.norm(R.T + R), 0) # check is skew
nt.assert_array_almost_equal(
vexa(T), np.array([1, 2, 3, 4, 5, 6])
) # check contents, vexa already verified
def test_trlog(self):
# %%% SO(3) tests
# zero rotation case
nt.assert_array_almost_equal(trlog(np.eye(3)), skew([0, 0, 0]))
nt.assert_array_almost_equal(trlog(np.eye(3), twist=True), np.r_[0, 0, 0])
# rotation by pi case
nt.assert_array_almost_equal(trlog(rotx(pi)), skew([pi, 0, 0]))
nt.assert_array_almost_equal(trlog(roty(pi)), skew([0, pi, 0]))
nt.assert_array_almost_equal(trlog(rotz(pi)), skew([0, 0, pi]))
nt.assert_array_almost_equal(trlog(rotx(pi), twist=True), np.r_[pi, 0, 0])
nt.assert_array_almost_equal(trlog(roty(pi), twist=True), np.r_[0, pi, 0])
nt.assert_array_almost_equal(trlog(rotz(pi), twist=True), np.r_[0, 0, pi])
# general case
nt.assert_array_almost_equal(trlog(rotx(0.2)), skew([0.2, 0, 0]))
nt.assert_array_almost_equal(trlog(roty(0.3)), skew([0, 0.3, 0]))
nt.assert_array_almost_equal(trlog(rotz(0.4)), skew([0, 0, 0.4]))
nt.assert_array_almost_equal(trlog(rotx(0.2), twist=True), np.r_[0.2, 0, 0])
nt.assert_array_almost_equal(trlog(roty(0.3), twist=True), np.r_[0, 0.3, 0])
nt.assert_array_almost_equal(trlog(rotz(0.4), twist=True), np.r_[0, 0, 0.4])
R = rotx(0.2) @ roty(0.3) @ rotz(0.4)
nt.assert_array_almost_equal(trlog(R), logm(R))
nt.assert_array_almost_equal(trlog(R, twist=True), vex(logm(R)))
# SE(3) tests
# pure translation
nt.assert_array_almost_equal(
trlog(transl([1, 2, 3])),
np.array([[0, 0, 0, 1], [0, 0, 0, 2], [0, 0, 0, 3], [0, 0, 0, 0]]),
)
nt.assert_array_almost_equal(
trlog(transl([1, 2, 3]), twist=True), np.r_[1, 2, 3, 0, 0, 0]
)
# pure rotation
# rotation by pi case
nt.assert_array_almost_equal(trlog(trotx(pi)), skewa([0, 0, 0, pi, 0, 0]))
nt.assert_array_almost_equal(trlog(troty(pi)), skewa([0, 0, 0, 0, pi, 0]))
nt.assert_array_almost_equal(trlog(trotz(pi)), skewa([0, 0, 0, 0, 0, pi]))
nt.assert_array_almost_equal(
trlog(trotx(pi), twist=True), np.r_[0, 0, 0, pi, 0, 0]
)
nt.assert_array_almost_equal(
trlog(troty(pi), twist=True), np.r_[0, 0, 0, 0, pi, 0]
)
nt.assert_array_almost_equal(
trlog(trotz(pi), twist=True), np.r_[0, 0, 0, 0, 0, pi]
)
# general case
nt.assert_array_almost_equal(trlog(trotx(0.2)), skewa([0, 0, 0, 0.2, 0, 0]))
nt.assert_array_almost_equal(trlog(troty(0.3)), skewa([0, 0, 0, 0, 0.3, 0]))
nt.assert_array_almost_equal(trlog(trotz(0.4)), skewa([0, 0, 0, 0, 0, 0.4]))
nt.assert_array_almost_equal(
trlog(trotx(0.2), twist=True), np.r_[0, 0, 0, 0.2, 0, 0]
)
nt.assert_array_almost_equal(
trlog(troty(0.3), twist=True), np.r_[0, 0, 0, 0, 0.3, 0]
)
nt.assert_array_almost_equal(
trlog(trotz(0.4), twist=True), np.r_[0, 0, 0, 0, 0, 0.4]
)
# mixture
T = transl([1, 2, 3]) @ trotx(0.3)
nt.assert_array_almost_equal(trlog(T), logm(T))
nt.assert_array_almost_equal(trlog(T, twist=True), vexa(logm(T)))
T = transl([1, 2, 3]) @ troty(0.3)
nt.assert_array_almost_equal(trlog(T), logm(T))
nt.assert_array_almost_equal(trlog(T, twist=True), vexa(logm(T)))
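# Hedged summary of the twist flag exercised above (not an original comment):
# trlog(R, twist=True) == vex(trlog(R)) for R in SO(3), and
# trlog(T, twist=True) == vexa(trlog(T)) for T in SE(3);
# i.e. twist=True unpacks the matrix logarithm into its vector form.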
# def test_trlog2(self):
# #%%% SO(2) tests
# # zero rotation case
# nt.assert_array_almost_equal(trlog2(np.eye(2)), skew([0]))
# # rotation by pi case
# nt.assert_array_almost_equal(trlog2(rot2(pi)), skew([pi]))
# # general case
# nt.assert_array_almost_equal(trlog2(rot2(0.2)), skew([0.2]))
# #%% SE(2) tests
# # pure translation
# nt.assert_array_almost_equal(trlog2(transl2([1, 2])), np.array([[0, 0, 1], [0, 0, 2], [0, 0, 0]]))
# # pure rotation
# # rotation by pi case
# nt.assert_array_almost_equal(trlog2(trot2(pi)), skewa([0, 0, pi]))
# # general case
# nt.assert_array_almost_equal(trlog2(trot2(0.2)), skewa([0, 0, 0.2]))
# # mixture
# T = transl2([1, 2]) @ trot2(0.3)
# nt.assert_array_almost_equal(trlog2(T), logm(T))
# TODO
def test_trexp(self):
# %% SO(3) tests
# % so(3)
# zero rotation case
nt.assert_array_almost_equal(trexp(skew([0, 0, 0])), np.eye(3))
nt.assert_array_almost_equal(trexp([0, 0, 0]), np.eye(3))
# % so(3), theta
# rotation by pi case
nt.assert_array_almost_equal(trexp(skew([pi, 0, 0])), rotx(pi))
nt.assert_array_almost_equal(trexp(skew([0, pi, 0])), roty(pi))
nt.assert_array_almost_equal(trexp(skew([0, 0, pi])), rotz(pi))
# general case
nt.assert_array_almost_equal(trexp(skew([0.2, 0, 0])), rotx(0.2))
nt.assert_array_almost_equal(trexp(skew([0, 0.3, 0])), roty(0.3))
nt.assert_array_almost_equal(trexp(skew([0, 0, 0.4])), rotz(0.4))
nt.assert_array_almost_equal(trexp(skew([1, 0, 0]), 0.2), rotx(0.2))
nt.assert_array_almost_equal(trexp(skew([0, 1, 0]), 0.3), roty(0.3))
nt.assert_array_almost_equal(trexp(skew([0, 0, 1]), 0.4), rotz(0.4))
nt.assert_array_almost_equal(trexp([1, 0, 0], 0.2), rotx(0.2))
nt.assert_array_almost_equal(trexp([0, 1, 0], 0.3), roty(0.3))
nt.assert_array_almost_equal(trexp([0, 0, 1], 0.4), rotz(0.4))
nt.assert_array_almost_equal(trexp(np.r_[1, 0, 0] * 0.2), rotx(0.2))
nt.assert_array_almost_equal(trexp(np.r_[0, 1, 0] * 0.3), roty(0.3))
nt.assert_array_almost_equal(trexp(np.r_[0, 0, 1] * 0.4), rotz(0.4))
# %% SE(3) tests
# zero motion case
nt.assert_array_almost_equal(trexp(skewa([0, 0, 0, 0, 0, 0])), np.eye(4))
nt.assert_array_almost_equal(trexp([0, 0, 0, 0, 0, 0]), np.eye(4))
# % sigma = se(3)
# pure translation
nt.assert_array_almost_equal(
trexp(skewa([1, 2, 3, 0, 0, 0])), transl([1, 2, 3])
)
nt.assert_array_almost_equal(trexp(skewa([0, 0, 0, 0.2, 0, 0])), trotx(0.2))
nt.assert_array_almost_equal(trexp(skewa([0, 0, 0, 0, 0.3, 0])), troty(0.3))
nt.assert_array_almost_equal(trexp(skewa([0, 0, 0, 0, 0, 0.4])), trotz(0.4))
nt.assert_array_almost_equal(trexp([1, 2, 3, 0, 0, 0]), transl([1, 2, 3]))
nt.assert_array_almost_equal(trexp([0, 0, 0, 0.2, 0, 0]), trotx(0.2))
nt.assert_array_almost_equal(trexp([0, 0, 0, 0, 0.3, 0]), troty(0.3))
nt.assert_array_almost_equal(trexp([0, 0, 0, 0, 0, 0.4]), trotz(0.4))
# mixture
S = skewa([1, 2, 3, 0.1, -0.2, 0.3])
nt.assert_array_almost_equal(trexp(S), expm(S))
# twist vector
# nt.assert_array_almost_equal(trexp( double(Twist(T))), T)
# (sigma, theta)
nt.assert_array_almost_equal(
trexp(skewa([1, 0, 0, 0, 0, 0]), 2), transl([2, 0, 0])
)
nt.assert_array_almost_equal(
trexp(skewa([0, 1, 0, 0, 0, 0]), 2), transl([0, 2, 0])
)
nt.assert_array_almost_equal(
trexp(skewa([0, 0, 1, 0, 0, 0]), 2), transl([0, 0, 2])
)
nt.assert_array_almost_equal(trexp(skewa([0, 0, 0, 1, 0, 0]), 0.2), trotx(0.2))
nt.assert_array_almost_equal(trexp(skewa([0, 0, 0, 0, 1, 0]), 0.2), troty(0.2))
nt.assert_array_almost_equal(trexp(skewa([0, 0, 0, 0, 0, 1]), 0.2), trotz(0.2))
# (twist, theta)
# nt.assert_array_almost_equal(trexp(Twist('R', [1, 0, 0], [0, 0, 0]).S, 0.3), trotx(0.3))
T = transl([1, 2, 3]) @ trotz(0.3)
nt.assert_array_almost_equal(trexp(trlog(T)), T)
def test_trexp2(self):
# % so(2)
# zero rotation case
nt.assert_array_almost_equal(trexp2(skew([0])), np.eye(2))
nt.assert_array_almost_equal(trexp2(skew(0)), np.eye(2))
# % so(2), theta
# rotation by pi case
nt.assert_array_almost_equal(trexp2(skew(pi)), rot2(pi))
# general case
nt.assert_array_almost_equal(trexp2(skew(0.2)), rot2(0.2))
nt.assert_array_almost_equal(trexp2(1, 0.2), rot2(0.2))
# %% SE(2) tests
# % sigma = se(2)
# pure translation
nt.assert_array_almost_equal(trexp2(skewa([1, 2, 0])), transl2([1, 2]))
nt.assert_array_almost_equal(trexp2([0, 0, 0.2]), trot2(0.2))
# mixture
S = skewa([1, 2, 0.3])
nt.assert_array_almost_equal(trexp2(S), expm(S))
# twist vector
# nt.assert_array_almost_equal(trexp( double(Twist(T))), T)
# (sigma, theta)
nt.assert_array_almost_equal(trexp2(skewa([1, 0, 0]), 2), transl2([2, 0]))
nt.assert_array_almost_equal(trexp2(skewa([0, 1, 0]), 2), transl2([0, 2]))
nt.assert_array_almost_equal(trexp2(skewa([0, 0, 1]), 0.2), trot2(0.2))
# (twist, theta)
# nt.assert_array_almost_equal(trexp(Twist('R', [1, 0, 0], [0, 0, 0]).S, 0.3), trotx(0.3))
# T = transl2([1, 2]) @ trot2(0.3)
# nt.assert_array_almost_equal(trexp2(trlog2(T)), T)
# TODO
def test_trnorm(self):
T0 = transl(-1, -2, -3) @ trotx(-0.3)
nt.assert_array_almost_equal(trnorm(T0), T0)
# ---------------------------------------------------------------------------------------#
if __name__ == "__main__":
unittest.main()
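# Operational note (added, not original): the suite can also be run from the
# repository root without invoking this file directly, e.g.
#   python -m unittest tests.base.test_transforms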
| 36.981707
| 112
| 0.55911
| 2,013
| 12,130
| 3.189767
| 0.056632
| 0.058558
| 0.216633
| 0.316617
| 0.873696
| 0.840991
| 0.802834
| 0.783679
| 0.691637
| 0.600997
| 0
| 0.080541
| 0.249711
| 12,130
| 327
| 113
| 37.094801
| 0.624986
| 0.174773
| 0
| 0.150289
| 0
| 0
| 0.000807
| 0
| 0
| 0
| 0
| 0.003058
| 0.583815
| 1
| 0.046243
| false
| 0
| 0.052023
| 0
| 0.104046
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
71daba4de4d549e6550cce3856bb2eb6be18b156
| 93
|
py
|
Python
|
example-files/python.py
|
FredHappyface/Android.FHCode
|
1e412c76497b9a5efb336aa5096a2adc6e9cc3cb
|
[
"MIT"
] | 10
|
2021-11-28T11:37:37.000Z
|
2022-03-24T03:18:51.000Z
|
example-files/python.py
|
FredHappyface/Android.FHCode
|
1e412c76497b9a5efb336aa5096a2adc6e9cc3cb
|
[
"MIT"
] | 1
|
2021-12-15T12:39:48.000Z
|
2021-12-15T12:39:48.000Z
|
example-files/python.py
|
FredHappyface/Android.FHCode
|
1e412c76497b9a5efb336aa5096a2adc6e9cc3cb
|
[
"MIT"
] | 1
|
2021-12-14T19:42:24.000Z
|
2021-12-14T19:42:24.000Z
|
def add(a: int, b: int = 0):
return a + b
print(add(1)) # Prints 1
print(add(1, 2)) # Prints 3
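# A small added illustration: keyword arguments work as usual here.
print(add(a=2, b=3)) # Prints 5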
| 15.5
| 27
| 0.602151
| 21
| 93
| 2.666667
| 0.571429
| 0.285714
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0.193548
| 93
| 5
| 28
| 18.6
| 0.666667
| 0.182796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 7
|
e0a9aa5bf553b1ded04dbe884a0de9574bbd236d
| 36,529
|
py
|
Python
|
purity_fb/purity_fb_1dot12/apis/object_store_users_api.py
|
tlewis-ps/purity_fb_python_client
|
652835cbd485c95a86da27f8b661679727ec6ea0
|
[
"Apache-2.0"
] | null | null | null |
purity_fb/purity_fb_1dot12/apis/object_store_users_api.py
|
tlewis-ps/purity_fb_python_client
|
652835cbd485c95a86da27f8b661679727ec6ea0
|
[
"Apache-2.0"
] | null | null | null |
purity_fb/purity_fb_1dot12/apis/object_store_users_api.py
|
tlewis-ps/purity_fb_python_client
|
652835cbd485c95a86da27f8b661679727ec6ea0
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Pure Storage FlashBlade REST 1.12 Python SDK
Pure Storage FlashBlade REST 1.12 Python SDK. Compatible with REST API versions 1.0 - 1.12. Developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentation can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.12
Contact: info@purestorage.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ObjectStoreUsersApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def add_object_store_users_object_store_access_policies(self, **kwargs):
"""
Add a policy to an object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_object_store_users_object_store_access_policies(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] policy_ids: A comma-separated list of policy IDs. This cannot be provided together with the policy names query parameters.
:param list[str] policy_names: A comma-separated list of policy names. This cannot be provided together with the policy ids query parameters.
:param list[str] member_ids: A comma-separated list of member ids. This cannot be provided together with the member names query parameters.
:param list[str] member_names: A comma-separated list of member names. This cannot be provided together with the member ids query parameters.
:return: ObjectStoreUserObjectStoreAccessPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_object_store_users_object_store_access_policies_with_http_info(**kwargs)
else:
(data) = self.add_object_store_users_object_store_access_policies_with_http_info(**kwargs)
return data
def add_object_store_users_object_store_access_policies_with_http_info(self, **kwargs):
"""
Add a policy to an object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_object_store_users_object_store_access_policies_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] policy_ids: A comma-separated list of policy IDs. This cannot be provided together with the policy names query parameters.
:param list[str] policy_names: A comma-separated list of policy names. This cannot be provided together with the policy ids query parameters.
:param list[str] member_ids: A comma-separated list of member ids. This cannot be provided together with the member names query parameters.
:param list[str] member_names: A comma-separated list of member names. This cannot be provided together with the member ids query parameters.
:return: ObjectStoreUserObjectStoreAccessPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_ids', 'policy_names', 'member_ids', 'member_names']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_object_store_users_object_store_access_policies" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'policy_ids' in params:
query_params.append(('policy_ids', params['policy_ids']))
collection_formats['policy_ids'] = 'csv'
if 'policy_names' in params:
query_params.append(('policy_names', params['policy_names']))
collection_formats['policy_names'] = 'csv'
if 'member_ids' in params:
query_params.append(('member_ids', params['member_ids']))
collection_formats['member_ids'] = 'csv'
if 'member_names' in params:
query_params.append(('member_names', params['member_names']))
collection_formats['member_names'] = 'csv'
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.12/object-store-users/object-store-access-policies', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ObjectStoreUserObjectStoreAccessPolicyResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
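# Hedged usage sketch (not generated code); `api` is assumed to be an
# ObjectStoreUsersApi instance, and the member/policy names are placeholders.
# Synchronous call:
#   resp = api.add_object_store_users_object_store_access_policies(
#       member_names=['account/user1'], policy_names=['pure:policy/full-access'])
# Asynchronous call; returns the request thread and delivers the response to
# the callback:
#   thread = api.add_object_store_users_object_store_access_policies(
#       member_names=['account/user1'], policy_names=['pure:policy/full-access'],
#       callback=lambda resp: print(resp))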
def create_object_store_users(self, **kwargs):
"""
Create a new object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_object_store_users(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
:param bool full_access: Specifies whether the object store user will be created with full permissions.
:return: ObjectStoreUserResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_object_store_users_with_http_info(**kwargs)
else:
(data) = self.create_object_store_users_with_http_info(**kwargs)
return data
def create_object_store_users_with_http_info(self, **kwargs):
"""
Create a new object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_object_store_users_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
:param bool full_access: Specifies whether the object store user will be created with full permissions.
:return: ObjectStoreUserResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['names', 'full_access']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_object_store_users" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
if 'full_access' in params:
query_params.append(('full_access', params['full_access']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.12/object-store-users', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ObjectStoreUserResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_object_store_users(self, **kwargs):
"""
Delete an object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_object_store_users(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] ids: A comma-separated list of resource IDs. This cannot be provided together with the name or names query parameters.
:param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_object_store_users_with_http_info(**kwargs)
else:
(data) = self.delete_object_store_users_with_http_info(**kwargs)
return data
def delete_object_store_users_with_http_info(self, **kwargs):
"""
Delete an object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_object_store_users_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] ids: A comma-separated list of resource IDs. This cannot be provided together with the name or names query parameters.
:param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ids', 'names']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_object_store_users" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'ids' in params:
query_params.append(('ids', params['ids']))
collection_formats['ids'] = 'csv'
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.12/object-store-users', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_object_store_users(self, **kwargs):
"""
List object store users.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_object_store_users(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: The filter to be used for query.
:param list[str] ids: A comma-separated list of resource IDs. This cannot be provided together with the name or names query parameters.
:param int limit: The maximum number of items to return. Must be >= 0.
:param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
:param str sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name).
:param int start: The offset of the first resource to return from a collection.
:param str token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
:return: ObjectStoreUserResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_object_store_users_with_http_info(**kwargs)
else:
(data) = self.list_object_store_users_with_http_info(**kwargs)
return data
def list_object_store_users_with_http_info(self, **kwargs):
"""
List object store users.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_object_store_users_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: The filter to be used for query.
:param list[str] ids: A comma-separated list of resource IDs. This cannot be provided together with the name or names query parameters.
:param int limit: The maximum number of items to return. Must be >= 0.
:param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
:param str sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name).
:param int start: The offset of the first resource to return from a collection.
:param str token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
:return: ObjectStoreUserResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter', 'ids', 'limit', 'names', 'sort', 'start', 'token']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_object_store_users" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'ids' in params:
query_params.append(('ids', params['ids']))
collection_formats['ids'] = 'csv'
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
if 'sort' in params:
query_params.append(('sort', params['sort']))
if 'start' in params:
query_params.append(('start', params['start']))
if 'token' in params:
query_params.append(('token', params['token']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.12/object-store-users', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ObjectStoreUserResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
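# Hedged pagination sketch (not generated code), assuming the response exposes
# `items` and `pagination_info.continuation_token` as in other purity_fb models:
#   res = api.list_object_store_users(limit=100)
#   while True:
#       for user in res.items:
#           print(user.name)
#       token = res.pagination_info.continuation_token
#       if not token:
#           break
#       res = api.list_object_store_users(limit=100, token=token)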
def list_object_store_users_object_store_access_policies(self, **kwargs):
"""
List object store access policies for object store users.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_object_store_users_object_store_access_policies(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: The filter to be used for query.
:param int limit: The maximum number of items to return. Must be >= 0.
:param list[str] policy_ids: A comma-separated list of policy IDs. This cannot be provided together with the policy names query parameters.
:param list[str] policy_names: A comma-separated list of policy names. This cannot be provided together with the policy ids query parameters.
:param list[str] member_ids: A comma-separated list of member ids. This cannot be provided together with the member names query parameters.
:param list[str] member_names: A comma-separated list of member names. This cannot be provided together with the member ids query parameters.
:param str sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name).
:param int start: The offset of the first resource to return from a collection.
:param str token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
:return: ObjectStoreUserObjectStoreAccessPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_object_store_users_object_store_access_policies_with_http_info(**kwargs)
else:
(data) = self.list_object_store_users_object_store_access_policies_with_http_info(**kwargs)
return data
def list_object_store_users_object_store_access_policies_with_http_info(self, **kwargs):
"""
List object store access policies for object store users.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_object_store_users_object_store_access_policies_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: The filter to be used for query.
:param int limit: The maximum number of items to return. Must be >= 0.
:param list[str] policy_ids: A comma-separated list of policy IDs. This cannot be provided together with the policy names query parameters.
:param list[str] policy_names: A comma-separated list of policy names. This cannot be provided together with the policy ids query parameters.
:param list[str] member_ids: A comma-separated list of member ids. This cannot be provided together with the member names query parameters.
:param list[str] member_names: A comma-separated list of member names. This cannot be provided together with the member ids query parameters.
:param str sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name).
:param int start: The offset of the first resource to return from a collection.
:param str token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
:return: ObjectStoreUserObjectStoreAccessPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter', 'limit', 'policy_ids', 'policy_names', 'member_ids', 'member_names', 'sort', 'start', 'token']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_object_store_users_object_store_access_policies" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'policy_ids' in params:
query_params.append(('policy_ids', params['policy_ids']))
collection_formats['policy_ids'] = 'csv'
if 'policy_names' in params:
query_params.append(('policy_names', params['policy_names']))
collection_formats['policy_names'] = 'csv'
if 'member_ids' in params:
query_params.append(('member_ids', params['member_ids']))
collection_formats['member_ids'] = 'csv'
if 'member_names' in params:
query_params.append(('member_names', params['member_names']))
collection_formats['member_names'] = 'csv'
if 'sort' in params:
query_params.append(('sort', params['sort']))
if 'start' in params:
query_params.append(('start', params['start']))
if 'token' in params:
query_params.append(('token', params['token']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.12/object-store-users/object-store-access-policies', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ObjectStoreUserObjectStoreAccessPolicyResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_object_store_users_object_store_access_policies(self, **kwargs):
"""
Remove a policy from an object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_object_store_users_object_store_access_policies(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] policy_ids: A comma-separated list of policy IDs. This cannot be provided together with the policy names query parameters.
:param list[str] policy_names: A comma-separated list of policy names. This cannot be provided together with the policy ids query parameters.
:param list[str] member_ids: A comma-separated list of member ids. This cannot be provided together with the member names query parameters.
:param list[str] member_names: A comma-separated list of member names. This cannot be provided together with the member ids query parameters.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.remove_object_store_users_object_store_access_policies_with_http_info(**kwargs)
else:
(data) = self.remove_object_store_users_object_store_access_policies_with_http_info(**kwargs)
return data
def remove_object_store_users_object_store_access_policies_with_http_info(self, **kwargs):
"""
Remove a policy from an object store user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_object_store_users_object_store_access_policies_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] policy_ids: A comma-separated list of policy IDs. This cannot be provided together with the policy names query parameters.
:param list[str] policy_names: A comma-separated list of policy names. This cannot be provided together with the policy ids query parameters.
:param list[str] member_ids: A comma-separated list of member ids. This cannot be provided together with the member names query parameters.
:param list[str] member_names: A comma-separated list of member names. This cannot be provided together with the member ids query parameters.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_ids', 'policy_names', 'member_ids', 'member_names']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_object_store_users_object_store_access_policies" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'policy_ids' in params:
query_params.append(('policy_ids', params['policy_ids']))
collection_formats['policy_ids'] = 'csv'
if 'policy_names' in params:
query_params.append(('policy_names', params['policy_names']))
collection_formats['policy_names'] = 'csv'
if 'member_ids' in params:
query_params.append(('member_ids', params['member_ids']))
collection_formats['member_ids'] = 'csv'
if 'member_names' in params:
query_params.append(('member_names', params['member_names']))
collection_formats['member_names'] = 'csv'
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.12/object-store-users/object-store-access-policies', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
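# Hedged bootstrap sketch (not part of the generated file): how this API class
# is typically reached from the top-level client. The host and token below are
# placeholders.
#
# from purity_fb import PurityFb
# fb = PurityFb('flashblade.example.com')
# fb.disable_verify_ssl()
# fb.login('API_TOKEN')  # placeholder API token
# api = fb.object_store_users
# print(api.list_object_store_users(limit=5))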
| 48.511288
| 251
| 0.609132
| 4,056
| 36,529
| 5.272682
| 0.053254
| 0.045263
| 0.038904
| 0.030207
| 0.967222
| 0.961751
| 0.961751
| 0.957636
| 0.948939
| 0.942252
| 0
| 0.001553
| 0.312574
| 36,529
| 752
| 252
| 48.575798
| 0.850106
| 0.403871
| 0
| 0.832891
| 0
| 0
| 0.169995
| 0.050177
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.018568
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0afe065dd78d33d6288d5c254114eeedbd18914
| 139,047
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_subscriber_ipsub_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_subscriber_ipsub_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_subscriber_ipsub_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_subscriber_ipsub_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR subscriber\-ipsub package operational data.
This module contains definitions
for the following management objects\:
ip\-subscriber\: IP subscriber operational data
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class IpsubMaIntfInitiatorData(Enum):
"""
IpsubMaIntfInitiatorData (Enum Class)
Ipsub ma intf initiator data
.. data:: dhcp = 0
Session creation via DHCP discover packet
.. data:: packet_trigger = 1
Session creation via unclassified IPv4 packet
.. data:: invalid_trigger = 2
Invalid Trigger
"""
dhcp = Enum.YLeaf(0, "dhcp")
packet_trigger = Enum.YLeaf(1, "packet-trigger")
invalid_trigger = Enum.YLeaf(2, "invalid-trigger")
class IpsubMaIntfStateData(Enum):
"""
IpsubMaIntfStateData (Enum Class)
Interface states
.. data:: invalid = 0
Invalid state
.. data:: initialized = 1
Initial state
.. data:: session_creation_started = 2
Interface creation started
.. data:: control_policy_executing = 3
Interface created in IM, AAA session start
called
.. data:: control_policy_executed = 4
AAA session created
.. data:: session_features_applied = 5
Interface config activated
.. data:: vrf_configured = 6
Interface address and VRF information received
from IPv4
.. data:: adding_adjacency = 7
VRF configuration received and interface config
activated
.. data:: adjacency_added = 8
Subscriber AIB adjacency added
.. data:: up = 9
Session up
.. data:: down = 10
Session down
.. data:: address_family_down = 11
Session down in progress
.. data:: address_family_down_complete = 12
Session down complete
.. data:: disconnecting = 13
Session teardown in progress
.. data:: disconnected = 14
Session disconnected
.. data:: error = 15
Session in error state
"""
invalid = Enum.YLeaf(0, "invalid")
initialized = Enum.YLeaf(1, "initialized")
session_creation_started = Enum.YLeaf(2, "session-creation-started")
control_policy_executing = Enum.YLeaf(3, "control-policy-executing")
control_policy_executed = Enum.YLeaf(4, "control-policy-executed")
session_features_applied = Enum.YLeaf(5, "session-features-applied")
vrf_configured = Enum.YLeaf(6, "vrf-configured")
adding_adjacency = Enum.YLeaf(7, "adding-adjacency")
adjacency_added = Enum.YLeaf(8, "adjacency-added")
up = Enum.YLeaf(9, "up")
down = Enum.YLeaf(10, "down")
address_family_down = Enum.YLeaf(11, "address-family-down")
address_family_down_complete = Enum.YLeaf(12, "address-family-down-complete")
disconnecting = Enum.YLeaf(13, "disconnecting")
disconnected = Enum.YLeaf(14, "disconnected")
error = Enum.YLeaf(15, "error")
class IpsubMaParentIntfStateData(Enum):
"""
IpsubMaParentIntfStateData (Enum Class)
Parent interface state
.. data:: deleted = 0
Interface being deleted
.. data:: down = 1
Interface operationally down
.. data:: up = 2
Interface up
"""
deleted = Enum.YLeaf(0, "deleted")
down = Enum.YLeaf(1, "down")
up = Enum.YLeaf(2, "up")
class IpsubMaParentIntfVlan(Enum):
"""
IpsubMaParentIntfVlan (Enum Class)
Access interface VLAN type
.. data:: plain = 0
Plain
.. data:: ambiguous = 1
Ambiguous
"""
plain = Enum.YLeaf(0, "plain")
ambiguous = Enum.YLeaf(1, "ambiguous")
class IpSubscriber(Entity):
"""
IP subscriber operational data
.. attribute:: nodes
IP subscriber operational data for a particular location
**type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber, self).__init__()
self._top_entity = None
self.yang_name = "ip-subscriber"
self.yang_parent_name = "Cisco-IOS-XR-subscriber-ipsub-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("nodes", ("nodes", IpSubscriber.Nodes))])
self._leafs = OrderedDict()
self.nodes = IpSubscriber.Nodes()
self.nodes.parent = self
self._children_name_map["nodes"] = "nodes"
self._segment_path = lambda: "Cisco-IOS-XR-subscriber-ipsub-oper:ip-subscriber"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber, [], name, value)
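# Hedged read sketch (not generated code): fetching this operational model with
# ydk-py's CRUD service. The device address and credentials are placeholders.
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#   provider = NetconfServiceProvider(address='10.0.0.1', username='admin', password='admin')
#   ipsub = CRUDService().read(provider, IpSubscriber())
#   for node in ipsub.nodes.node:
#       print(node.node_name)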
class Nodes(Entity):
"""
IP subscriber operational data for a particular
location
.. attribute:: node
Location. For example, 0/1/CPU0
**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes, self).__init__()
self.yang_name = "nodes"
self.yang_parent_name = "ip-subscriber"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("node", ("node", IpSubscriber.Nodes.Node))])
self._leafs = OrderedDict()
self.node = YList(self)
self._segment_path = lambda: "nodes"
self._absolute_path = lambda: "Cisco-IOS-XR-subscriber-ipsub-oper:ip-subscriber/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes, [], name, value)
class Node(Entity):
"""
Location. For example, 0/1/CPU0
.. attribute:: node_name (key)
The node ID to filter on. For example, 0/1/CPU0
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: summary
IP subscriber interface summary
**type**\: :py:class:`Summary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary>`
.. attribute:: interfaces
IP subscriber interface table
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Interfaces>`
.. attribute:: access_interfaces
IP subscriber access interface table
**type**\: :py:class:`AccessInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node, self).__init__()
self.yang_name = "node"
self.yang_parent_name = "nodes"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['node_name']
self._child_classes = OrderedDict([("summary", ("summary", IpSubscriber.Nodes.Node.Summary)), ("interfaces", ("interfaces", IpSubscriber.Nodes.Node.Interfaces)), ("access-interfaces", ("access_interfaces", IpSubscriber.Nodes.Node.AccessInterfaces))])
self._leafs = OrderedDict([
('node_name', (YLeaf(YType.str, 'node-name'), ['str'])),
])
self.node_name = None
self.summary = IpSubscriber.Nodes.Node.Summary()
self.summary.parent = self
self._children_name_map["summary"] = "summary"
self.interfaces = IpSubscriber.Nodes.Node.Interfaces()
self.interfaces.parent = self
self._children_name_map["interfaces"] = "interfaces"
self.access_interfaces = IpSubscriber.Nodes.Node.AccessInterfaces()
self.access_interfaces.parent = self
self._children_name_map["access_interfaces"] = "access-interfaces"
self._segment_path = lambda: "node" + "[node-name='" + str(self.node_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-subscriber-ipsub-oper:ip-subscriber/nodes/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node, ['node_name'], name, value)
class Summary(Entity):
"""
IP subscriber interface summary
.. attribute:: access_interface_summary
Access interface summary statistics
**type**\: :py:class:`AccessInterfaceSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary>`
.. attribute:: interface_counts
Initiator interface counts
**type**\: :py:class:`InterfaceCounts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.InterfaceCounts>`
.. attribute:: vrf
Array of VRFs with IPSUB interfaces
**type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.Vrf>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary, self).__init__()
self.yang_name = "summary"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("access-interface-summary", ("access_interface_summary", IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary)), ("interface-counts", ("interface_counts", IpSubscriber.Nodes.Node.Summary.InterfaceCounts)), ("vrf", ("vrf", IpSubscriber.Nodes.Node.Summary.Vrf))])
self._leafs = OrderedDict()
self.access_interface_summary = IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary()
self.access_interface_summary.parent = self
self._children_name_map["access_interface_summary"] = "access-interface-summary"
self.interface_counts = IpSubscriber.Nodes.Node.Summary.InterfaceCounts()
self.interface_counts.parent = self
self._children_name_map["interface_counts"] = "interface-counts"
self.vrf = YList(self)
self._segment_path = lambda: "summary"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary, [], name, value)
class AccessInterfaceSummary(Entity):
"""
Access interface summary statistics
.. attribute:: initiators
Summary counts per initiator
**type**\: :py:class:`Initiators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators>`
.. attribute:: ipv6_initiators
Summary counts per initiator for ipv6 session
**type**\: :py:class:`Ipv6Initiators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators>`
.. attribute:: interfaces
Number of interfaces with subscriber configuration
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary, self).__init__()
self.yang_name = "access-interface-summary"
self.yang_parent_name = "summary"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("initiators", ("initiators", IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators)), ("ipv6-initiators", ("ipv6_initiators", IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators))])
self._leafs = OrderedDict([
('interfaces', (YLeaf(YType.uint32, 'interfaces'), ['int'])),
])
self.interfaces = None
self.initiators = IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators()
self.initiators.parent = self
self._children_name_map["initiators"] = "initiators"
self.ipv6_initiators = IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators()
self.ipv6_initiators.parent = self
self._children_name_map["ipv6_initiators"] = "ipv6-initiators"
self._segment_path = lambda: "access-interface-summary"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary, [u'interfaces'], name, value)
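# After a successful read (see the sketch following the Node class), the
# summary counters can be inspected as plain attributes. A hedged example,
# assuming 'node' holds a populated Node entry from the read result:
#
#     summ = node.summary.access_interface_summary
#     print(summ.interfaces)                       # interfaces with subscriber config
#     print(summ.initiators.dhcp.fsol_packets)     # IPv4 DHCP first-sign-of-life packets
#     print(summ.ipv6_initiators.dhcp.fsol_bytes)  # IPv6 DHCP FSoL bytes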
class Initiators(Entity):
"""
Summary counts per initiator
.. attribute:: dhcp
DHCP summary statistics
**type**\: :py:class:`Dhcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.Dhcp>`
.. attribute:: packet_trigger
Packet trigger summary statistics
**type**\: :py:class:`PacketTrigger <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.PacketTrigger>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators, self).__init__()
self.yang_name = "initiators"
self.yang_parent_name = "access-interface-summary"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("dhcp", ("dhcp", IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.Dhcp)), ("packet-trigger", ("packet_trigger", IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.PacketTrigger))])
self._leafs = OrderedDict()
self.dhcp = IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.Dhcp()
self.dhcp.parent = self
self._children_name_map["dhcp"] = "dhcp"
self.packet_trigger = IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.PacketTrigger()
self.packet_trigger.parent = self
self._children_name_map["packet_trigger"] = "packet-trigger"
self._segment_path = lambda: "initiators"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators, [], name, value)
class Dhcp(Entity):
"""
DHCP summary statistics
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol
**type**\: int
**range:** 0..4294967295
**units**\: byte
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.Dhcp, self).__init__()
self.yang_name = "dhcp"
self.yang_parent_name = "initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
])
self.fsol_packets = None
self.fsol_bytes = None
self._segment_path = lambda: "dhcp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.Dhcp, [u'fsol_packets', u'fsol_bytes'], name, value)
class PacketTrigger(Entity):
"""
Packet trigger summary statistics
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol
**type**\: int
**range:** 0..4294967295
**units**\: byte
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.PacketTrigger, self).__init__()
self.yang_name = "packet-trigger"
self.yang_parent_name = "initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
])
self.fsol_packets = None
self.fsol_bytes = None
self._segment_path = lambda: "packet-trigger"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Initiators.PacketTrigger, [u'fsol_packets', u'fsol_bytes'], name, value)
class Ipv6Initiators(Entity):
"""
Summary counts per initiator for IPv6 sessions
.. attribute:: dhcp
DHCP summary statistics
**type**\: :py:class:`Dhcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.Dhcp>`
.. attribute:: packet_trigger
Packet trigger summary statistics
**type**\: :py:class:`PacketTrigger <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.PacketTrigger>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators, self).__init__()
self.yang_name = "ipv6-initiators"
self.yang_parent_name = "access-interface-summary"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("dhcp", ("dhcp", IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.Dhcp)), ("packet-trigger", ("packet_trigger", IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.PacketTrigger))])
self._leafs = OrderedDict()
self.dhcp = IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.Dhcp()
self.dhcp.parent = self
self._children_name_map["dhcp"] = "dhcp"
self.packet_trigger = IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.PacketTrigger()
self.packet_trigger.parent = self
self._children_name_map["packet_trigger"] = "packet-trigger"
self._segment_path = lambda: "ipv6-initiators"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators, [], name, value)
class Dhcp(Entity):
"""
DHCP summary statistics
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol
**type**\: int
**range:** 0..4294967295
**units**\: byte
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.Dhcp, self).__init__()
self.yang_name = "dhcp"
self.yang_parent_name = "ipv6-initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
])
self.fsol_packets = None
self.fsol_bytes = None
self._segment_path = lambda: "dhcp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.Dhcp, [u'fsol_packets', u'fsol_bytes'], name, value)
class PacketTrigger(Entity):
"""
Packet trigger summary statistics
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol
**type**\: int
**range:** 0..4294967295
**units**\: byte
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.PacketTrigger, self).__init__()
self.yang_name = "packet-trigger"
self.yang_parent_name = "ipv6-initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
])
self.fsol_packets = None
self.fsol_bytes = None
self._segment_path = lambda: "packet-trigger"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.AccessInterfaceSummary.Ipv6Initiators.PacketTrigger, [u'fsol_packets', u'fsol_bytes'], name, value)
class InterfaceCounts(Entity):
"""
Initiator interface counts
.. attribute:: initiators
Initiators
**type**\: :py:class:`Initiators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators>`
.. attribute:: ipv6_initiators
IPv6 Initiators
**type**\: :py:class:`Ipv6Initiators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.InterfaceCounts, self).__init__()
self.yang_name = "interface-counts"
self.yang_parent_name = "summary"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("initiators", ("initiators", IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators)), ("ipv6-initiators", ("ipv6_initiators", IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators))])
self._leafs = OrderedDict()
self.initiators = IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators()
self.initiators.parent = self
self._children_name_map["initiators"] = "initiators"
self.ipv6_initiators = IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators()
self.ipv6_initiators.parent = self
self._children_name_map["ipv6_initiators"] = "ipv6-initiators"
self._segment_path = lambda: "interface-counts"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.InterfaceCounts, [], name, value)
class Initiators(Entity):
"""
Initiators
.. attribute:: dhcp
DHCP
**type**\: :py:class:`Dhcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.Dhcp>`
.. attribute:: packet_trigger
Packet trigger
**type**\: :py:class:`PacketTrigger <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.PacketTrigger>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators, self).__init__()
self.yang_name = "initiators"
self.yang_parent_name = "interface-counts"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("dhcp", ("dhcp", IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.Dhcp)), ("packet-trigger", ("packet_trigger", IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.PacketTrigger))])
self._leafs = OrderedDict()
self.dhcp = IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.Dhcp()
self.dhcp.parent = self
self._children_name_map["dhcp"] = "dhcp"
self.packet_trigger = IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.PacketTrigger()
self.packet_trigger.parent = self
self._children_name_map["packet_trigger"] = "packet-trigger"
self._segment_path = lambda: "initiators"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators, [], name, value)
class Dhcp(Entity):
"""
DHCP
.. attribute:: invalid
Invalid
**type**\: int
**range:** 0..4294967295
.. attribute:: initialized
Initialized
**type**\: int
**range:** 0..4294967295
.. attribute:: session_creation_started
Session creation started
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executing
Control policy executing
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executed
Control policy executed
**type**\: int
**range:** 0..4294967295
.. attribute:: session_features_applied
Session features applied
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_configured
VRF configured
**type**\: int
**range:** 0..4294967295
.. attribute:: adding_adjacency
Adding adjacency
**type**\: int
**range:** 0..4294967295
.. attribute:: adjacency_added
Adjacency added
**type**\: int
**range:** 0..4294967295
.. attribute:: up
Up
**type**\: int
**range:** 0..4294967295
.. attribute:: down
Down
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnecting
Disconnecting
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnected
Disconnected
**type**\: int
**range:** 0..4294967295
.. attribute:: error
Error
**type**\: int
**range:** 0..4294967295
.. attribute:: total_interfaces
Total number of interfaces in all states
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.Dhcp, self).__init__()
self.yang_name = "dhcp"
self.yang_parent_name = "initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('invalid', (YLeaf(YType.uint32, 'invalid'), ['int'])),
('initialized', (YLeaf(YType.uint32, 'initialized'), ['int'])),
('session_creation_started', (YLeaf(YType.uint32, 'session-creation-started'), ['int'])),
('control_policy_executing', (YLeaf(YType.uint32, 'control-policy-executing'), ['int'])),
('control_policy_executed', (YLeaf(YType.uint32, 'control-policy-executed'), ['int'])),
('session_features_applied', (YLeaf(YType.uint32, 'session-features-applied'), ['int'])),
('vrf_configured', (YLeaf(YType.uint32, 'vrf-configured'), ['int'])),
('adding_adjacency', (YLeaf(YType.uint32, 'adding-adjacency'), ['int'])),
('adjacency_added', (YLeaf(YType.uint32, 'adjacency-added'), ['int'])),
('up', (YLeaf(YType.uint32, 'up'), ['int'])),
('down', (YLeaf(YType.uint32, 'down'), ['int'])),
('disconnecting', (YLeaf(YType.uint32, 'disconnecting'), ['int'])),
('disconnected', (YLeaf(YType.uint32, 'disconnected'), ['int'])),
('error', (YLeaf(YType.uint32, 'error'), ['int'])),
('total_interfaces', (YLeaf(YType.uint32, 'total-interfaces'), ['int'])),
])
self.invalid = None
self.initialized = None
self.session_creation_started = None
self.control_policy_executing = None
self.control_policy_executed = None
self.session_features_applied = None
self.vrf_configured = None
self.adding_adjacency = None
self.adjacency_added = None
self.up = None
self.down = None
self.disconnecting = None
self.disconnected = None
self.error = None
self.total_interfaces = None
self._segment_path = lambda: "dhcp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.Dhcp, [u'invalid', u'initialized', u'session_creation_started', u'control_policy_executing', u'control_policy_executed', u'session_features_applied', u'vrf_configured', u'adding_adjacency', u'adjacency_added', u'up', u'down', u'disconnecting', u'disconnected', u'error', u'total_interfaces'], name, value)
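# The per-state counters above partition DHCP-initiated interfaces by session
# state, with total-interfaces covering all states. A hedged cross-check one
# might run on a populated entry (the model itself does not guarantee the sum
# relation; 'dhcp' is assumed to be a read InterfaceCounts.Initiators.Dhcp):
#
#     states = [dhcp.invalid, dhcp.initialized, dhcp.session_creation_started,
#               dhcp.control_policy_executing, dhcp.control_policy_executed,
#               dhcp.session_features_applied, dhcp.vrf_configured,
#               dhcp.adding_adjacency, dhcp.adjacency_added, dhcp.up,
#               dhcp.down, dhcp.disconnecting, dhcp.disconnected, dhcp.error]
#     if dhcp.total_interfaces is not None:
#         print(sum(s or 0 for s in states), 'summed vs total', dhcp.total_interfaces)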
class PacketTrigger(Entity):
"""
Packet trigger
.. attribute:: invalid
Invalid
**type**\: int
**range:** 0..4294967295
.. attribute:: initialized
Initialized
**type**\: int
**range:** 0..4294967295
.. attribute:: session_creation_started
Session creation started
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executing
Control policy executing
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executed
Control policy executed
**type**\: int
**range:** 0..4294967295
.. attribute:: session_features_applied
Session features applied
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_configured
VRF configured
**type**\: int
**range:** 0..4294967295
.. attribute:: adding_adjacency
Adding adjacency
**type**\: int
**range:** 0..4294967295
.. attribute:: adjacency_added
Adjacency added
**type**\: int
**range:** 0..4294967295
.. attribute:: up
Up
**type**\: int
**range:** 0..4294967295
.. attribute:: down
Down
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnecting
Disconnecting
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnected
Disconnected
**type**\: int
**range:** 0..4294967295
.. attribute:: error
Error
**type**\: int
**range:** 0..4294967295
.. attribute:: total_interfaces
Total number of interfaces in all states
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.PacketTrigger, self).__init__()
self.yang_name = "packet-trigger"
self.yang_parent_name = "initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('invalid', (YLeaf(YType.uint32, 'invalid'), ['int'])),
('initialized', (YLeaf(YType.uint32, 'initialized'), ['int'])),
('session_creation_started', (YLeaf(YType.uint32, 'session-creation-started'), ['int'])),
('control_policy_executing', (YLeaf(YType.uint32, 'control-policy-executing'), ['int'])),
('control_policy_executed', (YLeaf(YType.uint32, 'control-policy-executed'), ['int'])),
('session_features_applied', (YLeaf(YType.uint32, 'session-features-applied'), ['int'])),
('vrf_configured', (YLeaf(YType.uint32, 'vrf-configured'), ['int'])),
('adding_adjacency', (YLeaf(YType.uint32, 'adding-adjacency'), ['int'])),
('adjacency_added', (YLeaf(YType.uint32, 'adjacency-added'), ['int'])),
('up', (YLeaf(YType.uint32, 'up'), ['int'])),
('down', (YLeaf(YType.uint32, 'down'), ['int'])),
('disconnecting', (YLeaf(YType.uint32, 'disconnecting'), ['int'])),
('disconnected', (YLeaf(YType.uint32, 'disconnected'), ['int'])),
('error', (YLeaf(YType.uint32, 'error'), ['int'])),
('total_interfaces', (YLeaf(YType.uint32, 'total-interfaces'), ['int'])),
])
self.invalid = None
self.initialized = None
self.session_creation_started = None
self.control_policy_executing = None
self.control_policy_executed = None
self.session_features_applied = None
self.vrf_configured = None
self.adding_adjacency = None
self.adjacency_added = None
self.up = None
self.down = None
self.disconnecting = None
self.disconnected = None
self.error = None
self.total_interfaces = None
self._segment_path = lambda: "packet-trigger"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Initiators.PacketTrigger, [u'invalid', u'initialized', u'session_creation_started', u'control_policy_executing', u'control_policy_executed', u'session_features_applied', u'vrf_configured', u'adding_adjacency', u'adjacency_added', u'up', u'down', u'disconnecting', u'disconnected', u'error', u'total_interfaces'], name, value)
class Ipv6Initiators(Entity):
"""
IPv6 Initiators
.. attribute:: dhcp
DHCP
**type**\: :py:class:`Dhcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.Dhcp>`
.. attribute:: packet_trigger
Packet trigger
**type**\: :py:class:`PacketTrigger <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.PacketTrigger>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators, self).__init__()
self.yang_name = "ipv6-initiators"
self.yang_parent_name = "interface-counts"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("dhcp", ("dhcp", IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.Dhcp)), ("packet-trigger", ("packet_trigger", IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.PacketTrigger))])
self._leafs = OrderedDict()
self.dhcp = IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.Dhcp()
self.dhcp.parent = self
self._children_name_map["dhcp"] = "dhcp"
self.packet_trigger = IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.PacketTrigger()
self.packet_trigger.parent = self
self._children_name_map["packet_trigger"] = "packet-trigger"
self._segment_path = lambda: "ipv6-initiators"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators, [], name, value)
class Dhcp(Entity):
"""
DHCP
.. attribute:: invalid
Invalid
**type**\: int
**range:** 0..4294967295
.. attribute:: initialized
Initialized
**type**\: int
**range:** 0..4294967295
.. attribute:: session_creation_started
Session creation started
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executing
Control policy executing
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executed
Control policy executed
**type**\: int
**range:** 0..4294967295
.. attribute:: session_features_applied
Session features applied
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_configured
VRF configured
**type**\: int
**range:** 0..4294967295
.. attribute:: adding_adjacency
Adding adjacency
**type**\: int
**range:** 0..4294967295
.. attribute:: adjacency_added
Adjacency added
**type**\: int
**range:** 0..4294967295
.. attribute:: up
Up
**type**\: int
**range:** 0..4294967295
.. attribute:: down
Down
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnecting
Disconnecting
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnected
Disconnected
**type**\: int
**range:** 0..4294967295
.. attribute:: error
Error
**type**\: int
**range:** 0..4294967295
.. attribute:: total_interfaces
Total number of interfaces in all states
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.Dhcp, self).__init__()
self.yang_name = "dhcp"
self.yang_parent_name = "ipv6-initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('invalid', (YLeaf(YType.uint32, 'invalid'), ['int'])),
('initialized', (YLeaf(YType.uint32, 'initialized'), ['int'])),
('session_creation_started', (YLeaf(YType.uint32, 'session-creation-started'), ['int'])),
('control_policy_executing', (YLeaf(YType.uint32, 'control-policy-executing'), ['int'])),
('control_policy_executed', (YLeaf(YType.uint32, 'control-policy-executed'), ['int'])),
('session_features_applied', (YLeaf(YType.uint32, 'session-features-applied'), ['int'])),
('vrf_configured', (YLeaf(YType.uint32, 'vrf-configured'), ['int'])),
('adding_adjacency', (YLeaf(YType.uint32, 'adding-adjacency'), ['int'])),
('adjacency_added', (YLeaf(YType.uint32, 'adjacency-added'), ['int'])),
('up', (YLeaf(YType.uint32, 'up'), ['int'])),
('down', (YLeaf(YType.uint32, 'down'), ['int'])),
('disconnecting', (YLeaf(YType.uint32, 'disconnecting'), ['int'])),
('disconnected', (YLeaf(YType.uint32, 'disconnected'), ['int'])),
('error', (YLeaf(YType.uint32, 'error'), ['int'])),
('total_interfaces', (YLeaf(YType.uint32, 'total-interfaces'), ['int'])),
])
self.invalid = None
self.initialized = None
self.session_creation_started = None
self.control_policy_executing = None
self.control_policy_executed = None
self.session_features_applied = None
self.vrf_configured = None
self.adding_adjacency = None
self.adjacency_added = None
self.up = None
self.down = None
self.disconnecting = None
self.disconnected = None
self.error = None
self.total_interfaces = None
self._segment_path = lambda: "dhcp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.Dhcp, [u'invalid', u'initialized', u'session_creation_started', u'control_policy_executing', u'control_policy_executed', u'session_features_applied', u'vrf_configured', u'adding_adjacency', u'adjacency_added', u'up', u'down', u'disconnecting', u'disconnected', u'error', u'total_interfaces'], name, value)
class PacketTrigger(Entity):
"""
Packet trigger
.. attribute:: invalid
Invalid
**type**\: int
**range:** 0..4294967295
.. attribute:: initialized
Initialized
**type**\: int
**range:** 0..4294967295
.. attribute:: session_creation_started
Session creation started
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executing
Control policy executing
**type**\: int
**range:** 0..4294967295
.. attribute:: control_policy_executed
Control policy executed
**type**\: int
**range:** 0..4294967295
.. attribute:: session_features_applied
Session features applied
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_configured
VRF configured
**type**\: int
**range:** 0..4294967295
.. attribute:: adding_adjacency
Adding adjacency
**type**\: int
**range:** 0..4294967295
.. attribute:: adjacency_added
Adjacency added
**type**\: int
**range:** 0..4294967295
.. attribute:: up
Up
**type**\: int
**range:** 0..4294967295
.. attribute:: down
Down
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnecting
Disconnecting
**type**\: int
**range:** 0..4294967295
.. attribute:: disconnected
Disconnected
**type**\: int
**range:** 0..4294967295
.. attribute:: error
Error
**type**\: int
**range:** 0..4294967295
.. attribute:: total_interfaces
Total number of interfaces in all states
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.PacketTrigger, self).__init__()
self.yang_name = "packet-trigger"
self.yang_parent_name = "ipv6-initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('invalid', (YLeaf(YType.uint32, 'invalid'), ['int'])),
('initialized', (YLeaf(YType.uint32, 'initialized'), ['int'])),
('session_creation_started', (YLeaf(YType.uint32, 'session-creation-started'), ['int'])),
('control_policy_executing', (YLeaf(YType.uint32, 'control-policy-executing'), ['int'])),
('control_policy_executed', (YLeaf(YType.uint32, 'control-policy-executed'), ['int'])),
('session_features_applied', (YLeaf(YType.uint32, 'session-features-applied'), ['int'])),
('vrf_configured', (YLeaf(YType.uint32, 'vrf-configured'), ['int'])),
('adding_adjacency', (YLeaf(YType.uint32, 'adding-adjacency'), ['int'])),
('adjacency_added', (YLeaf(YType.uint32, 'adjacency-added'), ['int'])),
('up', (YLeaf(YType.uint32, 'up'), ['int'])),
('down', (YLeaf(YType.uint32, 'down'), ['int'])),
('disconnecting', (YLeaf(YType.uint32, 'disconnecting'), ['int'])),
('disconnected', (YLeaf(YType.uint32, 'disconnected'), ['int'])),
('error', (YLeaf(YType.uint32, 'error'), ['int'])),
('total_interfaces', (YLeaf(YType.uint32, 'total-interfaces'), ['int'])),
])
self.invalid = None
self.initialized = None
self.session_creation_started = None
self.control_policy_executing = None
self.control_policy_executed = None
self.session_features_applied = None
self.vrf_configured = None
self.adding_adjacency = None
self.adjacency_added = None
self.up = None
self.down = None
self.disconnecting = None
self.disconnected = None
self.error = None
self.total_interfaces = None
self._segment_path = lambda: "packet-trigger"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.InterfaceCounts.Ipv6Initiators.PacketTrigger, [u'invalid', u'initialized', u'session_creation_started', u'control_policy_executing', u'control_policy_executed', u'session_features_applied', u'vrf_configured', u'adding_adjacency', u'adjacency_added', u'up', u'down', u'disconnecting', u'disconnected', u'error', u'total_interfaces'], name, value)
class Vrf(Entity):
"""
Array of VRFs with IPSUB interfaces
.. attribute:: vrf_name
IPv4 VRF
**type**\: str
.. attribute:: ipv6vrf_name
IPv6 VRF
**type**\: str
.. attribute:: interfaces
Number of IP subscriber interfaces in the VRF table
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ipv6_interfaces
Number of IPv6 subscriber interfaces in the VRF table
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Summary.Vrf, self).__init__()
self.yang_name = "vrf"
self.yang_parent_name = "summary"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
('ipv6vrf_name', (YLeaf(YType.str, 'ipv6vrf-name'), ['str'])),
('interfaces', (YLeaf(YType.uint64, 'interfaces'), ['int'])),
('ipv6_interfaces', (YLeaf(YType.uint64, 'ipv6-interfaces'), ['int'])),
])
self.vrf_name = None
self.ipv6vrf_name = None
self.interfaces = None
self.ipv6_interfaces = None
self._segment_path = lambda: "vrf"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Summary.Vrf, [u'vrf_name', u'ipv6vrf_name', u'interfaces', u'ipv6_interfaces'], name, value)
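# summary.vrf is a YList of Vrf entries, so iteration follows normal Python
# semantics. A hedged example, assuming 'node' is a populated Node from a read:
#
#     for vrf in node.summary.vrf:
#         print(vrf.vrf_name, vrf.interfaces, vrf.ipv6vrf_name, vrf.ipv6_interfaces)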
class Interfaces(Entity):
"""
IP subscriber interface table
.. attribute:: interface
IP subscriber interface entry
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Interfaces.Interface>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Interfaces, self).__init__()
self.yang_name = "interfaces"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("interface", ("interface", IpSubscriber.Nodes.Node.Interfaces.Interface))])
self._leafs = OrderedDict()
self.interface = YList(self)
self._segment_path = lambda: "interfaces"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Interfaces, [], name, value)
class Interface(Entity):
"""
IP subscriber interface entry
.. attribute:: interface_name (key)
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: vrf
IPv4 VRF details
**type**\: :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Interfaces.Interface.Vrf>`
.. attribute:: ipv6vrf
IPv6 VRF details
**type**\: :py:class:`Ipv6vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.Interfaces.Interface.Ipv6vrf>`
.. attribute:: access_interface
Access interface through which this subscriber is accessible
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: subscriber_ipv4_address
IPv4 Address of the subscriber
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: subscriber_ipv6_address
IPv6 Address of the subscriber
**type**\: str
.. attribute:: subscriber_mac_addres
MAC address of the subscriber
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: subscriber_label
Subscriber label for this subscriber interface
**type**\: int
**range:** 0..4294967295
.. attribute:: interface_creation_time
Interface creation time in month day hh\:mm\:ss format
**type**\: str
.. attribute:: age
Age in hh\:mm\:ss format
**type**\: str
.. attribute:: initiator
Protocol trigger for creation of this subscriber session
**type**\: :py:class:`IpsubMaIntfInitiatorData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaIntfInitiatorData>`
.. attribute:: state
State of the subscriber session
**type**\: :py:class:`IpsubMaIntfStateData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaIntfStateData>`
.. attribute:: old_state
Previous state of the subscriber session
**type**\: :py:class:`IpsubMaIntfStateData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaIntfStateData>`
.. attribute:: last_state_change_time
Interface's last state change time in month day hh\:mm\:ss format
**type**\: str
.. attribute:: current_change_age
Current change age in hh\:mm\:ss format
**type**\: str
.. attribute:: ipv6_initiator
Protocol trigger for creation of this subscriber's IPv6 session
**type**\: :py:class:`IpsubMaIntfInitiatorData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaIntfInitiatorData>`
.. attribute:: ipv6_state
State of the subscriber's IPv6 session
**type**\: :py:class:`IpsubMaIntfStateData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaIntfStateData>`
.. attribute:: ipv6_old_state
Previous state of the subscriber's IPv6 session
**type**\: :py:class:`IpsubMaIntfStateData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaIntfStateData>`
.. attribute:: ipv6_last_state_change_time
Interface's IPv6 last state change time in month day hh\:mm\:ss format
**type**\: str
.. attribute:: ipv6_current_change_age
IPv6 current change age in hh\:mm\:ss format
**type**\: str
.. attribute:: is_l2_connected
True if L2 connected
**type**\: bool
.. attribute:: session_type
Session Type
**type**\: str
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Interfaces.Interface, self).__init__()
self.yang_name = "interface"
self.yang_parent_name = "interfaces"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['interface_name']
self._child_classes = OrderedDict([("vrf", ("vrf", IpSubscriber.Nodes.Node.Interfaces.Interface.Vrf)), ("ipv6vrf", ("ipv6vrf", IpSubscriber.Nodes.Node.Interfaces.Interface.Ipv6vrf))])
self._leafs = OrderedDict([
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('access_interface', (YLeaf(YType.str, 'access-interface'), ['str'])),
('subscriber_ipv4_address', (YLeaf(YType.str, 'subscriber-ipv4-address'), ['str'])),
('subscriber_ipv6_address', (YLeaf(YType.str, 'subscriber-ipv6-address'), ['str'])),
('subscriber_mac_addres', (YLeaf(YType.str, 'subscriber-mac-addres'), ['str'])),
('subscriber_label', (YLeaf(YType.uint32, 'subscriber-label'), ['int'])),
('interface_creation_time', (YLeaf(YType.str, 'interface-creation-time'), ['str'])),
('age', (YLeaf(YType.str, 'age'), ['str'])),
('initiator', (YLeaf(YType.enumeration, 'initiator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaIntfInitiatorData', '')])),
('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaIntfStateData', '')])),
('old_state', (YLeaf(YType.enumeration, 'old-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaIntfStateData', '')])),
('last_state_change_time', (YLeaf(YType.str, 'last-state-change-time'), ['str'])),
('current_change_age', (YLeaf(YType.str, 'current-change-age'), ['str'])),
('ipv6_initiator', (YLeaf(YType.enumeration, 'ipv6-initiator'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaIntfInitiatorData', '')])),
('ipv6_state', (YLeaf(YType.enumeration, 'ipv6-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaIntfStateData', '')])),
('ipv6_old_state', (YLeaf(YType.enumeration, 'ipv6-old-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaIntfStateData', '')])),
('ipv6_last_state_change_time', (YLeaf(YType.str, 'ipv6-last-state-change-time'), ['str'])),
('ipv6_current_change_age', (YLeaf(YType.str, 'ipv6-current-change-age'), ['str'])),
('is_l2_connected', (YLeaf(YType.boolean, 'is-l2-connected'), ['bool'])),
('session_type', (YLeaf(YType.str, 'session-type'), ['str'])),
])
self.interface_name = None
self.access_interface = None
self.subscriber_ipv4_address = None
self.subscriber_ipv6_address = None
self.subscriber_mac_addres = None
self.subscriber_label = None
self.interface_creation_time = None
self.age = None
self.initiator = None
self.state = None
self.old_state = None
self.last_state_change_time = None
self.current_change_age = None
self.ipv6_initiator = None
self.ipv6_state = None
self.ipv6_old_state = None
self.ipv6_last_state_change_time = None
self.ipv6_current_change_age = None
self.is_l2_connected = None
self.session_type = None
self.vrf = IpSubscriber.Nodes.Node.Interfaces.Interface.Vrf()
self.vrf.parent = self
self._children_name_map["vrf"] = "vrf"
self.ipv6vrf = IpSubscriber.Nodes.Node.Interfaces.Interface.Ipv6vrf()
self.ipv6vrf.parent = self
self._children_name_map["ipv6vrf"] = "ipv6vrf"
self._segment_path = lambda: "interface" + "[interface-name='" + str(self.interface_name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Interfaces.Interface, ['interface_name', u'access_interface', u'subscriber_ipv4_address', u'subscriber_ipv6_address', u'subscriber_mac_addres', u'subscriber_label', u'interface_creation_time', u'age', u'initiator', u'state', u'old_state', u'last_state_change_time', u'current_change_age', u'ipv6_initiator', u'ipv6_state', u'ipv6_old_state', u'ipv6_last_state_change_time', u'ipv6_current_change_age', u'is_l2_connected', u'session_type'], name, value)
class Vrf(Entity):
"""
IPv4 VRF details
.. attribute:: vrf_name
VRF name
**type**\: str
.. attribute:: table_name
Table
**type**\: str
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Interfaces.Interface.Vrf, self).__init__()
self.yang_name = "vrf"
self.yang_parent_name = "interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
('table_name', (YLeaf(YType.str, 'table-name'), ['str'])),
])
self.vrf_name = None
self.table_name = None
self._segment_path = lambda: "vrf"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Interfaces.Interface.Vrf, [u'vrf_name', u'table_name'], name, value)
class Ipv6vrf(Entity):
"""
IPv6 VRF details
.. attribute:: vrf_name
VRF name
**type**\: str
.. attribute:: table_name
Table
**type**\: str
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.Interfaces.Interface.Ipv6vrf, self).__init__()
self.yang_name = "ipv6vrf"
self.yang_parent_name = "interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
('table_name', (YLeaf(YType.str, 'table-name'), ['str'])),
])
self.vrf_name = None
self.table_name = None
self._segment_path = lambda: "ipv6vrf"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.Interfaces.Interface.Ipv6vrf, [u'vrf_name', u'table_name'], name, value)
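# Interface entries key on interface-name, so a targeted read can pre-set the
# key rather than pulling the whole table. A hedged sketch (the interface name
# is hypothetical; 'ipsub_oper', 'provider' and 'crud' are assumed to be set
# up as shown after the Node class):
#
#     filt = ipsub_oper.IpSubscriber()
#     node = ipsub_oper.IpSubscriber.Nodes.Node()
#     node.node_name = '0/0/CPU0'
#     intf = ipsub_oper.IpSubscriber.Nodes.Node.Interfaces.Interface()
#     intf.interface_name = 'GigabitEthernet0/0/0/0.100'
#     node.interfaces.interface.append(intf)
#     filt.nodes.node.append(node)
#     result = crud.read(provider, filt)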
class AccessInterfaces(Entity):
"""
IP subscriber access interface table
.. attribute:: access_interface
IP subscriber access interface entry
**type**\: list of :py:class:`AccessInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces, self).__init__()
self.yang_name = "access-interfaces"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("access-interface", ("access_interface", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface))])
self._leafs = OrderedDict()
self.access_interface = YList(self)
self._segment_path = lambda: "access-interfaces"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces, [], name, value)
class AccessInterface(Entity):
"""
IP subscriber access interface entry
.. attribute:: interface_name (key)
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: initiators
Configuration state and statistics for each initiating protocol enabled on this parent interface
**type**\: :py:class:`Initiators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators>`
.. attribute:: ipv6_initiators
Configuration state and statistics for each initiating protocol enabled on this parent interface, for IPv6 sessions
**type**\: :py:class:`Ipv6Initiators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators>`
.. attribute:: session_limit
Configuration session limits for each session limit source and type
**type**\: :py:class:`SessionLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit>`
.. attribute:: interface_creation_time
Interface creation time in Month Date HH\:MM\:SS format
**type**\: str
.. attribute:: age
Age in HH\:MM\:SS format
**type**\: str
.. attribute:: interface_type
Interface Type
**type**\: str
.. attribute:: state
Operational state of this interface
**type**\: :py:class:`IpsubMaParentIntfStateData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaParentIntfStateData>`
.. attribute:: ipv6_state
Operational ipv6 state of this interface
**type**\: :py:class:`IpsubMaParentIntfStateData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaParentIntfStateData>`
.. attribute:: vlan_type
The VLAN type on the access interface
**type**\: :py:class:`IpsubMaParentIntfVlan <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpsubMaParentIntfVlan>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface, self).__init__()
self.yang_name = "access-interface"
self.yang_parent_name = "access-interfaces"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['interface_name']
self._child_classes = OrderedDict([("initiators", ("initiators", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators)), ("ipv6-initiators", ("ipv6_initiators", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators)), ("session-limit", ("session_limit", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit))])
self._leafs = OrderedDict([
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('interface_creation_time', (YLeaf(YType.str, 'interface-creation-time'), ['str'])),
('age', (YLeaf(YType.str, 'age'), ['str'])),
('interface_type', (YLeaf(YType.str, 'interface-type'), ['str'])),
('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaParentIntfStateData', '')])),
('ipv6_state', (YLeaf(YType.enumeration, 'ipv6-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaParentIntfStateData', '')])),
('vlan_type', (YLeaf(YType.enumeration, 'vlan-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper', 'IpsubMaParentIntfVlan', '')])),
])
self.interface_name = None
self.interface_creation_time = None
self.age = None
self.interface_type = None
self.state = None
self.ipv6_state = None
self.vlan_type = None
self.initiators = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators()
self.initiators.parent = self
self._children_name_map["initiators"] = "initiators"
self.ipv6_initiators = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators()
self.ipv6_initiators.parent = self
self._children_name_map["ipv6_initiators"] = "ipv6-initiators"
self.session_limit = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit()
self.session_limit.parent = self
self._children_name_map["session_limit"] = "session-limit"
self._segment_path = lambda: "access-interface" + "[interface-name='" + str(self.interface_name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface, ['interface_name', u'interface_creation_time', u'age', u'interface_type', u'state', u'ipv6_state', u'vlan_type'], name, value)
class Initiators(Entity):
"""
Configuration state and statistics for each
initiating protocol enabled on this parent
interface
.. attribute:: dhcp
DHCP information
**type**\: :py:class:`Dhcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.Dhcp>`
.. attribute:: packet_trigger
Packet trigger information
**type**\: :py:class:`PacketTrigger <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.PacketTrigger>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators, self).__init__()
self.yang_name = "initiators"
self.yang_parent_name = "access-interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("dhcp", ("dhcp", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.Dhcp)), ("packet-trigger", ("packet_trigger", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.PacketTrigger))])
self._leafs = OrderedDict()
self.dhcp = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.Dhcp()
self.dhcp.parent = self
self._children_name_map["dhcp"] = "dhcp"
self.packet_trigger = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.PacketTrigger()
self.packet_trigger.parent = self
self._children_name_map["packet_trigger"] = "packet-trigger"
self._segment_path = lambda: "initiators"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators, [], name, value)
class Dhcp(Entity):
"""
DHCP information
.. attribute:: is_configured
True if the initiator is configured
**type**\: bool
.. attribute:: unique_ip_check
True if check for subscriber address uniqueness during first sign of life is enabled
**type**\: bool
.. attribute:: sessions
Number of sessions currently up for each initiator
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets
Number of first sign of life packets received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_bytes
Number of first sign of life bytes received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets_flow
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding creation rate
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_session_limit
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding one or more of the configured session limits
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_dup_addr
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to duplicate source address
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.Dhcp, self).__init__()
self.yang_name = "dhcp"
self.yang_parent_name = "initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('is_configured', (YLeaf(YType.boolean, 'is-configured'), ['bool'])),
('unique_ip_check', (YLeaf(YType.boolean, 'unique-ip-check'), ['bool'])),
('sessions', (YLeaf(YType.uint32, 'sessions'), ['int'])),
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
('fsol_dropped_packets', (YLeaf(YType.uint32, 'fsol-dropped-packets'), ['int'])),
('fsol_dropped_bytes', (YLeaf(YType.uint32, 'fsol-dropped-bytes'), ['int'])),
('fsol_dropped_packets_flow', (YLeaf(YType.uint32, 'fsol-dropped-packets-flow'), ['int'])),
('fsol_dropped_packets_session_limit', (YLeaf(YType.uint32, 'fsol-dropped-packets-session-limit'), ['int'])),
('fsol_dropped_packets_dup_addr', (YLeaf(YType.uint32, 'fsol-dropped-packets-dup-addr'), ['int'])),
])
self.is_configured = None
self.unique_ip_check = None
self.sessions = None
self.fsol_packets = None
self.fsol_bytes = None
self.fsol_dropped_packets = None
self.fsol_dropped_bytes = None
self.fsol_dropped_packets_flow = None
self.fsol_dropped_packets_session_limit = None
self.fsol_dropped_packets_dup_addr = None
self._segment_path = lambda: "dhcp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.Dhcp, [u'is_configured', u'unique_ip_check', u'sessions', u'fsol_packets', u'fsol_bytes', u'fsol_dropped_packets', u'fsol_dropped_bytes', u'fsol_dropped_packets_flow', u'fsol_dropped_packets_session_limit', u'fsol_dropped_packets_dup_addr'], name, value)
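# The drop counters above break out why FSoL packets were discarded on an
# access interface. A hedged inspection sketch ('ai' is assumed to be a
# populated AccessInterface entry from a read):
#
#     d = ai.initiators.dhcp
#     if d.is_configured:
#         drops = {'flow': d.fsol_dropped_packets_flow,
#                  'session-limit': d.fsol_dropped_packets_session_limit,
#                  'dup-addr': d.fsol_dropped_packets_dup_addr}
#         print(d.sessions, 'sessions;', d.fsol_dropped_packets, 'drops:', drops)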
class PacketTrigger(Entity):
"""
Packet trigger information
.. attribute:: is_configured
True if the initiator is configured
**type**\: bool
.. attribute:: unique_ip_check
True if check for subscriber address uniqueness during first sign of life is enabled
**type**\: bool
.. attribute:: sessions
Number of sessions currently up for each initiator
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets
Number of first sign of life packets received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_bytes
Number of first sign of life bytes received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets_flow
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding creation rate
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_session_limit
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding one or more of the configured session limits
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_dup_addr
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to duplicate source address
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.PacketTrigger, self).__init__()
self.yang_name = "packet-trigger"
self.yang_parent_name = "initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('is_configured', (YLeaf(YType.boolean, 'is-configured'), ['bool'])),
('unique_ip_check', (YLeaf(YType.boolean, 'unique-ip-check'), ['bool'])),
('sessions', (YLeaf(YType.uint32, 'sessions'), ['int'])),
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
('fsol_dropped_packets', (YLeaf(YType.uint32, 'fsol-dropped-packets'), ['int'])),
('fsol_dropped_bytes', (YLeaf(YType.uint32, 'fsol-dropped-bytes'), ['int'])),
('fsol_dropped_packets_flow', (YLeaf(YType.uint32, 'fsol-dropped-packets-flow'), ['int'])),
('fsol_dropped_packets_session_limit', (YLeaf(YType.uint32, 'fsol-dropped-packets-session-limit'), ['int'])),
('fsol_dropped_packets_dup_addr', (YLeaf(YType.uint32, 'fsol-dropped-packets-dup-addr'), ['int'])),
])
self.is_configured = None
self.unique_ip_check = None
self.sessions = None
self.fsol_packets = None
self.fsol_bytes = None
self.fsol_dropped_packets = None
self.fsol_dropped_bytes = None
self.fsol_dropped_packets_flow = None
self.fsol_dropped_packets_session_limit = None
self.fsol_dropped_packets_dup_addr = None
self._segment_path = lambda: "packet-trigger"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Initiators.PacketTrigger, [u'is_configured', u'unique_ip_check', u'sessions', u'fsol_packets', u'fsol_bytes', u'fsol_dropped_packets', u'fsol_dropped_bytes', u'fsol_dropped_packets_flow', u'fsol_dropped_packets_session_limit', u'fsol_dropped_packets_dup_addr'], name, value)
class Ipv6Initiators(Entity):
"""
Configuration state and statistics for each
initiating protocol enabled on this parent
interface, for IPv6 sessions
.. attribute:: dhcp
DHCP information
**type**\: :py:class:`Dhcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.Dhcp>`
.. attribute:: packet_trigger
Packet trigger information
**type**\: :py:class:`PacketTrigger <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.PacketTrigger>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators, self).__init__()
self.yang_name = "ipv6-initiators"
self.yang_parent_name = "access-interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("dhcp", ("dhcp", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.Dhcp)), ("packet-trigger", ("packet_trigger", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.PacketTrigger))])
self._leafs = OrderedDict()
self.dhcp = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.Dhcp()
self.dhcp.parent = self
self._children_name_map["dhcp"] = "dhcp"
self.packet_trigger = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.PacketTrigger()
self.packet_trigger.parent = self
self._children_name_map["packet_trigger"] = "packet-trigger"
self._segment_path = lambda: "ipv6-initiators"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators, [], name, value)
class Dhcp(Entity):
"""
DHCP information
.. attribute:: is_configured
True if the initiator is configured
**type**\: bool
.. attribute:: unique_ip_check
True if the check for subscriber address uniqueness during first sign of life is enabled
**type**\: bool
.. attribute:: sessions
Number of sessions currently up for each initiator
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets
Number of first sign of life packets received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_bytes
Number of first sign of life bytes received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets_flow
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding creation rate
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_session_limit
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding one or more of the configured session limits
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_dup_addr
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to duplicate source address
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.Dhcp, self).__init__()
self.yang_name = "dhcp"
self.yang_parent_name = "ipv6-initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('is_configured', (YLeaf(YType.boolean, 'is-configured'), ['bool'])),
('unique_ip_check', (YLeaf(YType.boolean, 'unique-ip-check'), ['bool'])),
('sessions', (YLeaf(YType.uint32, 'sessions'), ['int'])),
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
('fsol_dropped_packets', (YLeaf(YType.uint32, 'fsol-dropped-packets'), ['int'])),
('fsol_dropped_bytes', (YLeaf(YType.uint32, 'fsol-dropped-bytes'), ['int'])),
('fsol_dropped_packets_flow', (YLeaf(YType.uint32, 'fsol-dropped-packets-flow'), ['int'])),
('fsol_dropped_packets_session_limit', (YLeaf(YType.uint32, 'fsol-dropped-packets-session-limit'), ['int'])),
('fsol_dropped_packets_dup_addr', (YLeaf(YType.uint32, 'fsol-dropped-packets-dup-addr'), ['int'])),
])
self.is_configured = None
self.unique_ip_check = None
self.sessions = None
self.fsol_packets = None
self.fsol_bytes = None
self.fsol_dropped_packets = None
self.fsol_dropped_bytes = None
self.fsol_dropped_packets_flow = None
self.fsol_dropped_packets_session_limit = None
self.fsol_dropped_packets_dup_addr = None
self._segment_path = lambda: "dhcp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.Dhcp, [u'is_configured', u'unique_ip_check', u'sessions', u'fsol_packets', u'fsol_bytes', u'fsol_dropped_packets', u'fsol_dropped_bytes', u'fsol_dropped_packets_flow', u'fsol_dropped_packets_session_limit', u'fsol_dropped_packets_dup_addr'], name, value)
class PacketTrigger(Entity):
"""
packet trigger information
.. attribute:: is_configured
True if the initiator is configured
**type**\: bool
.. attribute:: unique_ip_check
True if the check for subscriber address uniqueness during first sign of life is enabled
**type**\: bool
.. attribute:: sessions
Number of sessions currently up for each initiator
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_packets
Number of first sign of life packets received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_bytes
Number of first sign of life bytes received for initiating protocol on this interface
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets
Number of first sign of life packets received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_bytes
Number of first sign of life bytes received for initiating protocol on this interface that were dropped
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: fsol_dropped_packets_flow
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding creation rate
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_session_limit
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to exceeding one or more of the configured session limits
**type**\: int
**range:** 0..4294967295
.. attribute:: fsol_dropped_packets_dup_addr
Number of first sign of life packets received for initiating protocol on this interface that were dropped due to duplicate source address
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.PacketTrigger, self).__init__()
self.yang_name = "packet-trigger"
self.yang_parent_name = "ipv6-initiators"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('is_configured', (YLeaf(YType.boolean, 'is-configured'), ['bool'])),
('unique_ip_check', (YLeaf(YType.boolean, 'unique-ip-check'), ['bool'])),
('sessions', (YLeaf(YType.uint32, 'sessions'), ['int'])),
('fsol_packets', (YLeaf(YType.uint32, 'fsol-packets'), ['int'])),
('fsol_bytes', (YLeaf(YType.uint32, 'fsol-bytes'), ['int'])),
('fsol_dropped_packets', (YLeaf(YType.uint32, 'fsol-dropped-packets'), ['int'])),
('fsol_dropped_bytes', (YLeaf(YType.uint32, 'fsol-dropped-bytes'), ['int'])),
('fsol_dropped_packets_flow', (YLeaf(YType.uint32, 'fsol-dropped-packets-flow'), ['int'])),
('fsol_dropped_packets_session_limit', (YLeaf(YType.uint32, 'fsol-dropped-packets-session-limit'), ['int'])),
('fsol_dropped_packets_dup_addr', (YLeaf(YType.uint32, 'fsol-dropped-packets-dup-addr'), ['int'])),
])
self.is_configured = None
self.unique_ip_check = None
self.sessions = None
self.fsol_packets = None
self.fsol_bytes = None
self.fsol_dropped_packets = None
self.fsol_dropped_bytes = None
self.fsol_dropped_packets_flow = None
self.fsol_dropped_packets_session_limit = None
self.fsol_dropped_packets_dup_addr = None
self._segment_path = lambda: "packet-trigger"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.Ipv6Initiators.PacketTrigger, [u'is_configured', u'unique_ip_check', u'sessions', u'fsol_packets', u'fsol_bytes', u'fsol_dropped_packets', u'fsol_dropped_bytes', u'fsol_dropped_packets_flow', u'fsol_dropped_packets_session_limit', u'fsol_dropped_packets_dup_addr'], name, value)
class SessionLimit(Entity):
"""
Configuration session limits for each session
limit source and type
.. attribute:: unclassified_source
Unclassified source session limits
**type**\: :py:class:`UnclassifiedSource <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.UnclassifiedSource>`
.. attribute:: total
All sources session limits
**type**\: :py:class:`Total <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_ipsub_oper.IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.Total>`
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit, self).__init__()
self.yang_name = "session-limit"
self.yang_parent_name = "access-interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("unclassified-source", ("unclassified_source", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.UnclassifiedSource)), ("total", ("total", IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.Total))])
self._leafs = OrderedDict()
self.unclassified_source = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.UnclassifiedSource()
self.unclassified_source.parent = self
self._children_name_map["unclassified_source"] = "unclassified-source"
self.total = IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.Total()
self.total.parent = self
self._children_name_map["total"] = "total"
self._segment_path = lambda: "session-limit"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit, [], name, value)
class UnclassifiedSource(Entity):
"""
Unclassified source session limits
.. attribute:: per_vlan
Per\-VLAN limit category
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.UnclassifiedSource, self).__init__()
self.yang_name = "unclassified-source"
self.yang_parent_name = "session-limit"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('per_vlan', (YLeaf(YType.uint32, 'per-vlan'), ['int'])),
])
self.per_vlan = None
self._segment_path = lambda: "unclassified-source"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.UnclassifiedSource, [u'per_vlan'], name, value)
class Total(Entity):
"""
All sources session limits
.. attribute:: per_vlan
Per\-VLAN limit category
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'subscriber-ipsub-oper'
_revision = '2015-11-09'
def __init__(self):
super(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.Total, self).__init__()
self.yang_name = "total"
self.yang_parent_name = "session-limit"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('per_vlan', (YLeaf(YType.uint32, 'per-vlan'), ['int'])),
])
self.per_vlan = None
self._segment_path = lambda: "total"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(IpSubscriber.Nodes.Node.AccessInterfaces.AccessInterface.SessionLimit.Total, [u'per_vlan'], name, value)
def clone_ptr(self):
self._top_entity = IpSubscriber()
return self._top_entity
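# --- Illustrative usage (not part of the generated bindings) ---------------
# A minimal sketch of how these generated classes are typically read through
# ydk's CRUD service over NETCONF. The device address and credentials are
# placeholders, and the list-attribute names (nodes.node,
# access_interfaces.access_interface) follow ydk's usual naming convention
# for the class paths shown above; treat them as assumptions.
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_subscriber_ipsub_oper as ipsub_oper

provider = NetconfServiceProvider(address="10.0.0.1", username="admin", password="admin")
crud = CRUDService()
ip_subscriber = crud.read(provider, ipsub_oper.IpSubscriber())  # fetch the oper tree
for node in ip_subscriber.nodes.node:
    for iface in node.access_interfaces.access_interface:
        # 'sessions' is the uint32 leaf defined on the Dhcp class above
        print(iface.ipv6_initiators.dhcp.sessions)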
| 50.361101
| 522
| 0.429696
| 10,094
| 139,047
| 5.676541
| 0.029721
| 0.047767
| 0.057174
| 0.024049
| 0.882528
| 0.855687
| 0.835041
| 0.812824
| 0.799979
| 0.782736
| 0
| 0.027743
| 0.484656
| 139,047
| 2,760
| 523
| 50.379348
| 0.771886
| 0.222565
| 0
| 0.722952
| 0
| 0
| 0.152245
| 0.060026
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069767
| false
| 0
| 0.005056
| 0
| 0.140546
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0c0a32ed25a4b5d99283b54fdce129de95511f6
| 6,037
|
py
|
Python
|
ao/upcloud/migrations/0002_auto_20180122_0152.py
|
ZuluPro/ao
|
00afffc044a7242401a7c4e216d65b2b11d8b0c0
|
[
"BSD-3-Clause"
] | null | null | null |
ao/upcloud/migrations/0002_auto_20180122_0152.py
|
ZuluPro/ao
|
00afffc044a7242401a7c4e216d65b2b11d8b0c0
|
[
"BSD-3-Clause"
] | null | null | null |
ao/upcloud/migrations/0002_auto_20180122_0152.py
|
ZuluPro/ao
|
00afffc044a7242401a7c4e216d65b2b11d8b0c0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-01-22 01:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('upcloud', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='zone',
name='firewall_price',
field=models.DecimalField(decimal_places=3, default=0, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='io_request_backup_price',
field=models.DecimalField(decimal_places=3, default=0, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='io_request_hdd_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='io_request_maxiops_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='ipv4_address_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='ipv6_address_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_12cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_16cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_1cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_20cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_2cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_4cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_6cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='plan_8cpu_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='public_ipv4_bandwidth_in_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='public_ipv4_bandwidth_out_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='public_ipv6_bandwidth_in_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='public_ipv6_bandwidth_out_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='server_core_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='server_memory_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='storage_backup_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='storage_hdd_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='storage_maxiops_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='windows_datacenter_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
migrations.AddField(
model_name='zone',
name='windows_standard_price',
field=models.DecimalField(decimal_places=3, default=1, max_digits=4),
preserve_default=False,
),
]
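# Note on the field definition repeated above: DecimalField(max_digits=4,
# decimal_places=3) stores at most four significant digits, three of them
# after the decimal point, so each price column is confined to 0.000-9.999.
# preserve_default=False means the default (0 or 1) is only used to populate
# existing rows during this migration and is not kept on the model. A quick,
# illustrative check of what the column admits:
from decimal import Decimal

price = Decimal("9.999")    # fits: 4 digits total, 3 decimal places
# Decimal("10.000") would fail model validation: 5 digits total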
| 36.36747
| 81
| 0.586384
| 619
| 6,037
| 5.450727
| 0.119548
| 0.133373
| 0.170421
| 0.200059
| 0.922644
| 0.922644
| 0.91227
| 0.91227
| 0.91227
| 0.908714
| 0
| 0.026833
| 0.308597
| 6,037
| 165
| 82
| 36.587879
| 0.781505
| 0.010933
| 0
| 0.791139
| 1
| 0
| 0.104055
| 0.039544
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012658
| 0
| 0.031646
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e0c6894ee0942060f987f58020bd983841b58a9f
| 237
|
py
|
Python
|
src/service/HaversineService.py
|
Bocampagni/Shipping-api
|
4cdf074467e4478885fe55d7c82a16e1a577b045
|
[
"MIT"
] | null | null | null |
src/service/HaversineService.py
|
Bocampagni/Shipping-api
|
4cdf074467e4478885fe55d7c82a16e1a577b045
|
[
"MIT"
] | null | null | null |
src/service/HaversineService.py
|
Bocampagni/Shipping-api
|
4cdf074467e4478885fe55d7c82a16e1a577b045
|
[
"MIT"
] | null | null | null |
"""
Haversine
"""
from haversine import haversine
def linear_distance(distance):
return haversine((distance.first_lat_coordinate, distance.first_lon_coordinate), (distance.second_lat_coordinate, distance.second_lon_coordinate))
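# --- Illustrative usage (assumed caller, not part of this service) ---------
# linear_distance accepts any object exposing the four coordinate attributes;
# a namedtuple works as a stand-in. haversine() returns kilometres by default.
from collections import namedtuple

Distance = namedtuple("Distance", [
    "first_lat_coordinate", "first_lon_coordinate",
    "second_lat_coordinate", "second_lon_coordinate",
])

paris_to_london = Distance(48.8566, 2.3522, 51.5074, -0.1278)
print(linear_distance(paris_to_london))  # roughly 343 km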
| 19.75
| 150
| 0.810127
| 27
| 237
| 6.777778
| 0.444444
| 0.295082
| 0.229508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097046
| 237
| 11
| 151
| 21.545455
| 0.85514
| 0.037975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e0dacd7f4e9471c2e1eb02a5d2bc4b7b3d5a9777
| 63
|
py
|
Python
|
modules/__init__.py
|
mori97/JKNet-dgl
|
7ad1872645edb55c725bc0a2afe461f444be9634
|
[
"MIT"
] | 32
|
2018-12-27T12:07:54.000Z
|
2022-02-26T13:45:18.000Z
|
modules/__init__.py
|
mori97/JKNet-dgl
|
7ad1872645edb55c725bc0a2afe461f444be9634
|
[
"MIT"
] | null | null | null |
modules/__init__.py
|
mori97/JKNet-dgl
|
7ad1872645edb55c725bc0a2afe461f444be9634
|
[
"MIT"
] | 7
|
2020-09-10T14:19:23.000Z
|
2022-03-05T09:13:23.000Z
|
from .jknet import JKNetConcat
from .jknet import JKNetMaxpool
| 21
| 31
| 0.84127
| 8
| 63
| 6.625
| 0.625
| 0.339623
| 0.566038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 32
| 31.5
| 0.963636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1cc6e7b98fb0f3b26b2c66a288f9cc83971e7dc2
| 214
|
py
|
Python
|
atv026.py
|
luismontei/Atividade---python-3
|
c29c81623b8e8c630c0e16cc22db9a7489f3cea7
|
[
"Apache-2.0"
] | null | null | null |
atv026.py
|
luismontei/Atividade---python-3
|
c29c81623b8e8c630c0e16cc22db9a7489f3cea7
|
[
"Apache-2.0"
] | null | null | null |
atv026.py
|
luismontei/Atividade---python-3
|
c29c81623b8e8c630c0e16cc22db9a7489f3cea7
|
[
"Apache-2.0"
] | null | null | null |
n1=int(input("digite um número: "))
n2=int(input("digite outro número: "))
while n2<n1:
n1=int(input("digite um número: "))
n2=int(input("digite outro número: "))
else:
for i in range(n1,n2,1):
print(i)
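# Note: the else clause on a while loop runs once the loop condition becomes
# false (i.e. the loop was not left via break), so here the for loop always
# executes after a valid pair is read. A minimal illustration of while/else:
n = 0
while n < 3:
    n += 1
else:
    print("loop ended normally, n =", n)  # prints: loop ended normally, n = 3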
| 26.75
| 40
| 0.640187
| 37
| 214
| 3.702703
| 0.432432
| 0.233577
| 0.408759
| 0.233577
| 0.744526
| 0.744526
| 0.744526
| 0.744526
| 0.744526
| 0.744526
| 0
| 0.050279
| 0.163551
| 214
| 8
| 41
| 26.75
| 0.715084
| 0
| 0
| 0.5
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1ccdaa92ab4667601be0acd49a2a8486b1b2d296
| 2,706
|
py
|
Python
|
tests/test_swimmingPoolPayDesk.py
|
green10-syntra-ab-python-adv/practices-05
|
ca083dd6f6f675a9336ef15fdfcc293111885a36
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_swimmingPoolPayDesk.py
|
green10-syntra-ab-python-adv/practices-05
|
ca083dd6f6f675a9336ef15fdfcc293111885a36
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_swimmingPoolPayDesk.py
|
green10-syntra-ab-python-adv/practices-05
|
ca083dd6f6f675a9336ef15fdfcc293111885a36
|
[
"BSD-2-Clause"
] | null | null | null |
import pytest
from sample.swimming_pool_pay_desk import SwimmingPoolPayDesk
class TestSwimmingPoolPayDesk():
def setup(self):
self.swimming_pool_pay_desk = SwimmingPoolPayDesk()
def test_invalid_type(self):
with pytest.raises(TypeError):
self.swimming_pool_pay_desk.calculate_admission_fee("young child")
def test_invalid_value(self):
with pytest.raises(ValueError):
self.swimming_pool_pay_desk.calculate_admission_fee(-5)
with pytest.raises(ValueError):
self.swimming_pool_pay_desk.calculate_admission_fee(-1)
def test_0_incl_6_incl(self):
# pytest.approx used with floating point numbers, 0.005 EUR tolerance
assert pytest.approx(1.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(0)
assert pytest.approx(1.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(1)
assert pytest.approx(1.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(3)
assert pytest.approx(1.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(5)
assert pytest.approx(1.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(6)
def test_6_excl_16_incl(self):
# pytest.approx used with floating point numbers, 0.005 EUR tolerance
assert pytest.approx(2.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(7)
assert pytest.approx(2.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(10)
assert pytest.approx(2.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(15)
assert pytest.approx(2.00, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(16)
def test_16_excl_65_incl(self):
# pytest.approx used with floating point numbers, 0.005 EUR tolerance
assert pytest.approx(2.50, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(17)
assert pytest.approx(2.50, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(53)
assert pytest.approx(2.50, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(64)
assert pytest.approx(2.50, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(65)
def test_65_excl_older(self):
# pytest.approx used with floating point numbers, 0.005 EUR tolerance
assert pytest.approx(1.50, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(66)
assert pytest.approx(1.50, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(75)
assert pytest.approx(1.50, abs=0.005) == self.swimming_pool_pay_desk.calculate_admission_fee(9999)
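# --- Sketch of the class under test (inferred, not the actual source) ------
# sample/swimming_pool_pay_desk.py is not shown here; an implementation that
# would satisfy the assertions above could look like this. The tier
# boundaries and fees are read directly off the tests; everything else is an
# assumption.
class SwimmingPoolPayDesk:
    def calculate_admission_fee(self, age):
        if not isinstance(age, int):
            raise TypeError("age must be an integer")
        if age < 0:
            raise ValueError("age cannot be negative")
        if age <= 6:
            return 1.00   # ages 0..6 inclusive
        if age <= 16:
            return 2.00   # ages 7..16 inclusive
        if age <= 65:
            return 2.50   # ages 17..65 inclusive
        return 1.50       # 66 and older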
| 58.826087
| 106
| 0.743163
| 409
| 2,706
| 4.623472
| 0.144254
| 0.133263
| 0.166579
| 0.210999
| 0.846113
| 0.83395
| 0.83395
| 0.83395
| 0.810682
| 0.808567
| 0
| 0.073896
| 0.154841
| 2,706
| 45
| 107
| 60.133333
| 0.752951
| 0.100148
| 0
| 0.060606
| 0
| 0
| 0.004527
| 0
| 0
| 0
| 0
| 0
| 0.484848
| 1
| 0.212121
| false
| 0
| 0.060606
| 0
| 0.30303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1cee3fb199a727a069f6178a5c4404e8cb936c47
| 102
|
py
|
Python
|
python/xpctl/__init__.py
|
domyounglee/baseline
|
2261abfb7e770cc6f3d63a7f6e0015238d0e11f8
|
[
"Apache-2.0"
] | null | null | null |
python/xpctl/__init__.py
|
domyounglee/baseline
|
2261abfb7e770cc6f3d63a7f6e0015238d0e11f8
|
[
"Apache-2.0"
] | null | null | null |
python/xpctl/__init__.py
|
domyounglee/baseline
|
2261abfb7e770cc6f3d63a7f6e0015238d0e11f8
|
[
"Apache-2.0"
] | 3
|
2019-05-27T04:52:21.000Z
|
2022-02-15T00:22:53.000Z
|
try:
from xpctl.core import *
except ImportError:
pass
from xpctl.version import __version__
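# The guarded import above makes xpctl.core optional: if core's own
# dependencies are unavailable, "import xpctl" still succeeds and
# __version__ remains importable. The same pattern in isolation (the module
# names here are illustrative):
try:
    import ujson as json  # optional fast JSON backend, if installed
except ImportError:
    import json  # stdlib fallback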
| 14.571429
| 37
| 0.745098
| 13
| 102
| 5.538462
| 0.692308
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 102
| 6
| 38
| 17
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
1cf19d078af79b04b5774c1ec73a31315f2b109e
| 4,825
|
py
|
Python
|
tests/test_substring.py
|
drnextgis/Shapely
|
f9912a1ef9753f6e95347520911818aec5635de3
|
[
"BSD-3-Clause"
] | 1
|
2019-07-02T03:59:07.000Z
|
2019-07-02T03:59:07.000Z
|
tests/test_substring.py
|
AtrCheema/Shapely
|
2c39f871341ddd82c24ec42f1ac1c5a16663729b
|
[
"BSD-3-Clause"
] | 1
|
2021-02-23T23:32:17.000Z
|
2021-02-23T23:32:17.000Z
|
tests/test_substring.py
|
AtrCheema/Shapely
|
2c39f871341ddd82c24ec42f1ac1c5a16663729b
|
[
"BSD-3-Clause"
] | 1
|
2020-12-23T11:20:53.000Z
|
2020-12-23T11:20:53.000Z
|
from . import unittest
from shapely.ops import substring
from shapely.geos import geos_version
from shapely.geometry import Point, LineString
class SubstringTestCase(unittest.TestCase):
def setUp(self):
self.point = Point(1, 1)
self.line1 = LineString(([0, 0], [2, 0]))
self.line2 = LineString(([3, 0], [3, 6], [4.5, 6]))
@unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
def test_return_startpoint(self):
self.assertTrue(substring(self.line1, -500, -600).equals(Point(0, 0)))
self.assertTrue(substring(self.line1, -500, -500).equals(Point(0, 0)))
self.assertTrue(substring(self.line1, -1, -1.1, True).equals(Point(0, 0)))
self.assertTrue(substring(self.line1, -1.1, -1.1, True).equals(Point(0, 0)))
@unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
def test_return_endpoint(self):
self.assertTrue(substring(self.line1, 500, 600).equals(Point(2, 0)))
self.assertTrue(substring(self.line1, 500, 500).equals(Point(2, 0)))
self.assertTrue(substring(self.line1, 1, 1.1, True).equals(Point(2, 0)))
self.assertTrue(substring(self.line1, 1.1, 1.1, True).equals(Point(2, 0)))
@unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
def test_return_midpoint(self):
self.assertTrue(substring(self.line1, 0.5, 0.5).equals(Point(0.5, 0)))
self.assertTrue(substring(self.line1, -0.5, -0.5).equals(Point(1.5, 0)))
self.assertTrue(substring(self.line1, 0.5, 0.5, True).equals(Point(1, 0)))
self.assertTrue(substring(self.line1, -0.5, -0.5, True).equals(Point(1, 0)))
@unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
def test_return_startsubstring(self):
self.assertTrue(substring(self.line1, -500, 0.6).equals(LineString(([0, 0], [0.6, 0]))))
self.assertTrue(substring(self.line1, -1.1, 0.6, True).equals(LineString(([0, 0], [1.2, 0]))))
@unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
def test_return_endsubstring(self):
self.assertTrue(substring(self.line1, 0.6, 500).equals(LineString(([0.6, 0], [2, 0]))))
self.assertTrue(substring(self.line1, 0.6, 1.1, True).equals(LineString(([1.2, 0], [2, 0]))))
@unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
def test_return_midsubstring(self):
self.assertTrue(substring(self.line1, 0.5, 0.6).equals(LineString(([0.5, 0], [0.6, 0]))))
self.assertTrue(substring(self.line1, 0.6, 0.5).equals(LineString(([0.6, 0], [0.5, 0]))))
self.assertTrue(substring(self.line1, -0.5, -0.6).equals(LineString(([1.5, 0], [1.4, 0]))))
self.assertTrue(substring(self.line1, -0.6, -0.5).equals(LineString(([1.4, 0], [1.5, 0]))))
self.assertTrue(substring(self.line1, 0.5, 0.6, True).equals(LineString(([1, 0], [1.2, 0]))))
self.assertTrue(substring(self.line1, 0.6, 0.5, True).equals(LineString(([1.2, 0], [1, 0]))))
self.assertTrue(substring(self.line1, -0.5, -0.6, True).equals(LineString(([1, 0], [0.8, 0]))))
self.assertTrue(substring(self.line1, -0.6, -0.5, True).equals(LineString(([0.8, 0], [1, 0]))))
@unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
def test_return_substring_with_vertices(self):
self.assertTrue(substring(self.line2, 1, 7).equals(LineString(([3, 1], [3, 6], [4, 6]))))
self.assertTrue(substring(self.line2, 0.2, 0.9, True).equals(LineString(([3, 1.5], [3, 6], [3.75, 6]))))
self.assertTrue(substring(self.line2, 0, 0.9, True).equals(LineString(([3, 0], [3, 6], [3.75, 6]))))
self.assertTrue(substring(self.line2, 0.2, 1, True).equals(LineString(([3, 1.5], [3, 6], [4.5, 6]))))
def test_suite():
loader = unittest.TestLoader()
return loader.loadTestsFromTestCase(SubstringTestCase)
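# --- Quick illustration of the substring semantics exercised above ---------
# Positive distances measure from the start of the line, negative distances
# from the end, and a truthy third argument (normalized) interprets the
# distances as fractions of the total length.
from shapely.geometry import LineString
from shapely.ops import substring

line = LineString([(0, 0), (2, 0)])
print(substring(line, 0.5, 1.5))          # LINESTRING (0.5 0, 1.5 0)
print(substring(line, -0.5, -0.5))        # POINT (1.5 0): zero-length span
print(substring(line, 0.25, 0.75, True))  # LINESTRING (0.5 0, 1.5 0)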
| 61.858974
| 112
| 0.618031
| 759
| 4,825
| 3.891963
| 0.067194
| 0.170616
| 0.280298
| 0.329045
| 0.861882
| 0.822275
| 0.806026
| 0.779959
| 0.750508
| 0.724103
| 0
| 0.102284
| 0.165181
| 4,825
| 77
| 113
| 62.662338
| 0.631082
| 0
| 0
| 0.40625
| 0
| 0
| 0.031503
| 0
| 0
| 0
| 0
| 0
| 0.5625
| 1
| 0.15625
| false
| 0
| 0.0625
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1cfd3429a5dde09b07048c615ed135236508936c
| 32,852
|
py
|
Python
|
int_sync_inclination.py
|
erccarls/MW_TWIN
|
a3a1560271cd4e553d9423d0d3eb62f01561e176
|
[
"MIT"
] | null | null | null |
int_sync_inclination.py
|
erccarls/MW_TWIN
|
a3a1560271cd4e553d9423d0d3eb62f01561e176
|
[
"MIT"
] | null | null | null |
int_sync_inclination.py
|
erccarls/MW_TWIN
|
a3a1560271cd4e553d9423d0d3eb62f01561e176
|
[
"MIT"
] | null | null | null |
#===============================================================================
# Author: Eric Carlson
# E-mail: erccarls@ucsc.edu
# Description: Integrates galprop emissivity maps for arbitrary inclination through LOS.
# Output: Outputs FITS with units mJy/beam for a specified frequency.
# Last Modified: May 23 2012
#===============================================================================
from scipy import *
import sys
sys.path.append('./pyfits/lib')
#===============================================================================
# Parameters: emissivity filename, FITS output filename, physical radius of the
# emissivity file, total physical height (z) of the emissivity file (i.e. +-5 kpc
# would take arg 10), inclination in degrees, physical distance to the object
# in kpc, frequency, the instrument half-power beam width (hpbw) in arcsec,
# the faintest contour n, and the width in arc-minutes of the output plot.
#===============================================================================
def calc_emiss(emissFilename, fileout, r_kpc, z_kpc, inclination, objectDist, frequency, hpbw,contour_n,width):
import pyfits, numpy, math #@UnresolvedImport
import os, sys
from sys import stdout
pc2cm = 3.08568025e18
kpc2cm = 3.08568025e21
sqArcSecPerSr = 42545949625.0 # Square arcseconds per sr
# Determine which energy bin to use based on the frequency passed in.
vSyncLow = 1.02026e8 #These are found in the GALDEF File
vSyncFactor = 1.125 #These are found in the GALDEF File
# Determine correct energy bin
eBin = int(round(math.log(frequency/vSyncLow)/math.log(vSyncFactor)))
print 'Selected Frequency:' + str(frequency/1e9) + 'GHz \nEnergy bin: ' + str(eBin)
# generate a square image.
outRes = 300 # Number of pixels for square output image was 300 for paper
num_zSteps = 100 # Number of z-steps along the integration line
# load emissivity data
hdulist = pyfits.open(emissFilename, mode='update')
scidata = hdulist[0].data
hdulist.info()
# Read FITS header to get dimensions of emissivity map (pixels)
dimen = scidata.shape
iRange = int(dimen[0])
jRange = int(dimen[1])
kRange = int(dimen[2])
lRange = int(dimen[3])
print "Ranges are", iRange, jRange, kRange, lRange
if (eBin>=jRange):
print "Energy out of range. Exiting..."
return
#===========================================================================
# We will now choose a coordinate system of (x',y',z') with the +z' face of a
# cube parallel to the observer (output image) so that we integrate along z'
# axis. Thus we have a cylindrical input map rotated in a cube. This cube is
# chosen to have side length of 1.5*the diameter of the cylinder. Points in
# the primed system are mapped to points in the unprimed system via an inverse
# euler rotation. Rotations are unitary so the transpose of the standard
# rotation matrix yields the inverse.
#===========================================================================
# Calculate the physical side length of the output map based on object distance and angular size of output.
sideLength = 40 # in kpc
print 'Output sidelength (kpc): ' , sideLength
kpcPerDeltaZ = float(sideLength)/num_zSteps # Physical distance for z integration interval
kpcPerPixelInZ = float(z_kpc)/kRange # Input Z has roughly 10:1 aspect ratio z:r
kpcPerPixelInR = float(r_kpc)/lRange #
kpcPerPixelOut = float(sideLength)/float(outRes) # Output has square aspect ratio
cmPerPixelOut = kpcPerPixelOut*kpc2cm # Output cm/pixel
solidAngle = 1/((objectDist*kpc2cm)**2.0) # 4 pi * ratio of 1 cm^2 to the entire sphere given distance
volPerPixelOut = (cmPerPixelOut**2.0)*kpcPerDeltaZ*kpc2cm # one integration volume in cm^3
srPerPixel = (kpcPerPixelOut/(objectDist))**2 # 4 pi * ratio of areas
SqArcSecPerPixel = sqArcSecPerSr*srPerPixel # ArcSec^2/Pixel
pixelPerArcSecSq = 1/(SqArcSecPerPixel) # pixel/ArcSec^2 (Ratio of areas times 4 pi * conversion)
beamConversion = pixelPerArcSecSq * (math.pi * (hpbw/2)**2) / math.log(2) # pixel/ArcSec^2 * arcSec^2/beam
#pixPerBeam = (math.pi * (hpbw/2)**2)/SqArcSecPerPixel / math.log(2)
#print 'ppb', pixPerBeam
print 'ArcSec^2/pixel: ' + str(1/pixelPerArcSecSq)
print 'Beam Conversion: ' + str(beamConversion)
image = numpy.zeros((outRes, outRes))
print 'Beginning Integration'
for i in range(0,outRes): # x loop
for j in range(0,outRes): # y loop
flux = 0.0 # total flux sum
for k in range (0,num_zSteps): # z-integration
# Shift coordinates so they are centered on galaxy and convert to kpc before transforming to emiss. coordinates.
outPosition = ((i-outRes/2.0)*kpcPerPixelOut,(j-outRes/2.0)*kpcPerPixelOut,(k-num_zSteps/2.0)*kpcPerDeltaZ)
inPosition = rotate_coordinates(outPosition,inclination[0]/180.0*math.pi,0,inclination[1]/180.0*math.pi) # Rotate to input emissivity file coords.
r = math.sqrt(inPosition[0]**2.0+inPosition[1]**2.0) # Calc r in kpc
z = inPosition[2] # Calc z in kpc
if (abs(z) <= z_kpc/2.0 and r <= float(r_kpc)): # Check that we are within the bounds of the input image.
xPixel = int(r/kpcPerPixelInR) # Calc the actual pixel coordinates.
yPixel = int((z+z_kpc/2.0)/kpcPerPixelInZ) # Calc the actual pixel coordinates.
# Ensure we have a valid coordinate. The edge flux may be underestimated since we are truncating, but assuming the emissivity file is very small at the boundaries this won't be an issue.
if (xPixel < lRange and yPixel < kRange):
density = scidata[0,eBin,yPixel, xPixel] # Read density from file
flux += volPerPixelOut*density # Volume*energy density
if(flux!=0):
image[j,i] = flux*frequency*solidAngle # Image in erg/s/cm^2
# Monitor Progress in terminal.
sys.stdout.write('\r' + str(int(float(i)/outRes*100+1.0)))
sys.stdout.flush()
print 'Integration Complete...\n'
print 'Total Luminosity (erg/s):' + str(numpy.sum(image)/solidAngle*4*math.pi)
print 'Total Flux (Jy):' + str(numpy.sum(image)*1.0e23/frequency) # in Jy
print 'Complete. FITS image output to ' + fileout
#===========================================================================
# Write array to FITS image
#===========================================================================
hdu = pyfits.PrimaryHDU(beamConversion*image*1.0e23*1000/frequency) # in mJy/beam
hdulist = pyfits.HDUList([hdu])
if os.access(fileout, os.F_OK ): os.remove(fileout)
hdulist.writeto(fileout)
#===========================================================================
# Contour plot with scipy
#===========================================================================
import matplotlib.pyplot as plt #@UnresolvedImport
import matplotlib.image as mpimg #@UnresolvedImport
import matplotlib.cm as cm
from matplotlib.patches import Ellipse
import matplotlib.patches as mpatches
import matplotlib
from matplotlib.collections import PatchCollection
SideLengthArcMin = math.atan(sideLength/float(objectDist))*180/math.pi*60 #side length in arc minutes
img = plt.imshow(beamConversion*image*1.0e23/frequency*1000,origin='lower', extent=[-SideLengthArcMin/2,SideLengthArcMin/2,-SideLengthArcMin/2,SideLengthArcMin/2],vmin=0, vmax=15.0)
#img.set_clim=(0.0,15.0)
tickList = [2**-1.5,2**0,2**.5,2**1.0,2**1.5,2**2,2**2.5,2**3,2**3.5,15.0]
for i in range(len(tickList)):
tickList[i] = round(tickList[i],2)
plt.colorbar(ticks=tickList, orientation='vertical')
pltSideLengthKpc = 20.0 # kpc
# Radio contour is so small we prefer a different sidelength.
if (fileout == 'm31_out_DM_ONLY.fits'):
print 'M31, Adjusting sidelength'
pltSideLengthKpc = 10.0 # kpc
pltSideLength = math.atan(pltSideLengthKpc/float(objectDist))*180/math.pi*60
plt.xlim((-pltSideLength/2,pltSideLength/2))
plt.ylim((-pltSideLength/2,pltSideLength/2))
#plt.colorbar( img, shrink=.8 ,extend='both')
plt.xlabel('x (arcmin)')
plt.ylabel('y (arcmin)')
plt.title('Flux ($mJy Beam^{-1}$ at ' + str(frequency/1.0e9) + 'GHz)\nTotal Flux:' + str(round(numpy.sum(image)*1.0e23/frequency,6)) + ' Jy')
levelList = []
for i in range(0,20):
levelList.append(2.0**((contour_n+i)/2))
CS = plt.contour(beamConversion*image*1.0e23/frequency*1000,origin='lower', extent=[-SideLengthArcMin/2,SideLengthArcMin/2,-SideLengthArcMin/2,SideLengthArcMin/2],levels=levelList, colors = 'w')
plt.clabel(CS, fontsize=8, inline=1)
#plt.colorbar( shrink=.8 ,extend='both')
#plt.flag()
plt.savefig(str(fileout)+ '.png')
#plt.show()
plt.clf()
# Rotates coordinates given full set of euler angles. 1 Redundant angle for axially symmetric galaxy
#===============================================================================
# Maps coordinates in the output image to those of the input map via inverse
# euler rotation matrix. See wikipedia article on "Rotation Matrix". Input is
# the rotated coordinate vector (x',y',z') followed by the euler rotation angles
# for the galaxy. Returns (x,y,z) for the emissivity map (x^2+y^2=r).
#===============================================================================
def rotate_coordinates(vector,phi,theta,psi):
import math
x = vector[0]*math.cos(theta)*math.cos(psi) + vector[1]*math.cos(theta)*math.sin(psi) - vector[2]*math.sin(theta)
y = vector[0]*(-math.cos(phi)*math.sin(psi)+math.sin(phi)*math.sin(theta)*math.cos(psi)) + vector[1]*(math.cos(phi)*math.cos(psi)+math.sin(phi)*math.sin(theta)*math.sin(psi)) + vector[2]*math.sin(phi)*math.cos(theta)
z = vector[0]*(math.sin(phi)*math.sin(psi)+math.cos(phi)*math.sin(theta)*math.cos(psi))+vector[1]*(-math.sin(phi)*math.cos(psi)+math.cos(phi)*math.sin(theta)*math.sin(psi))+vector[2]*math.cos(phi)*math.cos(theta)
return (x,y,z)
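# Sanity check (illustrative, not part of the original script): with
# phi = theta = 0 and psi = pi/2, the transposed (inverse) Euler rotation
# above maps the x-axis onto -y, as expected.
import math
print(rotate_coordinates((1.0, 0.0, 0.0), 0.0, 0.0, math.pi / 2))
# -> approximately (0.0, -1.0, 0.0)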
from scipy import *
import sys
sys.path.append('./pyfits/lib')
def calcz_flux(emissFilename):
import pyfits, numpy, math #@UnresolvedImport
import os, sys
hdulist = pyfits.open(emissFilename, mode='update')
scidata = hdulist[0].data
eBin = 23 # 1.49 ghz bin
dimen = scidata.shape
iRange = int(dimen[0])
jRange = int(dimen[1])
kRange = int(dimen[2])
lRange = int(dimen[3])
#===============================================================================
# Used for contour flux comparison
#===============================================================================
def calc_emiss_limited(emissFilename, fileout, r_kpc, z_kpc, inclination, objectDist, frequency, hpbw,contour_n,width):
import pyfits, numpy, math #@UnresolvedImport
import os, sys
from sys import stdout
pc2cm = 3.08568025e18
kpc2cm = 3.08568025e21
sqArcSecPerSr = 42545949625.0 # Square arcseconds per sr
# Determine which energy bin to use based on the frequency passed in.
vSyncLow = 1.02026e8 #These are found in the GALDEF File
vSyncFactor = 1.125 #These are found in the GALDEF File
# Determine correct energy bin
eBin = int(round(math.log(frequency/vSyncLow)/math.log(vSyncFactor)))
print 'Selected Frequency:' + str(frequency/1e9) + 'GHz \nEnergy bin: ' + str(eBin)
# generate a square image.
outRes = 300 # Number of pixels for square output image was 300 for paper
num_zSteps = 100 # Number of z-steps along the integration line
# load emissivity data
hdulist = pyfits.open(emissFilename, mode='update')
scidata = hdulist[0].data
hdulist.info()
# Read FITS header to get dimensions of emissivity map (pixels)
dimen = scidata.shape
iRange = int(dimen[0])
jRange = int(dimen[1])
kRange = int(dimen[2])
lRange = int(dimen[3])
print "Ranges are", iRange, jRange, kRange, lRange
if (eBin>=jRange):
print "Energy out of range. Exiting..."
return
#===========================================================================
# We will now choose a coordinate system of (x',y',z') with the +z' face of a
# cube parallel to the observer (output image) so that we integrate along z'
# axis. Thus we have a cylindrical input map rotated in a cube. This cube is
# chosen to have side length of 1.5*the diameter of the cylinder. Points in
# the primed system are mapped to points in the unprimed system via an inverse
# euler rotation. Rotations are unitary so the transpose of the standard
# rotation matrix yields the inverse.
#===========================================================================
# Calculate the physical side length of the output map based on object distance and angular size of output.
sideLength = 40 # in kpc
print 'Output sidelength (kpc): ' , sideLength
kpcPerDeltaZ = float(sideLength)/num_zSteps # Physical distance for z integration interval
kpcPerPixelInZ = float(z_kpc)/kRange # Input Z has roughly 10:1 aspect ratio z:r
kpcPerPixelInR = float(r_kpc)/lRange #
kpcPerPixelOut = float(sideLength)/float(outRes) # Output has square aspect ratio
cmPerPixelOut = kpcPerPixelOut*kpc2cm # Output cm/pixel
solidAngle = 1/((objectDist*kpc2cm)**2.0) # 4 pi * ratio of 1 cm^2 to the entire sphere given distance
volPerPixelOut = (cmPerPixelOut**2.0)*kpcPerDeltaZ*kpc2cm # one integration volume in cm^3
srPerPixel = (kpcPerPixelOut/(objectDist))**2 # 4 pi * ratio of areas
SqArcSecPerPixel = sqArcSecPerSr*srPerPixel # ArcSec^2/Pixel
pixelPerArcSecSq = 1/(SqArcSecPerPixel) # pixel/ArcSec^2 (Ratio of areas times 4 pi * conversion)
beamConversion = pixelPerArcSecSq * (math.pi * (hpbw/2)**2) / math.log(2) # pixel/ArcSec^2 * arcSec^2/beam
#pixPerBeam = (math.pi * (hpbw/2)**2)/SqArcSecPerPixel / math.log(2)
#print 'ppb', pixPerBeam
print 'ArcSec^2/pixel: ' + str(1/pixelPerArcSecSq)
print 'Beam Conversion: ' + str(beamConversion)
image = numpy.zeros((outRes, outRes))
print 'Beginning Integration'
for i in range(0,outRes): # x loop
for j in range(0,outRes): # y loop
flux = 0.0 # total flux sum
for k in range (0,num_zSteps): # z-integration
# Shift coordinates so they are centered on galaxy and convert to kpc before transforming to emiss. coordinates.
outPosition = ((i-outRes/2.0)*kpcPerPixelOut,(j-outRes/2.0)*kpcPerPixelOut,(k-num_zSteps/2.0)*kpcPerDeltaZ)
inPosition = rotate_coordinates(outPosition,inclination[0]/180.0*math.pi,0,inclination[1]/180.0*math.pi) # Rotate to input emissivity file coords.
r = math.sqrt(inPosition[0]**2.0+inPosition[1]**2.0) # Calc r in kpc
z = inPosition[2] # Calc z in kpc
if (abs(z) <= z_kpc/2.0 and r <= float(r_kpc)): # Check that we are within the bounds of the input image.
xPixel = int(r/kpcPerPixelInR) # Calc the actual pixel coordinates.
yPixel = int((z+z_kpc/2.0)/kpcPerPixelInZ) # Calc the actual pixel coordinates.
# Ensure we have a valid coordinate. The edge flux may be underestimated since we are truncating, but assuming the emissivity file is very small at the boundaries this won't be an issue.
if (xPixel < lRange and yPixel < kRange):
density = scidata[0,eBin,yPixel, xPixel] # Read density from file
flux += volPerPixelOut*density # Volume*energy density
if(flux!=0):
image[j,i] = flux*frequency*solidAngle # Image in erg/s/cm^2
# Monitor Progress in terminal.
sys.stdout.write('\r' + str(int(float(i)/outRes*100+1.0)))
sys.stdout.flush()
print 'Integration Complete...\n'
print 'Total Luminosity (erg/s):' + str(numpy.sum(image)/solidAngle*4*math.pi)
print 'Total Flux (Jy):' + str(numpy.sum(image)*1.0e23/frequency) # in Jy
print 'Complete. FITS image output to ' + fileout
#===========================================================================
# Write array to FITS image
#===========================================================================
hdu = pyfits.PrimaryHDU(beamConversion*image*1.0e23*1000/frequency) # in mJy/beam
hdulist = pyfits.HDUList([hdu])
if os.access(fileout, os.F_OK ): os.remove(fileout)
hdulist.writeto(fileout)
#===============================================================================
# Parameters: emissivity filename, FITS output filename, physical radius of the
# emissivity file, total physical height (z) of the emissivity file (i.e. +-5 kpc
# would take arg 10), inclination in degrees, physical distance to the object
# in kpc, frequency, the instrument half-power beam width (hpbw) in arcsec,
# the faintest contour n, and the width in arc-minutes of the output plot.
#===============================================================================
def intEllipse(FITSFile, semiMaj,semiMin,dist,posAng,hpbw):
import pyfits, numpy, math #@UnresolvedImport
import os, sys
from sys import stdout
pc2cm = 3.08568025e18
kpc2cm = 3.08568025e21
sqArcSecPerSr = 42545949625.0 # Square arcseconds per sr
# Determine correct energy bin
# load emissivity data
hdulist = pyfits.open(FITSFile, mode='update')
scidata = hdulist[0].data
#hdulist.info()
# Read FITS header to get dimensions of emissivity map (pixels)
dimen = scidata.shape
jRange = int(dimen[0])
kRange = int(dimen[1])
#print "Ranges are", jRange, kRange
sideLength = 40.0 # in kpc
SideLengthArcMin = math.atan(sideLength/float(dist))*180./math.pi*60. #side length in arc minutes
# Calculate the physical side length of the output map based on object distance and angular size of output.
#print 'Output sidelength (kpc): ' , sideLength
armMinPerPixel = float(SideLengthArcMin)/float(kRange)
semiMin = semiMin/armMinPerPixel/2.0
semiMaj = semiMaj/armMinPerPixel/2.0
kpcPerPixelOut = float(sideLength)/float(kRange) # Output has square aspect ratio
cmPerPixelOut = kpcPerPixelOut*kpc2cm # Output cm/pixel
solidAngle = 1/((dist*kpc2cm)**2.0) # 4 pi * ratio of 1 cm^2 to the entire sphere given distance
srPerPixel = (kpcPerPixelOut/(dist))**2 # 4 pi * ratio of areas
SqArcSecPerPixel = sqArcSecPerSr*srPerPixel # ArcSec^2/Pixel
pixelPerArcSecSq = 1/(SqArcSecPerPixel) # pixel/ArcSec^2 (Ratio of areas times 4 pi * conversion)
beamConversion = pixelPerArcSecSq * (math.pi * (hpbw/2)**2) / math.log(2) # pixel/ArcSec^2 * arcSec^2/beam
image = numpy.zeros((kRange, kRange))
posAng = -posAng/180*math.pi # inverse rotation and convert to rad.
flux = 0.0 # total flux sum
for i in range(0,kRange): # x loop
for j in range(0,kRange): # y loop
x = float(i-kRange/2.0)
y = float(j-kRange/2.0)
if ( ((x*math.cos(posAng) + y*math.sin(posAng))**2.0/semiMin**2.0 + (x* math.sin(posAng)-y*math.cos(posAng))**2.0/semiMaj**2.0)<=1):
image[i,j]= scidata[i,j]
flux += scidata[i, j]
print 'flux = ', flux/beamConversion, ' mJy, Distance = ' , dist
# import matplotlib.pyplot as plt #@UnresolvedImport
# import matplotlib.image as mpimg #@UnresolvedImport
# import matplotlib.cm as cm
# from matplotlib.patches import Ellipse
# import matplotlib.patches as mpatches
# import matplotlib
# from matplotlib.collections import PatchCollection
#
# imgplot = plt.imshow(image)
#
# plt.show()
#
#print '\nm31\n'
#intEllipse('m31_out_DM_ONLY.fits',190.0,60.0,700.0,-55,48.)
#
#print '\n2683\n'
#intEllipse('ngc2683_out_DM_ONLY.fits',9.3,2.2,10182.0,-46.5,48.)
#intEllipse('ngc2683_out_DM_ONLY.fits',9.3,2.2,7959.0,-46.5,48.)
#intEllipse('ngc2683_out_DM_ONLY.fits',9.3,2.2,12405.0,-46.5,48.)
#
#print '\n4448\n'
#intEllipse('ngc4448_out_DM_ONLY.fits',3.9,1.4,13000.0,7.9,60.)
#intEllipse('ngc4448_out_DM_ONLY.fits',3.9,1.4,9700.0,7.9,60.)
#intEllipse('ngc4448_out_DM_ONLY.fits',3.9,1.4,47400.0,7.9,60.)
#
#print '\n4866\n'
#intEllipse('ngc4866_out_DM_ONLY.fits',6.3,1.3,21900.0,7.9,54.)
#intEllipse('ngc4866_out_DM_ONLY.fits',6.3,1.3,16000.0,7.9,54.)
#intEllipse('ngc4866_out_DM_ONLY.fits',6.3,1.3,29500.0,7.9,54.)
#
#print '\n1350\n'
#intEllipse('ngc1350_out_DM_ONLY.fits',5.2,2.8,20938.0,90.0,48.)
#
#print '\n7814\n'
#intEllipse('ngc7814_out_DM_ONLY.fits',5.5,2.3,17171.0,45.0,48.)
#
#print '\n4394\n'
#intEllipse('ngc4394_out_DM_ONLY.fits',3.6,3.2,16800.0,50.0,54.)
#print '\n4698\n'
#intEllipse('ngc4698_out_DM_ONLY.fits',4.0,2.5,23650.0,80.0,54.)
#intEllipse('ngc4698_out_DM_ONLY.fits',4.0,2.5,16909.0,80.0,54.)
#intEllipse('ngc4698_out_DM_ONLY.fits',4.0,2.5,30391.0,80.0,54.)
# m31
# PARAMETERS: emissFilename, fileout, r_kpc, z_kpc, inclination, objectDist, frequency, hpbw,contour_n,width
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_28.4_16.0_1e+29_0.3_1.0_1.0_22.0', 'm31_out_DM_ONLY.fits',28.4,32.0,[72.2,-55],700.0, 1.485E9, 48.,-2.0,40.0)
##NGC 2683 # used 8000 initially
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_20.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc2683_out_DM_ONLY.fits',20.0,32.0,[82.8,-46.5],10182., 1.485E9, 48.,-3.0,8.0)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_20.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc2683_out_DM_ONLY_closer.fits',20.0,32.0,[82.8,-46.5],7959.0, 1.485E9, 48.,-3.0,8.0)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_20.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc2683_out_DM_ONLY_farther.fits',20.0,32.0,[82.8,-46.5],12405.0, 1.485E9, 48.,-3.0,8.0)
##NGC 4448
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_10.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc4448_out_DM_ONLY.fits',10.0,32.0,[71.0,7.9],13000.0, 1.485E9, 60.,-3.0,16.0)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_10.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc4448_out_DM_ONLY_closer.fits',10.0,32.0,[71.0,7.9],9700.0, 1.485E9, 60.0,-3.0,16.0)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_10.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc4448_out_DM_ONLY_farther.fits',10.0,32.0,[71.0,7.9],47400.0, 1.485E9, 60.0,-3.0,16.0)
#
#
#
## NGC 4698 # sigma dist = 6.741 (28%)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_20.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc4698_out_DM_ONLY.fits',20.0,32.0,[73.44,80.0],23650.0, 1.485E9, 54.0,-3.0,16.0)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_20.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc4698_out_DM_ONLY_closer.fits',20.0,32.0,[73.44,80.0],16909.0, 1.485E9, 54.0,-3.0,16.0)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_20.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc4698_out_DM_ONLY_farther.fits',20.0,32.0,[73.44,80.0],30391.0, 1.485E9, 54.0,-3.0,16.0)
#
#
## NGC 1350 sigma dist = 3.612 (17%)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_28.4_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc1350_out_DM_ONLY.fits',28.4,32.0,[64.79,90.0],20938.0, 1.485E9, 48.0,-3.0,16.0)
#
## NGC 4394 # Only 1 measurement.
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_15.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc4394_out_DM_ONLY.fits',15.0,32.0,[16.55,50.0],16800.0, 1.485E9, 54.0,-3.0,16.0)
#
## NGC 7814 (22% dist variation)
#calc_emiss('./diffsynchrotron_emiss_54_grid.60.0_4.0_1.8_20.0_16.0_1e+29_0.3_1.0_1.0_22.0', 'ngc7814_out_DM_ONLY.fits',20.0,32.0,[90.,45.0],17171.0, 1.485E9, 48.0,-3.0,16.0)
#===============================================================================
# TESTING
#===============================================================================
#for i in (10,20,30,40):
# vSyncLow = 1.02026e8 #These are found in the GALDEF File
# vSyncFactor = 1.125 #These are found in the GALDEF File
# frequency = vSyncLow*vSyncFactor**i
# calc_emiss('./sync.emiss.extragalactic_DM_Only.fits', 'm31_out_DM_ONLY_test2.fits',20.0,16.0,[0,0],7000.0, frequency, 48.0,-2.0,40.0)
def calc_emiss2(emissFilename, fileout, r_kpc, z_kpc, inclination, objectDist, frequency, hpbw,contour_n,width):
import pyfits, numpy, math #@UnresolvedImport
import os, sys
from sys import stdout
pc2cm = 3.08568025e18
kpc2cm = 3.08568025e21
sqArcSecPerSr = 42545949625.0 # Square arcseconds per sr
# Determine which energy bin to use based on the frequency passed in.
vSyncLow = 1.02026e8 #These are found in the GALDEF File
vSyncFactor = 1.125 #These are found in the GALDEF File
# Determine correct energy bin
eBin = int(round(math.log(frequency/vSyncLow)/math.log(vSyncFactor)))
print 'Selected Frequency:' + str(frequency/1e9) + 'GHz \nEnergy bin: ' + str(eBin)
# generate a square image.
#outRes = 200 # Number of pixels for square output image
#num_zSteps = 100 # Number of z-steps along the integration line
outRes=15
num_zSteps=15
# load emissivity data
hdulist = pyfits.open(emissFilename, mode='update')
scidata = hdulist[0].data
hdulist.info()
# Read FITS header to get dimensions of emissivity map (pixels)
dimen = scidata.shape
iRange = int(dimen[0])
jRange = int(dimen[1])
kRange = int(dimen[2])
lRange = int(dimen[3])
print "Ranges are", iRange, jRange, kRange, lRange
if (eBin>=jRange):
print "Energy out of range. Exiting..."
return
#===========================================================================
# We will now choose a coordinate system of (x',y',z') with the +z' face of a
# cube parallel to the observer (output image) so that we integrate along z'
# axis. Thus we have a cylindrical input map rotated in a cube. This cube is
# chosen to have side length of 1.5*the diameter of the cylinder. Points in
# the primed system are mapped to points in the unprimed system via an inverse
# euler rotation. Rotations are unitary so the transpose of the standard
# rotation matrix yields the inverse.
#===========================================================================
# Calculate the physical side length of the output map based on object distance and angular size of output.
sideLength = 50 # in kpc
print 'Output sidelength (kpc): ' , sideLength
kpcPerDeltaZ = float(sideLength)/num_zSteps # Physical distance for z integration interval
kpcPerPixelInZ = float(z_kpc)/kRange # Input Z has roughly 10:1 aspect ratio z:r
kpcPerPixelInR = float(r_kpc)/lRange
kpcPerPixelOut = float(sideLength)/float(outRes) # Output has square aspect ratio
cmPerPixelOut = kpcPerPixelOut*kpc2cm # Output cm/pixel
solidAngle = 1/((objectDist*kpc2cm)**2.0) # 4 pi * ratio of 1 cm^2 to the entire sphere given distance
volPerPixelOut = (cmPerPixelOut**2.0)*kpcPerDeltaZ*kpc2cm # one integration volume in cm^3
# The initial code was missing this factor of 4*pi
srPerPixel = 4*math.pi* (kpcPerPixelOut/objectDist)**2 # 4 pi * ratio of areas
SqArcSecPerPixel = sqArcSecPerSr*srPerPixel # ArcSec^2/Pixel
## Previous
#pixelPerArcSecSq = objectDist**2/((kpcPerPixelOut**2))/sqArcSecPerSr # pixel/ArcSec^2
pixelPerArcSecSq = 1/(SqArcSecPerPixel) # pixel/ArcSec^2 (Ratio of areas times 4 pi * conversion)
beamConversion = pixelPerArcSecSq * (math.pi * (hpbw/2)**2) # pixel/ArcSec^2 * arcSec^2/beam
print 'ArcSec^2/pixel: ' + str(1/pixelPerArcSecSq)
print 'Beam Conversion: ' + str(beamConversion)
image = numpy.zeros((outRes, outRes))
print 'Beginning Integration'
xs=[]
ys=[]
zs=[]
for i in range(0,outRes): # x loop
for j in range(0,outRes): # y loop
flux = 0.0 # total flux sum
for k in range (0,num_zSteps): # z-integration
# Shift coordinates so they are centered on galaxy and convert to kpc before transforming to emiss. coordinates.
outPosition = ((i-outRes/2.0)*kpcPerPixelOut,(j-outRes/2.0)*kpcPerPixelOut,(k-num_zSteps/2.0)*kpcPerDeltaZ)
inPosition = rotate_coordinates(outPosition,inclination[0]/180.0*math.pi,0,inclination[1]/180.0*math.pi) # Rotate to input emissivity file coords.
r = math.sqrt(inPosition[0]**2.0+inPosition[1]**2.0) # Calc r in kpc
z = inPosition[2] # Calc z in kpc
if (abs(z) <= z_kpc/2.0 and r <= float(r_kpc)): # Check that we are within the bounds of the input image.
xPixel = int(r/float(kpcPerPixelInR)) # Calc the actual pixel coordinates
yPixel = int(z+z_kpc/2.0)/float(kpcPerPixelInZ) # Calc the actual pixel coordinates
xs.append(outPosition[0])
ys.append(outPosition[1])
zs.append(outPosition[2])
# Ensure we are have a valid coordinate. The edge flux may be underestimated since we are truncating, but assuming the emiss file is very small at boundaries this won't be an issue.
if (xPixel < lRange and yPixel < kRange):
#density = scidata[0,eBin,yPixel, xPixel] # Read density from file
density = 1
flux += volPerPixelOut*density # Volume*energy density
if(flux!=0):
#image[j,i] = flux*frequency*solidAngle # Image in erg/s/cm^2
image[j,i] = flux # Image in erg/s/cm^2
# Monitor Progress in terminal.
sys.stdout.write('\r' + str(int(float(i)/outRes*100)))
sys.stdout.flush()
print '\n'
print 'Total Volume:' + str(numpy.sum(image))
print 'Total Flux (Jy):' + str(numpy.sum(image)*1.0e23/frequency) # in Jy
print 'Complete. FITS image output to ' + fileout
import numpy as np
from mpl_toolkits.mplot3d import Axes3D#@UnresolvedImport
import matplotlib.pyplot as plt#@UnresolvedImport
fig = plt.figure()
#ax = fig.add_subplot(111,projection='3d')
ax = Axes3D(fig)
ax.scatter(xs,ys,zs)
ax.set_zlim3d([-25, 25])
ax.set_xlim3d([-25, 25])
ax.set_ylim3d([-25, 25])
#plt.show()
#TESTING PURPOSES
#calc_emiss2('./sync.emiss.extragalactic_DM_Only.fits', 'm31_out_DM_ONLY_test2.fits',20.0,16.0,[25.0,0],7000.0, 1.485E9, 48.0,-2.0,40.0)
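
# rotate_coordinates is called above but is not defined in this slice of the
# file. Below is a minimal sketch of what it could look like, assuming a
# Z-X-Z Euler-angle convention and using the transpose of the rotation matrix
# as its inverse (per the comment block above: rotations are unitary, so the
# transpose yields the inverse). The convention and signature are assumptions;
# the original implementation may differ.
def rotate_coordinates(p, alpha, beta, gamma):
    import math
    ca, sa = math.cos(alpha), math.sin(alpha)
    cb, sb = math.cos(beta), math.sin(beta)
    cg, sg = math.cos(gamma), math.sin(gamma)
    # Standard Z-X-Z Euler matrix R = Rz(gamma) * Rx(beta) * Rz(alpha).
    R = [[ cg*ca - sg*cb*sa,  cg*sa + sg*cb*ca, sg*sb],
         [-sg*ca - cg*cb*sa, -sg*sa + cg*cb*ca, cg*sb],
         [ sb*sa,            -sb*ca,            cb   ]]
    # Apply R transposed (= R inverse) to the point p.
    return tuple(R[0][i]*p[0] + R[1][i]*p[1] + R[2][i]*p[2] for i in range(3))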
| 50.697531 | 220 | 0.59004 | 4,459 | 32,852 | 4.271586 | 0.117291 | 0.004305 | 0.01512 | 0.016381 | 0.840342 | 0.816087 | 0.805009 | 0.800021 | 0.788313 | 0.773088 | 0 | 0.077766 | 0.241812 | 32,852 | 647 | 221 | 50.775889 | 0.686928 | 0.473122 | 0 | 0.709571 | 0 | 0 | 0.055975 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.09571 | null | null | 0.115512 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1c2280d177161e424dba3fec6eb613011d70c05c | 230 | py | Python | clientes/models.py | alantinoco/django-crmsmart | f8bd3404e0dfdf4a2976ec8bbdaee27a012f9981 | ["MIT"] | null | null | null | clientes/models.py | alantinoco/django-crmsmart | f8bd3404e0dfdf4a2976ec8bbdaee27a012f9981 | ["MIT"] | null | null | null | clientes/models.py | alantinoco/django-crmsmart | f8bd3404e0dfdf4a2976ec8bbdaee27a012f9981 | ["MIT"] | null | null | null |
from django.contrib import auth
from django.contrib.auth import authenticate
from django.db import models
from django.contrib.auth.models import User
from django.db.models.fields import DateTimeField
from escola.models import *
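
# The record above preserves only the import header of clientes/models.py;
# the model definitions themselves are not part of this slice. As a purely
# hypothetical sketch of how these imports are typically combined (the
# Cliente name and its fields are illustrative only, not taken from the
# django-crmsmart repository):
class Cliente(models.Model):
    usuario = models.ForeignKey(User, on_delete=models.CASCADE)  # owning auth user
    nome = models.CharField(max_length=120)                      # display name
    criado_em = models.DateTimeField(auto_now_add=True)          # creation timestamp

    def __str__(self):
        return self.nome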
| 25.555556 | 49 | 0.834783 | 34 | 230 | 5.647059 | 0.352941 | 0.260417 | 0.265625 | 0.21875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113043 | 230 | 8 | 50 | 28.75 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1c44bc91b5ba84c01e22b26aeee62ac21ef1981b | 59,408 | py | Python | BD09.py | bdjahied/BD09 | 1cf50087a839b35261311cd95a9725e365ff1b90 | ["Unlicense"] | null | null | null | BD09.py | bdjahied/BD09 | 1cf50087a839b35261311cd95a9725e365ff1b90 | ["Unlicense"] | null | null | null | BD09.py | bdjahied/BD09 | 1cf50087a839b35261311cd95a9725e365ff1b90 | ["Unlicense"] | null | null | null |
# (BD09.py ships as an obfuscated, byte-compiled wrapper; the raw marshal
# bytes are elided here. Only the embedded docstring below and the decoder
# further down are human-readable.)
"""
CODED BY BD JAHIED
FACEBOOK : BD JAHIED
CONTACT : https://www.facebook.com/jahied.bau
"""
hunterboy = ['+++++++++++++++++++++++++++++++++++',
             '-----------------------',
             '&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&',
             '++++++++++++++++++++++++++++++++'
             # ... the same four-string block repeats verbatim roughly 250
             # more times (elided). Note the missing comma after every fourth
             # entry, which splices adjacent literals into longer strings.
             # The payload lives entirely in the *lengths* of the entries
             # (see the exec line below), so the elided repetition is
             # effectively binary data with no readable content.
             ]
exec("".join([chr(len(i)) for i in hunterboy]))
# (Second stage elided: the decoded source unpacks and runs a further payload
# via marshal/zlib/base64 -- the tokens "marshal", "zlib", "base64",
# "b64decode", "decompress", "__import__", "loads", and the file name
# "obf.py" are legible in the original dump, followed by a multi-kilobyte
# base64 blob that is not recoverable as text.)
| 58.587771
| 24,162
| 0.395267
| 986
| 59,408
| 23.810345
| 0.90568
| 0.072411
| 0.08732
| 0.160157
| 0.16957
| 0.16957
| 0.168676
| 0.168676
| 0.168676
| 0.168676
| 0
| 0.075981
| 0.017472
| 59,408
| 1,013
| 24,163
| 58.645607
| 0.326126
| 0
| 0
| 0.98618
| 0
| 0
| 0.517607
| 0.517607
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.000987
| null | null | 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 1c506287a06a54377d0e12c42bd51129eed95374
| 181
| py
| Python
| python/projetos_dev/python_programacao_procedural/aula24_pacotes/vendas/calcula_preco.py
| jonfisik/Projects
| 7847f32c9e333cfca31cc127db175d9b4080ed0f
| ["MIT"] | 2
| 2020-09-05T22:25:37.000Z
| 2021-06-01T21:34:54.000Z
| python/projetos_dev/python_programacao_procedural/aula24_pacotes/vendas/calcula_preco.py
| jonfisik/Projects
| 7847f32c9e333cfca31cc127db175d9b4080ed0f
| ["MIT"] | null | null | null
| python/projetos_dev/python_programacao_procedural/aula24_pacotes/vendas/calcula_preco.py
| jonfisik/Projects
| 7847f32c9e333cfca31cc127db175d9b4080ed0f
| ["MIT"] | null | null | null |
def aumento(valor, porcentagem):
    # "aumento" (increase): raises `valor` by the given percentage.
    r = valor + (valor * (porcentagem / 100))
    return r


def reducao(valor, porcentagem):
    # "reducao" (reduction): lowers `valor` by the given percentage.
    r = valor - (valor * (porcentagem / 100))
    return r
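# A minimal usage sketch (not part of the source file; values illustrative):
#   preco = aumento(100.0, 10)   # -> 110.0
#   preco = reducao(preco, 50)   # -> 55.0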
| 22.625
| 43
| 0.640884
| 22
| 181
| 5.272727
| 0.363636
| 0.551724
| 0.293103
| 0.37931
| 0.827586
| 0.827586
| 0.827586
| 0.827586
| 0.827586
| 0
| 0
| 0.042857
| 0.226519
| 181
| 7
| 44
| 25.857143
| 0.785714
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 12
| 1c5ef6da1ea5adf8c183a56d4a9193ed3e2913d8
| 209,466
| py
| Python
| src/nidigital/metadata/functions.py
| alexdubois-ni/nimi-python
| 8c8a5e8cbb505c62393bb36e53f91c4e091c7236
| ["MIT"] | null | null | null
| src/nidigital/metadata/functions.py
| alexdubois-ni/nimi-python
| 8c8a5e8cbb505c62393bb36e53f91c4e091c7236
| ["MIT"] | null | null | null
| src/nidigital/metadata/functions.py
| alexdubois-ni/nimi-python
| 8c8a5e8cbb505c62393bb36e53f91c4e091c7236
| ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# This file is generated from NI-Digital Pattern Driver API metadata version 19.5.0d7
functions = {
'Abort': {
'documentation': {
'description': 'Stops bursting the pattern.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
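    # Sketch (assumption, not from this file: nimi-python's codegen turns these
    # entries into methods on nidigital.Session; resource name is illustrative):
    #   import nidigital
    #   session = nidigital.Session('PXI1Slot3')
    #   session.abort()   # stop bursting the pattern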
'AbortKeepAlive': {
'documentation': {
'description': 'Stops the keep alive pattern if it is currently running. If a pattern burst is in progress, the function aborts the pattern burst. If you start a new pattern burst while a keep alive pattern is running, the keep alive pattern runs to the last keep alive vector, and the new pattern burst starts on the next cycle.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ApplyLevelsAndTiming': {
'documentation': {
'description': 'Applies digital levels and timing values defined in previously loaded levels and timing sheets. When applying a levels sheet, only the levels specified in the sheet are affected. Any levels not specified in the sheet remain unchanged. When applying a timing sheet, all existing time sets are deleted before the new time sets are loaded.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Comma-delimited list of strings in the form of ``siteN`` , where ``N`` is the site number. If you enter an empty string, this function applies the levels and initial states to all sites.\n'
},
'name': 'siteList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Name of the levels sheet to apply. Use the name of the sheet or pass the absolute file path you use in the niDigital_FancyLoadSpecificationsLevelsAndTiming function. The name of the levels sheet is the file name without the directory and file extension.\n'
},
'name': 'levelsSheet',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Name of the timing sheet to apply. Use the name of the sheet or pass the absolute file path that you use in the niDigital_FancyLoadSpecificationsLevelsAndTiming function. The name of the timing sheet is the file name without the directory and file extension.\n'
},
'name': 'timingSheet',
'type': 'ViConstString'
},
{
'default_value': 'None',
'direction': 'in',
'documentation': {
'description': 'Comma-delimited list of pins, pin groups, or channels to initialize to a high state.\n'
},
'name': 'initialStateHighPins',
'python_api_converter_name': 'convert_repeated_capabilities_without_prefix',
'type': 'ViConstString',
'type_in_documentation': 'basic sequence types or str',
},
{
'default_value': 'None',
'direction': 'in',
'documentation': {
'description': 'Comma-delimited list of pins, pin groups, or channels to initialize to a low state.\n'
},
'name': 'initialStateLowPins',
'python_api_converter_name': 'convert_repeated_capabilities_without_prefix',
'type': 'ViConstString',
'type_in_documentation': 'basic sequence types or str',
},
{
'default_value': 'None',
'direction': 'in',
'documentation': {
'description': 'Comma-delimited list of pins, pin groups, or channels to initialize to a non-drive state (X)\n'
},
'name': 'initialStateTristatePins',
'python_api_converter_name': 'convert_repeated_capabilities_without_prefix',
'type': 'ViConstString',
'type_in_documentation': 'basic sequence types or str',
}
],
'returns': 'ViStatus'
},
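    # Sketch (assumption: generated as Session.apply_levels_and_timing; the
    # sheet names and site numbers below are illustrative):
    #   session.sites[0, 1].apply_levels_and_timing('MyLevelsSheet', 'MyTimingSheet')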
'ApplyTDROffsets': {
'documentation': {
'description': 'Applies the correction for propagation delay offsets to a digital pattern instrument. Use this function to apply TDR offsets that are stored from a past measurement or are measured by means other than the niDigital_TDR function. Also use this function to apply correction for offsets if the **applyOffsets** input of the niDigital_TDR function was set to False at the time of measurement.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Number of offsets.\n'
},
'name': 'numOffsets',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'TDR offsets to apply, in seconds. Specify an offset for each pin or channel in the repeated capabilities. If the repeated capabilities contain pin names, you must specify offsets for each site in the channel map per pin.\n'
},
'name': 'offsets',
'python_api_converter_name': 'convert_timedeltas_to_seconds_real64',
'size': {
'mechanism': 'len',
'value': 'numOffsets'
},
'type': 'ViReal64[]',
'type_in_documentation': 'basic sequence of hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
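    # Sketch (assumption: generated as apply_tdr_offsets; per the converter
    # above, plain floats are interpreted as seconds; values illustrative):
    #   session.channels['PXI1Slot3/0-1'].apply_tdr_offsets([1.2e-9, 1.3e-9])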
'BurstPattern': {
'codegen_method': 'private',
'documentation': {
'description': 'Uses the **startLabel** you specify to burst the pattern on the sites you specify and provides the option to wait for the burst to complete. Digital pins retain their state at the end of a pattern burst until the first vector of a subsequent pattern burst, a call to niDigital_WriteStatic, or a call to niDigital_ApplyLevelsAndTiming.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'The sites on which to burst the pattern as a comma-delimited list of strings in the form site\\ ``N``, where ``N`` is the site number. If you specify an empty string, the pattern is burst on all sites.\n'
},
'name': 'siteList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Pattern name or exported pattern label from which to start bursting the pattern.\n'
},
'name': 'startLabel',
'type': 'ViConstString'
},
{
'default_value': True,
'direction': 'in',
'documentation': {
'description': 'A Boolean that specifies whether to select the digital function for the pins in the pattern prior to bursting.\n'
},
'name': 'selectDigitalFunction',
'type': 'ViBoolean'
},
{
'default_value': True,
'direction': 'in',
'documentation': {
'description': 'A Boolean that indicates whether to wait until the bursting is complete.\n'
},
'name': 'waitUntilDone',
'type': 'ViBoolean'
},
{
'default_value': 'hightime.timedelta(seconds=10.0)',
'direction': 'in',
'documentation': {
'description': 'Maximum time (in seconds) allowed for this function to complete. If this function does not complete within this time interval, this function returns an error.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
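    # Sketch (assumption: the public method generated from the Fancy wrapper is
    # Session.burst_pattern; label and site numbers are illustrative):
    #   session.sites[0, 1].burst_pattern('main_pattern',
    #       select_digital_function=True, wait_until_done=True, timeout=10.0)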
'ClearError': {
'codegen_method': 'no',
'documentation': {
'description': 'Clears the error information for the current execution thread and the IVI session you specify. If you pass VI_NULL for the **vi** parameter, this function clears the error information only for the current execution thread.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'ClockGenerator_Abort': {
'documentation': {
'description': 'Stops clock generation on the specified channel(s) or pin(s) and pin group(s).\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
}
],
'python_name': 'clock_generator_abort',
'returns': 'ViStatus'
},
'ClockGenerator_GenerateClock': {
'documentation': {
'description': 'Configures clock generator frequency and initiates clock generation on the specified channel(s) or pin(s) and pin group(s).\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The frequency of the clock generation, in Hz.\n'
},
'name': 'frequency',
'type': 'ViReal64'
},
{
'default_value': True,
'direction': 'in',
'documentation': {
'description': 'A Boolean that specifies whether to select the digital function for the pins specified prior to starting clock generation.\n'
},
'name': 'selectDigitalFunction',
'type': 'ViBoolean'
}
],
'python_name': 'clock_generator_generate_clock',
'returns': 'ViStatus'
},
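    # Sketch (the 'python_name' above maps this to clock_generator_generate_clock;
    # channel name and frequency are illustrative):
    #   session.channels['PXI1Slot3/0'].clock_generator_generate_clock(
    #       frequency=25e6, select_digital_function=True)
    #   session.channels['PXI1Slot3/0'].clock_generator_abort()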
'ClockGenerator_Initiate': {
'codegen_method': 'no',
'documentation': {
'description': 'Initiates clock generation on the specified channel(s) or pin(s) and pin group(s).\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
}
],
'python_name': 'clock_generator_initiate',
'returns': 'ViStatus'
},
'Commit': {
'documentation': {
'description': 'Applies all previously configured pin levels, termination modes, clocks, triggers, and pattern timing to a digital pattern instrument. If you do not call the niDigital_Commit function, then the initiate function or the niDigital_FancyBurstPattern function will implicitly call this function for you. Calling this function moves the session from the Uncommitted state to the Committed state.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
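    # Sketch (assumption: generated as Session.commit; calling it explicitly
    # moves the session from Uncommitted to Committed before a burst):
    #   session.commit()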
'ConfigureActiveLoadLevels': {
'documentation': {
'description': 'Configures I\\ :sub:`OL`, I\\ :sub:`OH`, and V\\ :sub:`COM` levels for the active load on the pins you specify. The DUT sources or sinks current based on the level values. To enable active load, set the termination mode to NIDIGITAL_VAL_ACTIVE_LOAD. To disable active load, set the termination mode of the instrument to NIDIGITAL_VAL_HIGH_Z or NIDIGITAL_VAL_VTERM.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Maximum current that the DUT sinks while outputting a voltage below V\\ :sub:`COM`.\n'
},
'name': 'iol',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Maximum current that the DUT sources while outputting a voltage above V\\ :sub:`COM`.\n'
},
'name': 'ioh',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Commutating voltage level at which the active load circuit switches between sourcing current and sinking current.\n'
},
'name': 'vcom',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
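    # Sketch (assumption: generated as configure_active_load_levels; currents
    # in amps, voltage in volts, all values illustrative):
    #   session.pins['DataPins'].configure_active_load_levels(
    #       iol=0.002, ioh=-0.002, vcom=1.5)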
'ConfigurePatternBurstSites': {
'documentation': {
'description': 'Configures which sites burst the pattern on the next call to the initiate function. The pattern burst sites can also be modified through the repeated capabilities for the niDigital_FancyBurstPattern function. If a site has been disabled through the niDigital_DisableSites function, the site does not burst a pattern even if included in the pattern burst sites.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'A comma-delimited list of strings in the form of site\\ ``N``, where ``N`` is the site number. If you specify an empty string, all sites are configured for pattern bursting.\n'
},
'name': 'siteList',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'ConfigureTimeSetCompareEdgesStrobe': {
'documentation': {
'description': 'Configures the strobe edge time for the specified pins. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of pin and pin group names for which to configure the time set edges.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Time when the comparison happens within a vector period.\n'
},
'name': 'strobeEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
'ConfigureTimeSetCompareEdgesStrobe2x': {
'documentation': {
'description': 'Configures the compare strobes for the specified pins in the time set, including the 2x strobe. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of pin and pin group names for which to configure the time set edges.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Time when the comparison happens within a vector period.\n'
},
'name': 'strobeEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Time when the comparison happens for the second DUT cycle within a vector period.\n'
},
'name': 'strobe2Edge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
'ConfigureTimeSetDriveEdges': {
'documentation': {
'description': 'Configures the drive format and drive edge placement for the specified pins. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of pin and pin group names for which to configure the time set edges.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Drive format of the time set.\n\n- NIDIGITAL_VAL_NR: Non-return.\n- NIDIGITAL_VAL_RL: Return to low.\n- NIDIGITAL_VAL_RH: Return to high.\n- NIDIGITAL_VAL_SBC: Surround by complement.\n'
},
'enum': 'DriveFormat',
'name': 'format',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period for turning on the pin driver. This option applies only when the prior vector left the pin in a non-drive pin state (L, H, X, V, M, E). For the SBC format, this option specifies the delay from the beginning of the vector period at which the complement of the pattern value is driven.\n'
},
'name': 'driveOnEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period until the pattern data is driven to the pattern value. The ending state from the previous vector persists until this point.\n'
},
'name': 'driveDataEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period until the pin changes from the pattern data to the return value, as specified in the format.\n'
},
'name': 'driveReturnEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period to turn off the pin driver when the next vector period uses a non-drive symbol (L, H, X, V, M, E).\n'
},
'name': 'driveOffEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
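    # Sketch (assumption: generated as configure_time_set_drive_edges with the
    # DriveFormat enum above; edge placements in seconds, values illustrative):
    #   session.pins['DataPins'].configure_time_set_drive_edges('tset0',
    #       nidigital.DriveFormat.NR, 0.0, 10e-9, 0.0, 0.0)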
'ConfigureTimeSetDriveEdges2x': {
'documentation': {
'description': 'Configures the drive edges of the pins in the time set, including 2x edges. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of pin and pin group names for which to configure the time set edges.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Drive format of the time set.\n\n- NIDIGITAL_VAL_NR: Non-return.\n- NIDIGITAL_VAL_RL: Return to low.\n- NIDIGITAL_VAL_RH: Return to high.\n- NIDIGITAL_VAL_SBC: Surround by complement.\n'
},
'enum': 'DriveFormat',
'name': 'format',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period for turning on the pin driver. This option applies only when the prior vector left the pin in a non-drive pin state (L, H, X, V, M, E). For the SBC format, this option specifies the delay from the beginning of the vector period at which the complement of the pattern value is driven.\n'
},
'name': 'driveOnEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period until the pattern data is driven to the pattern value. The ending state from the previous vector persists until this point.\n'
},
'name': 'driveDataEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period until the pin changes from the pattern data to the return value, as specified in the format.\n'
},
'name': 'driveReturnEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period to turn off the pin driver when the next vector period uses a non-drive symbol (L, H, X, V, M, E).\n'
},
'name': 'driveOffEdge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period until the pattern data in the second DUT cycle is driven to the pattern value.\n'
},
'name': 'driveData2Edge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
},
{
'direction': 'in',
'documentation': {
'description': 'Delay, in seconds, from the beginning of the vector period until the pin changes from the pattern data in the second DUT cycle to the return value, as specified in the format.\n'
},
'name': 'driveReturn2Edge',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
'ConfigureTimeSetDriveFormat': {
'documentation': {
'description': 'Configures the drive format for the pins specified in the **pinList**. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of pin and pin group names for which to configure the time set edges.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Drive format of the time set.\n\n- NIDIGITAL_VAL_NR: Non-return.\n- NIDIGITAL_VAL_RL: Return to low.\n- NIDIGITAL_VAL_RH: Return to high.\n- NIDIGITAL_VAL_SBC: Surround by complement.\n'
},
'enum': 'DriveFormat',
'name': 'driveFormat',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ConfigureTimeSetEdge': {
'documentation': {
'description': 'Configures the edge placement for the pins specified in the pin list. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified digital pattern instrument handle\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of pin and pin group names for which to configure the time set edges.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Name of the edge.\n\n- NIDIGITAL_VAL_DRIVE_ON\n- NIDIGITAL_VAL_DRIVE_DATA\n- NIDIGITAL_VAL_DRIVE_RETURN\n- NIDIGITAL_VAL_DRIVE_OFF\n- NIDIGITAL_VAL_COMPARE_STROBE\n- NIDIGITAL_VAL_DRIVE_DATA2\n- NIDIGITAL_VAL_DRIVE_RETURN2\n- NIDIGITAL_VAL_COMPARE_STROBE2\n'
},
'enum': 'TimeSetEdgeType',
'name': 'edge',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The time from the beginning of the vector period in which to place the edge.\n'
},
'name': 'time',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
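    # Sketch (assumption: generated as configure_time_set_edge with a
    # TimeSetEdgeType enum mirroring the values above; values illustrative):
    #   session.pins['DataPins'].configure_time_set_edge('tset0',
    #       nidigital.TimeSetEdgeType.COMPARE_STROBE, 30e-9)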
'ConfigureTimeSetEdgeMultiplier': {
'documentation': {
'description': 'Configures the edge multiplier of the pins in the time set. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified digital pattern instrument handle\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of pin and pin group names for which to configure the time set edges.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The specified edge multiplier for the pins in the pin list.\n'
},
'name': 'edgeMultiplier',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ConfigureTimeSetPeriod': {
'documentation': {
'description': 'Configures the period of a time set. Use this function to modify time set values after applying a timing sheet with the niDigital_ApplyLevelsAndTiming function, or to create time sets programmatically without the use of timing sheets. This function does not modify the timing sheet file or the timing sheet contents that will be used in future calls to niDigital_ApplyLevelsAndTiming; it only affects the values of the current timing context.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Period for this time set, in seconds.\n'
},
'name': 'period',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
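    # Sketch (assumption: generated as Session.configure_time_set_period, used
    # after Session.create_time_set, defined further down; period in seconds):
    #   session.create_time_set('tset0')
    #   session.configure_time_set_period('tset0', 40e-9)   # 25 MHz vector rate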
'ConfigureVoltageLevels': {
'documentation': {
'description': 'Configures voltage levels for the pins you specify.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Voltage that the instrument will apply to the input of the DUT when the pin driver drives a logic low (0).\n'
},
'name': 'vil',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Voltage that the instrument will apply to the input of the DUT when the test instrument drives a logic high (1).\n'
},
'name': 'vih',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Output voltage below which the comparator on the pin driver interprets a logic low (L).\n'
},
'name': 'vol',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Output voltage above which the comparator on the pin driver interprets a logic high (H).\n'
},
'name': 'voh',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'Termination voltage the instrument applies during non-drive cycles when the termination mode is set to V\\ :sub:`term`. The instrument applies the termination voltage through a 50 ohm parallel termination resistance.\n'
},
'name': 'vterm',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
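    # Sketch (assumption: generated as configure_voltage_levels; all five
    # levels in volts, values illustrative for a 3.3 V DUT interface):
    #   session.channels['PXI1Slot3/0-7'].configure_voltage_levels(
    #       vil=0.0, vih=3.3, vol=0.8, voh=2.0, vterm=1.65)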
'CreateCaptureWaveformFromFileDigicapture': {
'documentation': {
'description': 'Creates a capture waveform with the configuration information from a Digicapture file generated by the Digital Pattern Editor.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Waveform name you want to use. You must specify waveform_name if the file contains multiple waveforms. Use the waveform_name with the capture_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Absolute file path to the capture waveform file (.digicapture) you want to load.\n'
},
'name': 'waveformFilePath',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'CreateCaptureWaveformParallel': {
'documentation': {
'description': 'Sets the capture waveform settings for parallel acquisition. Settings apply across all sites if multiple sites are configured in the pin map. You cannot reconfigure settings after waveforms are created.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of capture pins from the waveform. The **pinList** must match the capture pins in the pattern that references the waveform. The pin order in the **pinList** determines the bit positions of the data captured by the niDigital_FetchCaptureWaveform function.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Waveform name you want to use. Use the waveform_name with the capture_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'CreateCaptureWaveformSerial': {
'documentation': {
'description': 'Sets the capture waveform settings for serial acquisition. Settings apply across all sites if multiple sites are configured in the pin map. You cannot reconfigure settings after waveforms are created.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'List of capture pins from the waveform. The **pinList** must match the capture pins in the pattern that references the waveform. The pin order in the **pinList** determines the bit positions of the data captured by the niDigital_FetchCaptureWaveform function.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Waveform name you want to use. Use the waveform_name with the capture_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Width in bits of each serial sample. Valid values are between 1 and 32.\n'
},
'name': 'sampleWidth',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Order in which to shift the bits.\n\n- NIDIGITAL_VAL_MSB_FIRST: Specifies the bit order by most significant bit first.\n- NIDIGITAL_VAL_LSB_FIRST: Specifies the bit order by least significant bit first.\n'
},
'enum': 'BitOrder',
'name': 'bitOrder',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
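    # Sketch (assumption: generated as create_capture_waveform_serial with the
    # BitOrder enum above; 8-bit samples shifted MSB first, names illustrative):
    #   session.pins['MISO'].create_capture_waveform_serial(
    #       'capture_wfm', sample_width=8, bit_order=nidigital.BitOrder.MSB_FIRST)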
'CreateSourceWaveformFromFileTDMS': {
'documentation': {
'description': 'Creates a source waveform with configuration information from a TDMS file generated by the Digital Pattern Editor. It also optionally writes waveform data from the file.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The waveform name you want to use from the file. You must specify waveform_name if the file contains multiple waveforms. Use the waveform_name with the source_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Absolute file path to the load source waveform file (.tdms).\n'
},
'name': 'waveformFilePath',
'type': 'ViConstString'
},
{
'default_value': True,
'direction': 'in',
'documentation': {
'description': 'A Boolean that writes waveform data to source memory if True and the waveform data is in the file.\n'
},
'name': 'writeWaveformData',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'CreateSourceWaveformParallel': {
'documentation': {
'description': 'Sets the source waveform settings required for parallel sourcing. Settings apply across all sites if multiple sites are configured in the pin map. You cannot reconfigure settings after waveforms are created.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'Source pins for the waveform. The **pinList** must match the source pins in the pattern that references the waveform. The pin order in the **pinList** determines the bit positions of the data written by the niDigital_WriteSourceWaveform function.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The name to assign to the waveform. Use the waveform_name with the source_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Parameter that specifies how to map data on multiple sites.\n\n- NIDIGITAL_VAL_BROADCAST: Broadcasts the waveform you specify to all sites.\n- NIDIGITAL_VAL_SITE_UNIQUE: Sources unique waveform data to each site.\n'
},
'enum': 'SourceDataMapping',
'name': 'dataMapping',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'CreateSourceWaveformSerial': {
'documentation': {
'description': 'Sets the source waveform settings required for serial sourcing. Settings apply across all sites if multiple sites are configured in the pin map. You cannot reconfigure settings after waveforms are created.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'Source pins for the waveform. The **pinList** must match the source pins in the pattern that references the waveform. The pin order in the **pinList** determines the bit positions of the data written by the niDigital_WriteSourceWaveform function.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The name to assign to the waveform. Use the waveform_name with the source_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Parameter that specifies how to map data on multiple sites.\n\n- NIDIGITAL_VAL_BROADCAST: Broadcasts the waveform you specify to all sites.\n- NIDIGITAL_VAL_SITE_UNIQUE: Sources unique waveform data to each site.\n'
},
'enum': 'SourceDataMapping',
'name': 'dataMapping',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Width in bits of each serial sample. Valid values are between 1 and 32.\n'
},
'name': 'sampleWidth',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Order in which to shift the bits.\n\n- NIDIGITAL_VAL_MSB_FIRST: Specifies the bit order by most significant bit first.\n- NIDIGITAL_VAL_LSB_FIRST: Specifies the bit order by least significant bit first.\n'
},
'enum': 'BitOrder',
'name': 'bitOrder',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
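    # Sketch (assumption: generated as create_source_waveform_serial with the
    # SourceDataMapping and BitOrder enums above; names illustrative):
    #   session.pins['MOSI'].create_source_waveform_serial(
    #       'source_wfm', nidigital.SourceDataMapping.BROADCAST,
    #       sample_width=8, bit_order=nidigital.BitOrder.MSB_FIRST)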
'CreateTimeSet': {
'documentation': {
'description': 'Creates a time set with the name that you specify. Use this function when you want to create time sets programmatically rather than with a timing sheet.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The specified name of the new time set.\n'
},
'name': 'name',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'DeleteAllTimeSets': {
'documentation': {
'description': 'Deletes all time sets from instrument memory.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'DisableSites': {
'documentation': {
'description': 'Disables specified sites. Disabled sites are not included in pattern bursts initiated by the initiate function or the niDigital_FancyBurstPattern function, even if the site is specified in the list of pattern burst sites in niDigital_ConfigurePatternBurstSites function or in the repeated capabilities for the niDigital_FancyBurstPattern function. Additionally, if you specify a list of pin or pin group names in repeated capabilities in any NI-Digital function, digital pattern instrument channels mapped to disabled sites are not affected by the function. The functions that return per-pin data, such as the niDigital_PPMU_Measure function, do not return data for channels mapped to disabled sites. The digital pattern instrument channels mapped to the sites specified are left in their current state. NI TestStand Semiconductor Module requires all sites to always be enabled, and manages the set of active sites without disabling the sites in the digital instrument session. Do not use this function with the Semiconductor Module.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Comma-delimited list of strings in the form of site\\ ``N``, where ``N`` is the site number. If you enter an empty string, the function disables all sites.\n'
},
'name': 'siteList',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'EnableSites': {
'documentation': {
'description': 'Enables the sites you specify. All sites are enabled by default.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Comma-delimited list of strings in the form of site\\ ``N``, where ``N`` is the site number. If you enter an empty string, the function enables all sites.\n'
},
'name': 'siteList',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
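    # Sketch (assumption: generated as Session.enable_sites / disable_sites;
    # per the note above, avoid disabling sites under the TestStand
    # Semiconductor Module):
    #   session.disable_sites('site0')
    #   session.enable_sites('')   # an empty string re-enables all sites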
'FancySelfTest': {
'codegen_method': 'python-only',
'documentation': {
'description': 'Returns self test results from a digital pattern instrument. This test requires several minutes to execute.\n\nRaises `SelfTestError` on self test failure. Attributes on exception object:\n\n- code - failure code from driver\n- message - status message from driver\n',
'table_body': [
[
'0',
'Self test passed.'
],
[
'1',
'Self test failed.'
]
],
'table_header': [
'Self-Test Code',
'Description'
]
},
'method_templates': [
{
'documentation_filename': 'default_method',
'method_python_name_suffix': '',
'session_filename': 'fancy_self_test'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'python_name': 'self_test',
'returns': 'ViStatus'
},
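    # Sketch (the 'python_name' above maps this to Session.self_test, which the
    # docstring says raises SelfTestError on failure):
    #   session.self_test()   # takes several minutes to run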
'FetchCaptureWaveformU32': {
'codegen_method': 'library-only',
'documentation': {
'description': 'Fetches a defined number of samples for a specific list of sites. This function only returns data from sites that are enabled when fetch is called.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site numbers listed as a comma-delimited list of strings of form site\\ ``N``, where ``N`` is the site number. If you enter an empty string, the function fetches data from all sites.\n'
},
'name': 'siteList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Waveform name you create with the create capture waveform function. Use the waveform_name parameter with the capture_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Number of samples to fetch.\n'
},
'name': 'samplesToRead',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Maximum time (in seconds) allowed for this function to complete. If this function does not complete within this time interval, this function returns an error.\n'
},
'name': 'timeout',
'type': 'ViReal64'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViUInt32 array you specify for data. To determine the size of the buffer to allocate for the data array, pass a value of 0 to the **dataBufferSize** parameter and a value of VI_NULL to the **data** parameter. In this case, the value returned by the **actualNumWaveforms** and **actualSamplesPerWaveform** parameters determine the size of the array necessary to hold the data. The data buffer size should be the number of samples per waveform multiplied by the number of waveforms.\n'
},
'name': 'dataBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'An array of digital states read from the sites in the repeated capabilities. Each row in the array corresponds to a site in the list. If a site is disabled, not enabled for burst, or the current instrument does not include any capture pins, the function does not return data for that site. Data for each site in the repeated capabilities are returned sequentially (non-interleaved). If you are using a list of pin names to read data from multiple instruments, use the niDigital_SortSiteResultsViUInt32Waveform function to order and combine the data to match the repeated capabilities. You can also use the niDigital_GetSiteResultsSiteNumbers function to obtain a list of returned sites.\n'
},
'name': 'data',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'dataBufferSize',
'value_twist': 'actualNumWaveforms'
},
'type': 'ViUInt32[]'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of waveforms written to the data array.\n'
},
'name': 'actualNumWaveforms',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of samples per waveform written to the data array.\n'
},
'name': 'actualSamplesPerWaveform',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
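# The 'ivi-dance-with-a-twist' size mechanism above encodes a two-call
# protocol: call once with dataBufferSize=0 and data=NULL to learn the
# required size, then allocate and call again. A minimal ctypes-flavored
# sketch (binding and type names are assumptions):
#
#   num_wfms, samples_per_wfm = ViInt32(), ViInt32()
#   lib.niDigital_FetchCaptureWaveformU32(
#       vi, b'', b'capture_wfm', samples_to_read, 10.0,
#       0, None, byref(num_wfms), byref(samples_per_wfm))
#   data = (ViUInt32 * (num_wfms.value * samples_per_wfm.value))()
#   lib.niDigital_FetchCaptureWaveformU32(
#       vi, b'', b'capture_wfm', samples_to_read, 10.0,
#       len(data), data, byref(num_wfms), byref(samples_per_wfm))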
'FetchHistoryRAMCycleInformation': {
'codegen_method': 'private',
'documentation': {
'description': 'Gets the per-cycle pattern information acquired for the specified cycle.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site specified as a string in the form of ``siteN``, where ``N`` is the site number. The function returns an error if more than one site is specified.\n'
},
'name': 'site',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The index of the History RAM sample to fetch. Each History RAM sample contains information about a single cycle in the pattern burst.\n'
},
'name': 'sampleIndex',
'type': 'ViInt64'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned index of the pattern for the acquired cycle. Use niDigital_GetPatternName to get the name of the pattern from its index.\n'
},
'name': 'patternIndex',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned index of the time set for the acquired cycle. Use niDigital_GetTimeSetName to get the name of the time set from its index.\n'
},
'name': 'timeSetIndex',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned vector number within the pattern for the acquired cycle. Vector numbers start at 0 from the beginning of the pattern.\n'
},
'name': 'vectorNumber',
'type': 'ViInt64'
},
{
'direction': 'out',
'documentation': {
'description': 'Returns the cycle number acquired by this History RAM sample. Cycle numbers start at 0 from the beginning of the pattern burst.\n'
},
'name': 'cycleNumber',
'type': 'ViInt64'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned number of DUT cycles contained in the cycle acquired by this History RAM sample. This is only needed if the pattern uses the edge multiplier feature.\n'
},
'name': 'numDutCycles',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'FetchHistoryRAMCyclePinData': {
'codegen_method': 'private',
'documentation': {
'description': 'Gets the per-pin pattern data acquired for the specified cycle.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site specified as a string in the form of ``siteN``, where ``N`` is the site number. The function returns an error if more than one site is specified.\n'
},
'name': 'site',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The specified pins for which to retrieve History RAM data. If empty, the pin list from the pattern containing the start label is used. Call niDigital_GetPatternPinList or niDigital_GetPatternPinIndexes with the start label to retrieve the pins associated with the pattern burst.\n'
},
'name': 'pinList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The index of the History RAM sample to fetch. Each History RAM sample contains information about a single cycle in the pattern burst.\n'
},
'name': 'sampleIndex',
'type': 'ViInt64'
},
{
'direction': 'in',
'documentation': {
'description': 'The specified index of the DUT cycle. If the pattern does not use the edge multiplier feature, pass 0 for this parameter. For History RAM samples that contain multiple DUT cycles, indicated by the **numDutCycles** value returned by niDigital_FetchHistoryRAMCycleInformation, call this function multiple times to retrieve pin states for each DUT cycle. The DUT cycle index should start at 0.\n'
},
'name': 'dutCycleIndex',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The specified number of elements in the **expectedPinStates**, **actualPinStates**, and **perPinPassFail** arrays. All three array parameters must be of the same size if they are not set to VI_NULL.\n\nTo determine the size of the buffer to allocate for the arrays, pass a value of 0 to the **pinDataBufferSize** parameter and a value of VI_NULL to the array parameters. In this case, the value returned by the **actualNumPinData** parameter is the size of the arrays necessary to hold the data.\n'
},
'name': 'pinDataBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned pin state as expected by the loaded pattern in the order specified in **pinList**. Pins without defined edges in the specified DUT cycle will return NIDIGITAL_VAL_NOT_A_PIN_STATE.\n'
},
'enum': 'PinState',
'name': 'expectedPinStates',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'pinDataBufferSize',
'value_twist': 'actualNumPinData'
},
'type': 'ViUInt8[]'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned pin state acquired by History RAM in the order specified in **pinList**. Pins without defined edges in the specified DUT cycle will return NIDIGITAL_VAL_NOT_A_PIN_STATE.\n'
},
'enum': 'PinState',
'name': 'actualPinStates',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'pinDataBufferSize',
'value_twist': 'actualNumPinData'
},
'type': 'ViUInt8[]'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned pass/fail information for pins in the order specified in **pinList**. Pins without defined edges in the specified DUT cycle will return pass (VI_TRUE).\n'
},
'name': 'perPinPassFail',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'pinDataBufferSize',
'value_twist': 'actualNumPinData'
},
'type': 'ViBoolean[]'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of values written to the array parameters.\n'
},
'name': 'actualNumPinData',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
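# Per the dutCycleIndex documentation above, a History RAM sample can span
# several DUT cycles when the pattern uses the edge multiplier feature. A
# hedged sketch of the intended call pattern (wrapper names are illustrative):
#
#   _, _, _, _, num_dut_cycles = fetch_history_ram_cycle_information(site, sample_index)
#   for dut_cycle in range(num_dut_cycles):
#       expected, actual, pass_fail = fetch_history_ram_cycle_pin_data(
#           site, pin_list, sample_index, dut_cycle)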
'FetchHistoryRAMScanCycleNumber': {
'codegen_method': 'private',
'documentation': {
'description': 'Fetches the History RAM Scan Cycle Number for the sample index. If the sample is not from a scan vector, the scan cycle number will be returned as -1.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site specified as a string in the form of ``siteN``, where ``N`` is the site number. The function returns an error if more than one site is specified.\n'
},
'name': 'site',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The index of the History RAM sample to fetch. Each History RAM sample contains information about a single cycle in the pattern burst.\n'
},
'name': 'sampleIndex',
'type': 'ViInt64'
},
{
'direction': 'out',
'documentation': {
'description': 'Returns the scan cycle number acquired by this History RAM sample. Scan cycle numbers start at 0 from the first cycle of the scan vector. Scan cycle numbers are -1 for cycles that do not have a scan opcode.\n'
},
'name': 'scanCycleNumber',
'type': 'ViInt64'
}
],
'returns': 'ViStatus'
},
'FrequencyCounter_MeasureFrequency': {
'documentation': {
'description': 'Measures the frequency on the specified channel(s) over the specified measurement time. All channels in the repeated capabilities should have the same measurement time.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The specified number of elements in the ViReal64 array you specify for the frequency counter measurements.\n'
},
'name': 'frequenciesBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned frequency counter measurement, in Hz. This function returns -1 if the measurement is invalid for the channel.\n'
},
'name': 'frequencies',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'frequenciesBufferSize',
'value_twist': 'actualNumFrequencies'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned number of frequency counter measurements written to the measurements array.\n'
},
'name': 'actualNumFrequencies',
'type': 'ViInt32'
}
],
'python_name': 'frequency_counter_measure_frequency',
'returns': 'ViStatus'
},
'GetAttributeViBoolean': {
'codegen_method': 'private',
'documentation': {
'description': 'Queries the value of a ViBoolean attribute. Use this function to get the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned current value of the attribute; pass the address of a ViBoolean variable.\n'
},
'name': 'value',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'GetAttributeViInt32': {
'codegen_method': 'private',
'documentation': {
'description': 'Queries the value of a ViInt32 attribute. Use this function to get the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned current value of the attribute; pass the address of a ViInt32 variable.\n'
},
'name': 'value',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetAttributeViInt64': {
'codegen_method': 'private',
'documentation': {
'description': 'Queries the value of a ViInt64 attribute. Use this function to get the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned current value of the attribute; pass the address of a ViInt64 variable.\n'
},
'name': 'value',
'type': 'ViInt64'
}
],
'returns': 'ViStatus'
},
'GetAttributeViReal64': {
'codegen_method': 'private',
'documentation': {
'description': 'This function queries the value of a ViReal64 attribute. Use this function to get the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned current value of the attribute; pass the address of a ViReal64 variable.\n'
},
'name': 'value',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'GetAttributeViString': {
'codegen_method': 'private',
'documentation': {
'description': 'Queries the value of a ViString attribute. Use this function to get the values of digital pattern instrument-specific attributes and inherent IVI attributes. You must provide a ViChar array to serve as a buffer for the value. You pass the number of bytes in the buffer as the **bufferSize**. If the current value of the attribute, including the terminating NULL byte, is larger than the size you indicate in the **bufferSize**, the function copies (bufferSize - 1) bytes into the buffer, places an ASCII NULL byte at the end of the buffer, and returns the **bufferSize** you must pass to get the entire value. For example, if the value is "123456" and the **bufferSize** is 4, the function places "123" into the buffer and returns 7. If you want to call this function just to get the required buffer size, you can pass 0 for the **bufferSize** and VI_NULL for the value.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViChar array you specify for value.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The buffer in which the function returns the current value of the attribute; the buffer must be of type ViChar and have at least as many bytes as indicated in the **bufferSize**.\n'
},
'name': 'value',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
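# The 'ivi-dance' mechanism in GetAttributeViString above is the classic IVI
# two-step: the first call with bufferSize=0 returns the required size, the
# second call fills the buffer. Sketch (ctypes-style; binding name assumed):
#
#   import ctypes
#   size = lib.niDigital_GetAttributeViString(vi, b'', attribute_id, 0, None)
#   buf = ctypes.create_string_buffer(size)
#   lib.niDigital_GetAttributeViString(vi, b'', attribute_id, size, buf)
#   value = buf.value.decode()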
'GetChannelName': {
'codegen_method': 'no',
'documentation': {
'description': 'Returns the channel name that corresponds to the index you specify. Channel indexes are one-based. You must provide a ViChar array to serve as a buffer for the value. You pass the number of bytes in the buffer as the **nameBufferSize**. If the current value of the attribute, including the terminating NULL byte, is larger than the size you indicate in the buffer size, the function copies (buffer size - 1) bytes into the buffer, places an ASCII NULL byte at the end of the buffer, and returns the buffer size you must pass to get the entire value. For example, if the value is "123456" and the buffer size is 4, the function places "123" into the buffer and returns 7. If you want to call this function just to get the required buffer size, you can pass 0 for **nameBufferSize** and VI_NULL for the name.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies a one-based index for the desired channel in the session. Valid values are from one to the total number of channels in the session.\n'
},
'name': 'index',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViChar array you specify for name.\n'
},
'name': 'nameBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned channel name(s) at the specified index.\n'
},
'name': 'name',
'size': {
'mechanism': 'ivi-dance',
'value': 'nameBufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetChannelNameFromString': {
'documentation': {
'description': 'Returns a comma-separated list of channel names from a string index list.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Index list for the channels in the session. Valid values are from zero to the total number of channels in the session minus one. The index string can be one of the following formats:\n\n- A comma-separated list—for example, "0,2,3,1"\n- A range using a hyphen—for example, "0-3"\n- A range using a colon—for example, "0:3"\n\nYou can combine comma-separated lists and ranges that use a hyphen or colon. Both out-of-order and repeated indices are supported ("2,3,0", "1,2,2,3"). White space characters, including spaces, tabs, line feeds, and carriage returns, are allowed between characters. Ranges can be incrementing or decrementing.\n'
},
'name': 'indices',
'python_api_converter_name': 'convert_repeated_capabilities_without_prefix',
'type': 'ViConstString',
'type_in_documentation': 'basic sequence types or str or int',
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViChar array you specify for name.\n'
},
'name': 'nameBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned channel name(s) at the specified index.\n'
},
'name': 'names',
'python_api_converter_name': 'convert_comma_separated_string_to_list',
'size': {
'mechanism': 'ivi-dance',
'value': 'nameBufferSize'
},
'type': 'ViChar[]',
'type_in_documentation': 'list of str',
}
],
'python_name': 'get_channel_names',
'render_in_session_base': True, # Used in FancyGetPinResultsPinInformation()
'returns': 'ViStatus'
},
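# The two python_api_converter_name hooks above mean the generated
# get_channel_names accepts flexible index input and returns a list of
# strings. A hedged usage sketch (return values are illustrative):
#
#   names = session.get_channel_names([0, 2, 3])   # basic sequence of ints
#   names = session.get_channel_names('0-3')       # range string also accepted
#   # names -> e.g. ['PXI1Slot3/0', 'PXI1Slot3/1', ...]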
'GetError': {
'codegen_method': 'private',
'documentation': {
'description': 'Returns the error information associated with the digital pattern instrument handle. This function retrieves and then clears the error information for the session. If **vi** is VI_NULL, this function retrieves and then clears the error information for the current thread. You must provide a ViChar array to serve as a buffer for the value. You pass the number of bytes in the buffer as the buffer size. If the current value of the error description, including the terminating NULL byte, is larger than the size you indicate in the buffer size, the function copies (buffer size -1) bytes into the buffer, places an ASCII NULL byte at the end of the buffer, and returns the buffer size you must pass to get the entire value. For example, if the value is "123456" and the buffer size is 4, the function places "123" into the buffer and returns 7. If you want to call this function just to get the required buffer size, you can pass 0 for the buffer size and VI_NULL for **errorDescription**.\n'
},
'is_error_handling': True,
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned error code for the session or execution thread.\n'
},
'name': 'errorCode',
'type': 'ViStatus'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViChar array you specify for **errorDescription**.\n'
},
'name': 'errorDescriptionBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned error description for the IVI session or execution thread.\nIf there is no description, the function returns an empty string. The buffer must contain at least as many elements as the value you specify with the buffer size parameter.\nIf you pass 0 for **errorDescriptionBufferSize**, you can pass VI_NULL for this parameter.\n'
},
'name': 'errorDescription',
'size': {
'mechanism': 'ivi-dance',
'value': 'errorDescriptionBufferSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus',
'use_session_lock': False
},
'GetFailCount': {
'documentation': {
'description': 'Returns the comparison fail count for pins in the repeated capabilities.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViInt64 array you specify for **failureCount**. To determine the size of the buffer to allocate for the **failureCount** array, pass a value of 0 to the **bufferSize** parameter and a value of VI_NULL to the **failureCount** parameter. In this case, the value returned by the **actualNumRead** parameter is the size of the array necessary to hold the failure counts.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned array of per-pin failure counts. If a site is disabled or not enabled for burst, the function does not return data for that site. You can also use the niDigital_FancyGetPinResultsPinInformation function to obtain a sorted list of returned sites and channels.\n'
},
'name': 'failureCount',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'actualNumRead'
},
'type': 'ViInt64[]'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of failure count values written to the **failureCount** array.\n'
},
'name': 'actualNumRead',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetHistoryRAMSampleCount': {
'documentation': {
'description': 'Returns the number of samples History RAM acquired on the last pattern burst.\n',
'note': """\nBefore bursting a pattern, you must configure the History RAM trigger and specify which cycles to acquire.
NIDIGITAL_ATTR_HISTORY_RAM_TRIGGER_TYPE should be used to specify the trigger condition on which History RAM
starts acquiring pattern information.
If History RAM trigger is configured as NIDIGITAL_VAL_CYCLE_NUMBER,
NIDIGITAL_ATTR_CYCLE_NUMBER_HISTORY_RAM_TRIGGER_CYCLE_NUMBER should be used to specify the cycle number on which
History RAM starts acquiring pattern information.
If History RAM trigger is configured as NIDIGITAL_VAL_PATTERN_LABEL,
NIDIGITAL_ATTR_PATTERN_LABEL_HISTORY_RAM_TRIGGER_LABEL should be used to specify the pattern label from which to
start acquiring pattern information.
NIDIGITAL_ATTR_PATTERN_LABEL_HISTORY_RAM_TRIGGER_VECTOR_OFFSET should be used to specify the number of vectors
following the specified pattern label from which to start acquiring pattern information.
NIDIGITAL_ATTR_PATTERN_LABEL_HISTORY_RAM_TRIGGER_CYCLE_OFFSET should be used to specify the number of cycles
following the specified pattern label and vector offset from which to start acquiring pattern information.
For all History RAM trigger conditions, NIDIGITAL_ATTR_HISTORY_RAM_PRETRIGGER_SAMPLES should be used to specify
the number of samples to acquire before the trigger conditions are met. If you configure History RAM to only
acquire failed cycles, you must set NIDIGITAL_ATTR_HISTORY_RAM_PRETRIGGER_SAMPLES to 0.
NIDIGITAL_ATTR_HISTORY_RAM_CYCLES_TO_ACQUIRE should be used to specify which cycles History RAM acquires after
the trigger conditions are met.
""",
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site specified as a string in the form of ``siteN``, where ``N`` is the site number. The function returns an error if more than one site is specified.\n'
},
'name': 'site',
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned number of samples that History RAM acquired.\n'
},
'name': 'sampleCount',
'type': 'ViInt64'
}
],
'returns': 'ViStatus'
},
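# A hedged configuration sketch for the note above, using the driver's generic
# attribute setters (setter names and attribute constants are assumptions
# drawn from the note, not defined in this file):
#
#   lib.niDigital_SetAttributeViInt32(
#       vi, b'', NIDIGITAL_ATTR_HISTORY_RAM_TRIGGER_TYPE, NIDIGITAL_VAL_CYCLE_NUMBER)
#   lib.niDigital_SetAttributeViInt64(
#       vi, b'', NIDIGITAL_ATTR_CYCLE_NUMBER_HISTORY_RAM_TRIGGER_CYCLE_NUMBER, 100)
#   lib.niDigital_SetAttributeViInt32(
#       vi, b'', NIDIGITAL_ATTR_HISTORY_RAM_PRETRIGGER_SAMPLES, 0)
#   # ...burst the pattern, then:
#   lib.niDigital_GetHistoryRAMSampleCount(vi, b'site0', byref(sample_count))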
'GetPatternName': {
'codegen_method': 'private',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'patternIndex',
'type': 'ViInt32'
},
{
'direction': 'in',
'name': 'nameBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'name': 'name',
'size': {
'mechanism': 'ivi-dance',
'value': 'nameBufferSize'
},
'type': 'ViChar[]'
}
],
'render_in_session_base': True, # Called from FancyFetchHistoryRAMCycleInformation() which uses rep cap
'returns': 'ViStatus'
},
'GetPatternPinList': {
'documentation': {
'description': 'Returns the pattern pin list.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Pattern name or exported pattern label from which to get the pin names that the pattern references.\n'
},
'name': 'startLabel',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViChar array you specify for **pinList**.\n'
},
'name': 'pinListBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'List of pins referenced by the pattern with the **startLabel**.\n'
},
'name': 'pinList',
'python_api_converter_name': 'convert_comma_separated_string_to_list',
'size': {
'mechanism': 'ivi-dance',
'value': 'pinListBufferSize'
},
'type': 'ViChar[]',
'type_in_documentation': 'list of str',
}
],
'python_name': 'get_pattern_pin_names',
'returns': 'ViStatus'
},
'GetPinName': {
'codegen_method': 'private',
'documentation': {
'description': 'Returns the name of the pin at the index you specify. You must provide a ViChar array to serve as a buffer for the value. You pass the number of bytes in the buffer as the **nameBufferSize**. If the current value of the attribute, including the terminating NULL byte, is larger than the size you indicate in the buffer size, the function copies (buffer size - 1) bytes into the buffer, places an ASCII NULL byte at the end of the buffer, and returns the buffer size you must pass to get the entire value. For example, if the value is "123456" and the buffer size is 4, the function places "123" into the buffer and returns 7. If you want to call this function just to get the required buffer size, you can pass 0 for **nameBufferSize** and VI_NULL for the name.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Index of pin to query. Pin indexes begin at 0.\n'
},
'name': 'pinIndex',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViChar array you specify for name.\n'
},
'name': 'nameBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'Returns the pin name at the specified **pinIndex**.\n'
},
'name': 'name',
'size': {
'mechanism': 'ivi-dance',
'value': 'nameBufferSize'
},
'type': 'ViChar[]'
}
],
'render_in_session_base': True,
'returns': 'ViStatus'
},
'GetPinResultsPinInformation': {
'codegen_method': 'private',
'documentation': {
'description': 'Returns the pin names, site numbers, and channel names that correspond to per-pin data read from the digital pattern instrument. The function returns pin information in the same order as values read using the niDigital_ReadStatic function, niDigital_PPMU_Measure function, and niDigital_GetFailCount function. Use this function to match values the previously listed functions return with pins, sites, and instrument channels.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the arrays you specify for **pinIndexes**, **siteNumbers**, and **channelIndexes**, if they are not NULL. To determine the size of the buffer to allocate for the arrays, pass a value of 0 to the **bufferSize** parameter and a value of VI_NULL to the array parameters. In this case, the value returned by the **actualNumValues** parameter is the size of the arrays necessary to hold the output values.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned index of the pins corresponding to data read from the digital pattern instrument using the specified repeated capabilities. If you do not want to use this parameter, pass VI_NULL.\nCall niDigital_GetPinName to get the name of the pin associated with an index.\n'
},
'name': 'pinIndexes',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'actualNumValues'
},
'type': 'ViInt32[]'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned site numbers that correspond to data read from the digital pattern instrument using the specified repeated capabilities. If you do not want to use this parameter, pass VI_NULL.\n'
},
'name': 'siteNumbers',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'actualNumValues'
},
'type': 'ViInt32[]'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned index of channels corresponding to data read from the digital pattern instrument using the specified repeated capabilities. If you do not want to use this parameter, pass VI_NULL.\nCall niDigital_GetChannelName to get the name of the channel associated with an index. Channel indexes are one-based.\n'
},
'name': 'channelIndexes',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'actualNumValues'
},
'type': 'ViInt32[]'
},
{
'direction': 'out',
'documentation': {
'description': 'The number of values written to the output arrays. This function always writes the same number of values to all output arrays, if they are not set to VI_NULL.\n'
},
'name': 'actualNumValues',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetSitePassFail': {
'codegen_method': 'private',
'documentation': {
'description': 'Returns the pass or fail results for each site.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'A comma-delimited list of strings in the form of site\\ ``N``, where ``N`` is the site number. If you specify an empty string, the function returns pass or fail results for all sites.\n'
},
'name': 'siteList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViBoolean array you specify for **passFail**. To determine the size of the buffer to allocate for the **passFail** array, pass a value of 0 to the **passFailBufferSize** parameter and a value of VI_NULL to the **passFail** parameter. In this case, the value returned by the **actualNumSites** parameter is the size of the array necessary to hold the pass/fail values.\n'
},
'name': 'passFailBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned array of pass (VI_TRUE) and fail results for the sites you specify in the repeated capabilities. If sites span multiple digital pattern instruments, you must use an AND operator for the partial results for those sites returned by each instrument. If a site is disabled or not enabled for burst, the function does not return data for that site. Use the niDigital_SortSiteResultsViBoolean function to order and combine the data to match the repeated capabilities. You can also use the niDigital_GetSiteResultsSiteNumbers function to determine the order of the sites returned from this function call so that you can match the pass array with site numbers.\n'
},
'name': 'passFail',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'passFailBufferSize',
'value_twist': 'actualNumSites'
},
'type': 'ViBoolean[]'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of values written to the **passFail** array.\n'
},
'name': 'actualNumSites',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetSiteResultsSiteNumbers': {
'codegen_method': 'private',
'documentation': {
'description': 'Returns the site numbers that correspond to per-site data read from the digital pattern instrument. The function returns site numbers in the same order as values read using the niDigital_GetSitePassFail and niDigital_FetchCaptureWaveformU32 functions. Use this function to match values the previously listed functions return with site numbers.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site numbers listed as a comma-delimited list of strings of form site\\ ``N``, where ``N`` is the site number.\n'
},
'name': 'siteList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The type of data specified in the results array.\n\n- NIDIGITAL_VAL_PASS_FAIL: Get site numbers for pass/fail data.\n- NIDIGITAL_VAL_CAPTURE_WAVEFORM: Get site numbers for capture waveforms.\n'
},
'enum': 'SiteResultType',
'name': 'siteResultType',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViInt32 array you specify for **siteNumbers**. To determine the size of the buffer to allocate for the **siteNumbers** array, pass a value of 0 to the **siteNumbersBufferSize** parameter and a value of VI_NULL to the **siteNumbers** parameter. In this case, the value returned by the **actualNumSiteNumbers** parameter is the size of the array necessary to hold the site numbers.\n'
},
'name': 'siteNumbersBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned array of site numbers that correspond to the values specified by **siteResultType**.\n'
},
'name': 'siteNumbers',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'siteNumbersBufferSize',
'value_twist': 'actualNumSiteNumbers'
},
'type': 'ViInt32[]'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of sites written in the **siteNumbers** array.\n'
},
'name': 'actualNumSiteNumbers',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
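# GetSitePassFail and GetSiteResultsSiteNumbers return parallel, same-order
# data per the docstrings above, so callers can zip them to map results to
# sites. A hedged sketch (wrapper names are illustrative):
#
#   pass_fail = get_site_pass_fail('site0,site1,site2')
#   site_numbers = get_site_results_site_numbers(
#       'site0,site1,site2', NIDIGITAL_VAL_PASS_FAIL)
#   results = dict(zip(site_numbers, pass_fail))   # e.g. {0: True, 1: False, 2: True}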
'GetTimeSetDriveFormat': {
'documentation': {
'description': 'Returns the drive format of a pin in the specified time set.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified digital pattern instrument handle.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'Name of the specified pin.\n'
},
'name': 'pin',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': 'Returned drive format of the time set for the specified pin.\n'
},
'enum': 'DriveFormat',
'name': 'format',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetTimeSetEdge': {
'documentation': {
'description': 'Returns the edge time of a pin in the specified time set.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified digital pattern instrument handle.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'Name of the specified pin.\n'
},
'name': 'pin',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Name of the edge.\n\n- NIDIGITAL_VAL_DRIVE_ON\n- NIDIGITAL_VAL_DRIVE_DATA\n- NIDIGITAL_VAL_DRIVE_RETURN\n- NIDIGITAL_VAL_DRIVE_OFF\n- NIDIGITAL_VAL_COMPARE_STROBE\n- NIDIGITAL_VAL_DRIVE_DATA2\n- NIDIGITAL_VAL_DRIVE_RETURN2\n- NIDIGITAL_VAL_COMPARE_STROBE2\n'
},
'enum': 'TimeSetEdgeType',
'name': 'edge',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned time, from the beginning of the vector period, at which the specified edge is placed.\n'
},
'name': 'time',
'python_api_converter_name': 'convert_seconds_real64_to_timedelta',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta'
}
],
'returns': 'ViStatus'
},
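# convert_seconds_real64_to_timedelta above means the Python API surfaces edge
# times as hightime.timedelta rather than raw float seconds. A sketch of what
# a caller sees (the wrapper shown is illustrative, not the generated method):
#
#   import hightime
#   t = get_time_set_edge('PinA', 'tset0', edge)   # illustrative wrapper
#   assert isinstance(t, hightime.timedelta)
#   seconds = t.total_seconds()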
'GetTimeSetEdgeMultiplier': {
'documentation': {
'description': 'Returns the edge multiplier of the specified time set.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'pins',
'documentation': {
'description': 'Name of the specified pin.\n'
},
'name': 'pin',
'type': 'ViConstString'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': 'Returned edge multiplier of the time set for the specified pin.\n'
},
'name': 'edgeMultiplier',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'GetTimeSetName': {
'codegen_method': 'private',
'documentation': {
'description': 'TBD'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'timeSetIndex',
'type': 'ViInt32'
},
{
'direction': 'in',
'name': 'nameBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'name': 'name',
'size': {
'mechanism': 'ivi-dance',
'value': 'nameBufferSize'
},
'type': 'ViChar[]'
}
],
'render_in_session_base': True, # Called from FancyFetchHistoryRAMCycleInformation() which uses rep cap
'returns': 'ViStatus'
},
'GetTimeSetPeriod': {
'documentation': {
'description': 'Returns the period of the specified time set.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified digital pattern instrument handle.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'timeSetName',
'documentation': {
'description': 'The specified time set name.\n'
},
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned period, in seconds, of the specified time set.\n'
},
'name': 'period',
'python_api_converter_name': 'convert_seconds_real64_to_timedelta',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta'
}
],
'returns': 'ViStatus'
},
'InitWithOptions': {
'codegen_method': 'private',
'documentation': {
'description': 'Creates and returns a new session to the specified digital pattern instrument to use in all subsequent function calls. To place the instrument in a known startup state when creating a new session, set the reset parameter to VI_TRUE, which is equivalent to calling the niDigital_reset function immediately after initializing the session.\n'
},
'method_name_for_documentation': '__init__',
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified resource name shown in Measurement & Automation Explorer (MAX) for a digital pattern instrument, for example, PXI1Slot3, where PXI1Slot3 is an instrument resource name. **resourceName** can also be a logical IVI name. This parameter accepts a comma-delimited list of strings in the form PXI1Slot2,PXI1Slot3, where ``PXI1Slot2`` is one instrument resource name and ``PXI1Slot3`` is another. When including more than one digital pattern instrument in the comma-delimited list of strings, list the instruments in the same order they appear in the pin map.\n\n+--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+\n| |Note| | Note\xa0\xa0 You only can specify multiple instruments of the same model. For example, you can list two PXIe-6570s but not a PXIe-6570 and PXIe-6571. The instruments must be in the same chassis. |\n+--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+\n\n.. |Note| image:: note.gif\n',
'note': '\n'
},
'name': 'resourceName',
'type': 'ViConstString'
},
{
'default_value': False,
'direction': 'in',
'documentation': {
'description': 'A Boolean that verifies that the digital pattern instrument you initialize is supported by NI-Digital. NI-Digital automatically performs this query, so setting this parameter is not necessary.\n'
},
'name': 'idQuery',
'type': 'ViBoolean',
'use_in_python_api': False
},
{
'default_value': False,
'direction': 'in',
'documentation': {
'description': 'A Boolean that specifies whether to reset a digital pattern instrument to a known state when the session is initialized. Setting the **resetDevice** value to VI_TRUE is equivalent to calling the niDigital_reset function immediately after initializing the session.\n'
},
'name': 'resetDevice',
'type': 'ViBoolean'
},
{
'default_value': '""',
'direction': 'in',
'documentation': {
'description': 'The initial values of certain properties for the NI-Digital Pattern Driver session. The string can be empty. You can use the DriverSetup flag to simulate a digital pattern instrument. When simulating a digital pattern instrument, you must specify the model you want to simulate. For example, Simulate = 1, DriverSetup = Model:6570.\n'
},
'name': 'optionString',
'python_api_converter_name': 'convert_init_with_options_dictionary',
'type': 'ViConstString',
'type_in_documentation': 'dict'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned instrument session.\n'
},
'name': 'newVi',
'type': 'ViSession'
}
],
'returns': 'ViStatus',
'use_session_lock': False
},
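# convert_init_with_options_dictionary above turns a Python dict into the
# driver option string. A hedged sketch of the equivalence (dict key spellings
# and the Session constructor shape are assumptions):
#
#   options = {'simulate': True, 'driver_setup': {'Model': '6570'}}
#   # ...is converted to the documented form:
#   #   'Simulate=1,DriverSetup=Model:6570'
#   session = nidigital.Session('PXI1Slot3', reset_device=False, options=options)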
'Initiate': {
'codegen_method': 'private',
'documentation': {
'description': 'Starts bursting the pattern configured by NIDIGITAL_ATTR_START_LABEL, causing the NI-Digital session to be committed. To stop the pattern burst, call niDigital_Abort. If a keep alive pattern is bursting when niDigital_Abort is called, or upon exiting the context manager, the keep alive pattern is not stopped. To stop the keep alive pattern, call niDigital_AbortKeepAlive.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'IsDone': {
'documentation': {
'description': 'Checks the hardware to determine if the pattern burst has completed or if any errors have occurred.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': 'A Boolean that indicates whether the pattern burst completed.\n'
},
'name': 'done',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'IsSiteEnabled': {
'documentation': {
'description': 'Checks if a specified site is enabled.\n',
'note': 'The function returns an error if more than one site is specified.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site specified as a string in the form of ``siteN``, where ``N`` is the site number. The function returns an error if more than one site is specified.\n'
},
'name': 'site',
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': 'A Boolean that indicates whether the specified site is enabled.\n'
},
'name': 'enable',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'LoadLevels': {
'codegen_method': 'private',
'documentation': {
'description': 'Loads a levels sheet from a specified file.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'filePath',
'documentation': {
'description': 'Absolute file path to the specified levels sheet file.\n'
},
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'LoadPattern': {
'documentation': {
'description': 'Loads the specified pattern file.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Absolute file path of the binary .digipat pattern file to load. Specify the pattern to burst using NIDIGITAL_ATTR_START_LABEL or the start_label parameter of the niDigital_FancyBurstPattern function.\n'
},
'name': 'filePath',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'LoadPinMap': {
'documentation': {
'description': 'Loads a pin map file. You can load only a single pin and channel map file during an NI-Digital Pattern Driver session. To switch pin maps, create a new session or call the niDigital_reset function.\n'
},
'parameters': [
{
'direction': 'in',
'name': 'vi',
'type': 'ViSession',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
},
{
'direction': 'in',
'name': 'filePath',
'documentation': {
'description': 'Absolute file path to a pin map file created with the Digital Pattern Editor or the NI TestStand Semiconductor Module.\n'
},
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'LoadSpecifications': {
'codegen_method': 'private',
'documentation': {
'description': 'Loads a specifications sheet from a specified file.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'filePath',
'documentation': {
'description': 'Absolute file path to a specifications file.\n'
},
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'LoadTiming': {
'codegen_method': 'private',
'documentation': {
'description': 'Loads a timing sheet from a specified file.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'filePath',
'documentation': {
'description': 'Absolute file path to the specified timing sheet file.\n'
},
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
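# Taken together, the Load* entries above describe the usual session setup
# sequence: pin map first, then sheets, then the pattern. A hedged sketch with
# illustrative file paths (only the .digipat extension is documented here):
#
#   lib.niDigital_LoadPinMap(vi, b'C:/project/dut.pinmap')
#   lib.niDigital_LoadSpecifications(vi, b'C:/project/dut.specs')
#   lib.niDigital_LoadLevels(vi, b'C:/project/dut.digilevels')
#   lib.niDigital_LoadTiming(vi, b'C:/project/dut.digitiming')
#   lib.niDigital_LoadPattern(vi, b'C:/project/dut.digipat')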
'LockSession': {
'documentation': {
'description': 'Obtains the multithreaded lock on the instrument session. Before doing so, the function waits until all other execution threads have released the lock on the instrument session. Other threads might have obtained the lock on this session by calling niDigital_LockSession. After the call to niDigital_LockSession returns successfully, no other threads can access the instrument session until you call niDigital_UnlockSession. Use niDigital_LockSession and niDigital_UnlockSession around a sequence of calls to instrument driver functions if you require exclusive access through the end of the sequence. You can safely make nested calls to niDigital_LockSession within the same thread. To completely unlock the session, you must balance each call to niDigital_LockSession with a call to niDigital_UnlockSession. If, however, you use the **callerHasLock** parameter in all calls to niDigital_LockSession and niDigital_UnlockSession within a function, the IVI Library locks the session only once within the function, regardless of the number of calls you make to niDigital_LockSession. This functionality allows you to call niDigital_UnlockSession just once at the end of the function.\n'
},
'method_templates': [
{
'documentation_filename': 'lock',
'method_python_name_suffix': '',
'session_filename': 'lock'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': 'This parameter serves as a convenience. If you do not want to use this parameter, pass VI_NULL. You can use this parameter in complex functions to track lock status and the need to unlock the session. Pass the address of a local ViBoolean variable in the declaration of the local variable and initialize it to VI_FALSE. Also, pass the address of the same local variable to any other calls you make to niDigital_LockSession or niDigital_UnlockSession in the same function.\n'
},
'name': 'callerHasLock',
'type': 'ViBoolean'
}
],
'python_name': 'lock',
'render_in_session_base': True,
'returns': 'ViStatus',
'use_session_lock': False
},
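# Hedged sketch of the generated lock/unlock pairing (python_name 'lock' and
# 'unlock' per this metadata). The C-level **callerHasLock** convenience
# parameter is not surfaced here; balancing lock() with unlock() in a
# try/finally mirrors the nesting rule described above.
#
#     session.lock()
#     try:
#         # ... a sequence of driver calls that needs exclusive access ...
#         pass
#     finally:
#         session.unlock()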
'PPMU_Measure': {
'documentation': {
'description': 'Instructs the PPMU to measure voltage or current. This function can be called to take a voltage measurement even if the pin function is not set to PPMU.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Parameter that specifies whether the PPMU measures voltage or current from the DUT.\n\n- NIDIGITAL_VAL_MEASURE_CURRENT: The PPMU measures current from the DUT.\n- NIDIGITAL_VAL_MEASURE_VOLTAGE: The PPMU measures voltage from the DUT.\n'
},
'enum': 'PPMUMeasurementType',
'name': 'measurementType',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViReal64 array you specify for measurements. To determine the size of the buffer to allocate for the measurements array, pass a value of 0 to the **bufferSize** parameter and a value of VI_NULL to the **measurements** parameter. In this case, the value returned by the **actualNumRead** parameter is the size of the array necessary to hold the measurements.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned array of measurements in the order you specify in the repeated capabilities. If a site is disabled, the function does not return data for that site. You can also use the niDigital_FancyGetPinResultsPinInformation function to obtain a sorted list of returned sites and channels.\n'
},
'name': 'measurements',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'actualNumRead'
},
'type': 'ViReal64[]'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of measurements written to the measurements array.\n'
},
'name': 'actualNumRead',
'type': 'ViInt32'
}
],
'python_name': 'ppmu_measure',
'returns': 'ViStatus'
},
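# The 'ivi-dance-with-a-twist' size mechanism above means a C caller passes
# bufferSize=0 and measurements=VI_NULL once to learn the required size via
# actualNumRead, then calls again with an allocated array. The generated
# Python binding performs that dance internally, so a hedged sketch is a
# single call; the session.channels[...] indexing and the VOLTAGE enum
# member name are assumptions.
#
#     measurements = session.channels['PXI1Slot3/0'].ppmu_measure(
#         nidigital.PPMUMeasurementType.VOLTAGE)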
'PPMU_Source': {
'documentation': {
'description': 'Starts sourcing voltage or current from the PPMU. This function automatically selects the PPMU function. Changes to PPMU source settings do not take effect until you call this function. If you modify source settings after you call this function, you must call this function again for changes in the configuration to take effect.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
}
],
'python_name': 'ppmu_source',
'returns': 'ViStatus'
},
'ReadSequencerFlag': {
'documentation': {
'description': 'Reads the state of a pattern sequencer flag. Use pattern sequencer flags to coordinate execution between the pattern sequencer and a runtime test program.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The pattern sequencer flag you want to read.\n\n- NIDIGITAL_VAL_SEQUENCER_FLAG0 ("seqflag0"): Reads pattern sequencer flag 0.\n- NIDIGITAL_VAL_SEQUENCER_FLAG1 ("seqflag1"): Reads pattern sequencer flag 1.\n- NIDIGITAL_VAL_SEQUENCER_FLAG2 ("seqflag2"): Reads pattern sequencer flag 2.\n- NIDIGITAL_VAL_SEQUENCER_FLAG3 ("seqflag3"): Reads pattern sequencer flag 3.\n'
},
'enum': 'SequencerFlag',
'name': 'flag',
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': 'A Boolean that indicates the state of the pattern sequencer flag you specify.\n'
},
'name': 'value',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
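# Hedged polling sketch for the flag handshake described above; the enum
# member name FLAG0 and the method name read_sequencer_flag are assumptions
# based on this entry's 'SequencerFlag' enum and default name conversion.
#
#     import time
#     while not session.read_sequencer_flag(nidigital.SequencerFlag.FLAG0):
#         time.sleep(0.001)  # wait for the pattern to raise seqflag0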
'ReadSequencerRegister': {
'documentation': {
'description': 'Reads the value of a pattern sequencer register. Use pattern sequencer registers to pass numeric values between the pattern sequencer and a runtime test program. For example, you can use this function to read a register modified by the write_reg opcode during a pattern burst.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The sequencer register to read from.\n\n- NIDIGITAL_VAL_SEQUENCER_REGISTER0 ("reg0"): Reads sequencer register 0.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER1 ("reg1"): Reads sequencer register 1.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER2 ("reg2"): Reads sequencer register 2.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER3 ("reg3"): Reads sequencer register 3.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER4 ("reg4"): Reads sequencer register 4.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER5 ("reg5"): Reads sequencer register 5.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER6 ("reg6"): Reads sequencer register 6.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER7 ("reg7"): Reads sequencer register 7.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER8 ("reg8"): Reads sequencer register 8.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER9 ("reg9"): Reads sequencer register 9.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER10 ("reg10"): Reads sequencer register 10.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER11 ("reg11"): Reads sequencer register 11.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER12 ("reg12"): Reads sequencer register 12.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER13 ("reg13"): Reads sequencer register 13.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER14 ("reg14"): Reads sequencer register 14.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER15 ("reg15"): Reads sequencer register 15.\n'
},
'enum': 'SequencerRegister',
'name': 'reg',
'type': 'ViConstString'
},
{
'direction': 'out',
'documentation': {
'description': 'Value read from the sequencer register.\n'
},
'name': 'value',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ReadStatic': {
'documentation': {
'description': 'Reads the current state of comparators for pins you specify in the repeated capabilities. If there are uncommitted changes to levels or the termination mode, this function commits the changes to the pins.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViUInt8 array you specify for data. To determine the size of the buffer to allocate for the data array, pass a value of 0 to the **bufferSize** parameter and a value of VI_NULL to the **data** parameter. In this case, the value returned by the **actualNumRead** parameter is the size of the array necessary to hold the data.\n'
},
'name': 'bufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned array of pin states read from the channels in the repeated capabilities. Data is returned in the order you specify in the repeated capabilities. If a site is disabled, the function does not return data for that site. You can also use the niDigital_FancyGetPinResultsPinInformation function to obtain a sorted list of returned sites and channels.\n\n- NIDIGITAL_VAL_L: The comparators read a logic low pin state.\n- NIDIGITAL_VAL_H: The comparators read a logic high pin state.\n- NIDIGITAL_VAL_M: The comparators read a midband pin state.\n- NIDIGITAL_VAL_V: The comparators read a value that is above VOH and below VOL, which can occur when you set VOL higher than VOH.\n'
},
'enum': 'PinState',
'name': 'data',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'bufferSize',
'value_twist': 'actualNumRead'
},
'type': 'ViUInt8[]'
},
{
'direction': 'out',
'documentation': {
'description': 'The number of values written to the data array.\n'
},
'name': 'actualNumRead',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'ResetAttribute': {
'codegen_method': 'no',
'documentation': {
'description': 'Resets the attribute to its default value.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Pass the ID of an attribute.\n'
},
'name': 'attributeId',
'type': 'ViAttr'
}
],
'returns': 'ViStatus'
},
'ResetDevice': {
'documentation': {
'description': 'Returns a digital pattern instrument to a known state. This function performs the following actions:\n\n- Aborts pattern execution.\n- Clears pin maps, time sets, source and capture waveforms, and patterns.\n- Resets all properties to default values, including the NIDIGITAL_ATTR_SELECTED_FUNCTION property, which is set to NIDIGITAL_VAL_DISCONNECT, causing the I/O switches to open.\n- Stops export of all external signals and events.\n- Clears over-temperature and over-power conditions.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'SelfCalibrate': {
'documentation': {
'description': 'Performs self-calibration on a digital pattern instrument.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'SendSoftwareEdgeTrigger': {
'documentation': {
'description': 'Forces a particular edge-based trigger to occur regardless of how the specified trigger is configured. You can use this function as a software override.\n'
},
'parameters': [
{
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'direction': 'in',
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Specifies the trigger you want to override.\n',
'table_body': [
[
'NIDIGITAL_VAL_START_TRIGGER',
'Overrides the Start trigger. You must specify an empty string in the trigger_identifier parameter.'
],
[
'NIDIGITAL_VAL_CONDITIONAL_JUMP_TRIGGER',
'Overrides a conditional jump trigger. You must specify a conditional jump trigger in the trigger_identifier parameter.'
],
],
'table_header': [
'Defined Values',
'Description'
],
},
'enum': 'SoftwareTrigger',
'name': 'trigger',
'type': 'ViInt32'
},
{
'documentation': {
'description': """Trigger Identifier specifies the instance of the trigger you want to override.
If trigger is specified as NIDIGITAL_VAL_START_TRIGGER, this parameter must be an empty string. If trigger is
specified as NIDIGITAL_VAL_CONDITIONAL_JUMP_TRIGGER, allowed values are conditionalJumpTrigger0,
conditionalJumpTrigger1, conditionalJumpTrigger2, and conditionalJumpTrigger3.
"""
},
'direction': 'in',
'name': 'triggerIdentifier',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
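# Hedged sketch of a software override of a conditional jump trigger; the
# CONDITIONAL_JUMP enum member name and the snake_case method name are
# assumptions derived from this entry's 'SoftwareTrigger' enum and the
# default name conversion.
#
#     session.send_software_edge_trigger(
#         nidigital.SoftwareTrigger.CONDITIONAL_JUMP, 'conditionalJumpTrigger0')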
'SetAttributeViBoolean': {
'codegen_method': 'private',
'documentation': {
'description': 'Sets the value of a ViBoolean attribute. Use this function to set the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The value to which you want to set the attribute; some of the values might not be valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'SetAttributeViInt32': {
'codegen_method': 'private',
'documentation': {
'description': 'Sets the value of a ViInt32 attribute. Use this function to set the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The value to which you want to set the attribute; some of the values might not be valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
'SetAttributeViInt64': {
'codegen_method': 'private',
'documentation': {
'description': 'Sets the value of a ViInt64 attribute. Use this function to set the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The value to which you want to set the attribute; some of the values might not be valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViInt64'
}
],
'returns': 'ViStatus'
},
'SetAttributeViReal64': {
'codegen_method': 'private',
'documentation': {
'description': 'Sets the value of a ViReal64 attribute. Use this function to set the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The value to which you want to set the attribute; some of the values might not be valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'SetAttributeViString': {
'codegen_method': 'private',
'documentation': {
'description': 'Sets the value of a ViString attribute. Use this function to set the values of digital pattern instrument-specific attributes and inherent IVI attributes.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'channelName',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The ID of an attribute.\n'
},
'name': 'attribute',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The value to which you want to set the attribute; some of the values might not be valid depending on the current settings of the instrument session.\n'
},
'name': 'value',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'TDR': {
'documentation': {
'description': 'Measures propagation delays through cables, connectors, and load boards using Time-Domain Reflectometry (TDR). Ensure that the channels and pins you select are connected to an open circuit.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'default_value': True,
'direction': 'in',
'documentation': {
'description': 'A Boolean that specifies whether to apply the measured TDR offsets. If you need to adjust the measured offsets before applying them, set this input to VI_FALSE, and call the niDigital_ApplyTDROffsets function to specify the adjusted TDR offset values.\n'
},
'name': 'applyOffsets',
'type': 'ViBoolean'
},
{
'direction': 'in',
'documentation': {
'description': 'The number of elements in the ViReal64 array you specify for offsets. To determine the size of the buffer to allocate for the offsets array, pass a value of 0 to the **offsetsBufferSize** parameter and a value of VI_NULL to the **offsets** parameter. In this case, the value returned by the **actualNumOffsets** parameter is the size of the array necessary to hold the TDR offsets.\n'
},
'name': 'offsetsBufferSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'Measured TDR offsets specified in seconds.\n'
},
'name': 'offsets',
'python_api_converter_name': 'convert_seconds_real64_to_timedeltas',
'size': {
'mechanism': 'ivi-dance-with-a-twist',
'value': 'offsetsBufferSize',
'value_twist': 'actualNumOffsets'
},
'type': 'ViReal64[]',
'type_in_documentation': 'list of hightime.timedelta'
},
{
'direction': 'out',
'documentation': {
'description': 'Number of offsets written to the offsets array.\n'
},
'name': 'actualNumOffsets',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
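# Hedged sketch of the two-step TDR flow described above: measure with
# apply_offsets=False, adjust, then apply via an apply_tdr_offsets binding
# (niDigital_ApplyTDROffsets, referenced in the applyOffsets documentation).
# Offsets come back as hightime.timedelta per the converter named above; the
# channel string and the adjustment value are hypothetical.
#
#     import hightime
#     offsets = session.channels['PXI1Slot3/0-3'].tdr(apply_offsets=False)
#     adjusted = [o + hightime.timedelta(nanoseconds=0.5) for o in offsets]
#     session.channels['PXI1Slot3/0-3'].apply_tdr_offsets(adjusted)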
'UnloadAllPatterns': {
'documentation': {
'description': 'Unloads all patterns, source waveforms, and capture waveforms from a digital pattern instrument.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'default_value': False,
'direction': 'in',
'documentation': {
'description': 'A Boolean that specifies whether to keep or unload the keep-alive pattern.\n'
},
'name': 'unloadKeepAlivePattern',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'UnloadSpecifications': {
'codegen_method': 'private',
'documentation': {
'description': 'Unloads the given specifications sheet present in the previously loaded specifications file that you select. You must call the niDigital_LoadSpecifications function to reload the file with updated specifications values. You must then call the niDigital_ApplyLevelsAndTiming function in order to apply the levels and timing values that reference the updated specifications values.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'name': 'filePath',
'documentation': {
'description': 'Absolute file path to a loaded specifications file.\n'
},
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'UnlockSession': {
'documentation': {
'description': 'Releases a lock that you acquired on an instrument session using the niDigital_LockSession function.\n'
},
'method_templates': [
{
'documentation_filename': 'unlock',
'method_python_name_suffix': '',
'session_filename': 'unlock'
}
],
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': 'This parameter serves as a convenience. If you do not want to use this parameter, pass VI_NULL. You can use this parameter in complex functions to track lock status and the need to unlock the session. Pass the address of a local ViBoolean variable in the declaration of the local variable and initialize it to VI_FALSE. Also, pass the address of the same local variable to any other calls you make to niDigital_LockSession or niDigital_UnlockSession in the same function.\n'
},
'name': 'callerHasLock',
'type': 'ViBoolean'
}
],
'python_name': 'unlock',
'render_in_session_base': True,
'returns': 'ViStatus',
'use_session_lock': False
},
'WaitUntilDone': {
'documentation': {
'description': 'Waits until the pattern burst has completed or the timeout has expired.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'default_value': 'hightime.timedelta(seconds=10.0)',
'direction': 'in',
'documentation': {
'description': 'Maximum time (in seconds) allowed for this function to complete. If this function does not complete within this time interval, this function returns an error.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
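# Hedged burst-and-wait sketch; burst_pattern (niDigital_FancyBurstPattern,
# referenced earlier in this file) and the start label are assumptions. Per
# the type_in_documentation above, the timeout accepts hightime.timedelta,
# datetime.timedelta, or a float in seconds.
#
#     import hightime
#     session.burst_pattern('new_pattern')
#     session.wait_until_done(timeout=hightime.timedelta(seconds=10.0))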
'WriteSequencerFlag': {
'documentation': {
'description': 'Writes the state of a pattern sequencer flag. Use pattern sequencer flags to coordinate execution between the pattern sequencer and a runtime test program.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The pattern sequencer flag to write.\n\n- NIDIGITAL_VAL_SEQUENCER_FLAG0 ("seqflag0"): Writes pattern sequencer flag 0.\n- NIDIGITAL_VAL_SEQUENCER_FLAG1 ("seqflag1"): Writes pattern sequencer flag 1.\n- NIDIGITAL_VAL_SEQUENCER_FLAG2 ("seqflag2"): Writes pattern sequencer flag 2.\n- NIDIGITAL_VAL_SEQUENCER_FLAG3 ("seqflag3"): Writes pattern sequencer flag 3.\n'
},
'enum': 'SequencerFlag',
'name': 'flag',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'A Boolean that assigns a state to the pattern sequencer flag you specify.\n'
},
'name': 'value',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'WriteSequencerRegister': {
'documentation': {
'description': 'Writes a value to a pattern sequencer register. Use pattern sequencer registers to pass numeric values between the pattern sequencer and a runtime test program.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The sequencer register you want to write to.\n\n- NIDIGITAL_VAL_SEQUENCER_REGISTER0 ("reg0"): Writes sequencer register 0.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER1 ("reg1"): Writes sequencer register 1.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER2 ("reg2"): Writes sequencer register 2.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER3 ("reg3"): Writes sequencer register 3.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER4 ("reg4"): Writes sequencer register 4.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER5 ("reg5"): Writes sequencer register 5.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER6 ("reg6"): Writes sequencer register 6.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER7 ("reg7"): Writes sequencer register 7.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER8 ("reg8"): Writes sequencer register 8.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER9 ("reg9"): Writes sequencer register 9.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER10 ("reg10"): Writes sequencer register 10.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER11 ("reg11"): Writes sequencer register 11.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER12 ("reg12"): Writes sequencer register 12.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER13 ("reg13"): Writes sequencer register 13.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER14 ("reg14"): Writes sequencer register 14.\n- NIDIGITAL_VAL_SEQUENCER_REGISTER15 ("reg15"): Writes sequencer register 15.\n'
},
'enum': 'SequencerRegister',
'name': 'reg',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The value you want to write to the register.\n'
},
'name': 'value',
'type': 'ViInt32'
}
],
'returns': 'ViStatus'
},
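# Hedged register handshake sketch pairing this entry with
# ReadSequencerRegister above; REGISTER0 as the enum member name is an
# assumption. A pattern using the write_reg opcode could later modify the
# same register, which the read call would then observe.
#
#     session.write_sequencer_register(nidigital.SequencerRegister.REGISTER0, 42)
#     value = session.read_sequencer_register(nidigital.SequencerRegister.REGISTER0)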
'WriteSourceWaveformBroadcastU32': {
'documentation': {
'description': 'Writes the same waveform data to all sites. Use this write function if you set the data_mapping parameter of the create source waveform function to NIDIGITAL_VAL_BROADCAST.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The name to assign to the waveform. Use the waveform_name with the source_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Size of the data array.\n'
},
'name': 'waveformSize',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': '1D array of samples to use as source data to apply to all sites.\n'
},
'name': 'waveformData',
'size': {
'mechanism': 'len',
'value': 'waveformSize'
},
'type': 'ViUInt32[]'
}
],
'python_name': 'write_source_waveform_broadcast',
'returns': 'ViStatus'
},
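# Hedged sketch using the python_name above; because waveformSize uses the
# 'len' size mechanism, the generated binding should infer it from the
# sequence. The waveform name is hypothetical and must match a source_start
# opcode reference in the loaded pattern.
#
#     session.write_source_waveform_broadcast('src_wfm', [0x0, 0x1, 0x2, 0x3])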
'WriteSourceWaveformDataFromFileTDMS': {
'documentation': {
'description': 'Writes a source waveform based on the waveform data and configuration information the file contains.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The name to assign to the waveform. Use the waveform_name with the source_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Absolute file path to the load source waveform file (.tdms).\n'
},
'name': 'waveformFilePath',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'WriteSourceWaveformSiteUniqueU32': {
'codegen_method': 'private',
'documentation': {
'description': 'Writes one waveform per site. Use this write function if you set the data_mapping parameter of the create source waveform function to NIDIGITAL_VAL_SITE_UNIQUE.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'is_repeated_capability': True,
'repeated_capability_type': 'sites',
'documentation': {
'description': 'Site numbers listed as a comma-delimited list of strings of the form site\\ ``N``, where ``N`` is the site number.\n'
},
'name': 'siteList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'The name to assign to the waveform. Use the waveform_name with the source_start opcode in your pattern.\n'
},
'name': 'waveformName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Number of waveforms.\n'
},
'name': 'numWaveforms',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'Number of samples per waveform.\n'
},
'name': 'samplesPerWaveform',
'type': 'ViInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'An array of samples to use as source data. Data for each site must be appended sequentially in the array (non-interleaved).\n'
},
'name': 'waveformData',
'size': {
'mechanism': 'fixed',
'value': 1
},
'type': 'ViUInt32[]',
'use_array': True
}
],
'returns': 'ViStatus'
},
'WriteStatic': {
'documentation': {
'description': 'Writes a static state to the specified pins. The selected pins remain in the specified state until the next pattern burst or call to this function. If there are uncommitted changes to levels or the termination mode, this function commits the changes to the pins. This function does not change the selected pin function. If you write a static state to a pin that does not have the Digital function selected, the new static state is stored by the instrument, and affects the state of the pin the next time you change the selected function to Digital.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'List of channel names or list of pins. Do not pass a mix of channel names and pin names. An empty string denotes all digital pattern instrument channels.\n\nPin names and pin groups apply to all enabled sites, unless the pin name explicitly specifies the site. You can specify a pin in a specific site using the form site\\ ``N``/pinName\\ ````, where ``N`` is the site number. This function ignores pins that are not mapped to the digital pattern instrument.\n\nSpecify channel names using the form ``PXI1Slot3``/``0``,\\ ``2-3`` or ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``, where ``PXI1Slot3`` is the instrument resource name and ``0``, ``2``, ``3`` are channel names. To specify channels from multiple instruments, use the form ``PXI1Slot3``/``0``,\\ ``PXI1Slot3``/``2-3``,\\ ``PXI1Slot4``/``2-3``. The instruments must be in the same chassis.\n'
},
'name': 'channelList',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': 'Parameter that specifies one of the following digital states to assign to the pin.\n\n- NIDIGITAL_VAL_0: Specifies to drive low.\n- NIDIGITAL_VAL_1: Specifies to drive high.\n- NIDIGITAL_VAL_X: Specifies to not drive.\n'
},
'enum': 'WriteStaticPinState',
'name': 'state',
'type': 'ViUInt8'
}
],
'returns': 'ViStatus'
},
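# Hedged sketch driving pins to a static state; the WriteStaticPinState
# member names (ZERO/ONE/X) are assumptions based on the defined values
# listed above, and the channel string is hypothetical.
#
#     session.channels['PXI1Slot3/0'].write_static(
#         nidigital.WriteStaticPinState.ONE)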
'close': {
'codegen_method': 'private',
'documentation': {
'description': 'Closes the specified instrument session to a digital pattern instrument, aborts pattern execution, and unloads pattern memory. The channels on a digital pattern instrument remain in their current state.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'python_name': '_close',
'returns': 'ViStatus',
'use_session_lock': False
},
'error_message': {
'codegen_method': 'private',
'documentation': {
'description': 'Takes the error code returned by the digital pattern instrument driver functions, interprets it, and returns it as a user-readable string.\n'
},
'is_error_handling': True,
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns. You may also specify VI_NULL as the instrument session to retrieve the error message even when the niDigital_init function or the niDigital_InitWithOptions function fails.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'The specified error code.\n'
},
'name': 'errorCode',
'type': 'ViStatus'
},
{
'direction': 'out',
'documentation': {
'description': 'The error information formatted as a string. The array must contain at least 256 characters.\n'
},
'name': 'errorMessage',
'size': {
'mechanism': 'fixed',
'value': 256
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus',
'use_session_lock': False
},
'reset': {
'documentation': {
'description': 'Returns a digital pattern instrument to a known state. This function performs the following actions:\n\n- Aborts pattern execution.\n- Clears pin maps, time sets, source and capture waveforms, and patterns.\n- Resets all properties to default values, including the NIDIGITAL_ATTR_SELECTED_FUNCTION property, which is set to NIDIGITAL_VAL_DISCONNECT, causing the I/O switches to open.\n- Stops exporting all external signals and events.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'self_test': {
'codegen_method': 'private',
'documentation': {
'description': 'Returns self test results from a digital pattern instrument. This test requires several minutes to execute.\n'
},
'method_name_for_documentation': 'self_test',
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'The specified instrument session the niDigital_init or niDigital_InitWithOptions function returns.\n'
},
'name': 'vi',
'type': 'ViSession'
},
{
'direction': 'out',
'documentation': {
'description': 'A parameter that indicates whether the self test passed (0) or failed (!=0).\n'
},
'name': 'testResult',
'type': 'ViInt16'
},
{
'direction': 'out',
'documentation': {
'description': 'The returned self test status message. The array must contain at least 2048 characters.\n'
},
'name': 'testMessage',
'size': {
'mechanism': 'fixed',
'value': 2048
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
}
}
# --- dataset row boundary: next file is tests/test_scripts.py from
# --- jessicamizzi/khmer (hexsha 1c9c4a89c5a0361bbf437d6d840a851e027eedd3,
# --- 90,812 bytes, Python, BSD-3-Clause) ---
#
# This file is part of khmer, http://github.com/ged-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see doc/LICENSE.txt.
# Contact: khmer-project@idyll.org
#
# pylint: disable=C0111,C0103,E1103,W0612
import json
import sys
import os
import shutil
from cStringIO import StringIO
import traceback
from nose.plugins.attrib import attr
import subprocess
import threading
import bz2
import io
import khmer_tst_utils as utils
import khmer
import khmer.kfile
import screed
def scriptpath(script):
return script
def teardown():
utils.cleanup()
def test_check_space():
# @CTB this probably belongs in a new test file, along with other
# tests of the file.py module.
khmer.kfile.check_space(
['', utils.get_test_data('test-abund-read-2.fa')], False)
def test_load_into_counting():
script = scriptpath('load-into-counting.py')
args = ['-x', '1e3', '-N', '2', '-k', '20', '-t']
outfile = utils.get_temp_filename('out.ct')
infile = utils.get_test_data('test-abund-read-2.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args)
assert 'Total number of unique k-mers: 89' in err, err
assert os.path.exists(outfile)
@attr('linux')
def test_load_into_counting_toobig():
script = scriptpath('load-into-counting.py')
args = ['-x', '1e12', '-N', '2', '-k', '20', '-t', '--force']
outfile = utils.get_temp_filename('out.kh')
infile = utils.get_test_data('test-abund-read-2.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args, fail_ok=True)
assert status == -1, status
assert "MemoryError" in err, err
def test_load_into_counting_fail():
script = scriptpath('load-into-counting.py')
args = ['-x', '1e2', '-N', '2', '-k', '20'] # use small HT
outfile = utils.get_temp_filename('out.ct')
infile = utils.get_test_data('test-abund-read-2.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args, fail_ok=True)
assert status == 1, status
assert "ERROR:" in err
def test_load_into_counting_multifile():
script = scriptpath('load-into-counting.py')
args = ['-x', '1e7', '-N', '2', '-k', '20', '-t']
outfile = utils.get_temp_filename('out.kh')
infile = utils.get_test_data('test-abund-read-2.fa')
args.extend([outfile, infile, infile, infile, infile, infile,
infile, infile, infile, infile, infile, infile])
(status, out, err) = utils.runscript(script, args)
assert 'Total number of unique k-mers: 95' in err, err
assert os.path.exists(outfile)
def test_load_into_counting_tsv():
script = scriptpath('load-into-counting.py')
args = ['-x', '1e7', '-N', '2', '-k', '20', '-t', '-s', 'tsv']
outfile = utils.get_temp_filename('out.ct')
tabfile = outfile + '.info.tsv'
infile = utils.get_test_data('test-abund-read-2.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args)
assert 'Total number of unique k-mers: 95' in err, err
assert os.path.exists(outfile)
assert os.path.exists(tabfile)
with open(tabfile) as tabfh:
tabfile_lines = tabfh.readlines()
assert len(tabfile_lines) == 2
outbase = os.path.basename(outfile)
expected_tsv_line = '\t'.join([outbase, '0.000', '95', infile]) + '\n'
assert tabfile_lines[1] == expected_tsv_line, tabfile_lines
def test_load_into_counting_json():
script = scriptpath('load-into-counting.py')
args = ['-x', '1e7', '-N', '2', '-k', '20', '-t', '-s', 'json']
outfile = utils.get_temp_filename('out.ct')
jsonfile = outfile + '.info.json'
infile = utils.get_test_data('test-abund-read-2.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args)
assert 'Total number of unique k-mers: 95' in err, err
assert os.path.exists(outfile)
assert os.path.exists(jsonfile)
with open(jsonfile) as jsonfh:
got_json = json.load(jsonfh)
outbase = os.path.basename(outfile)
expected_json = {
"files": [infile],
"ht_name": outbase,
"num_kmers": 95,
"fpr": 9.024965705097741e-11,
"mrinfo_version": "0.1.0",
}
assert got_json == expected_json, got_json
def test_load_into_counting_bad_summary_fmt():
script = scriptpath('load-into-counting.py')
args = ['-x', '1e7', '-N', '2', '-k', '20', '-s', 'badfmt']
outfile = utils.get_temp_filename('out.ct')
infile = utils.get_test_data('test-abund-read-2.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args, fail_ok=True)
assert status != 0, status
assert "invalid choice: 'badfmt'" in err, err
def _make_counting(infilename, SIZE=1e7, N=2, K=20, BIGCOUNT=True):
script = scriptpath('load-into-counting.py')
args = ['-x', str(SIZE), '-N', str(N), '-k', str(K)]
if not BIGCOUNT:
args.append('-b')
outfile = utils.get_temp_filename('out.ct')
args.extend([outfile, infilename])
utils.runscript(script, args)
assert os.path.exists(outfile)
return outfile
def test_filter_abund_1():
script = scriptpath('filter-abund.py')
infile = utils.get_temp_filename('test.fa')
n_infile = utils.get_temp_filename('test-fastq-n-reads.fq')
in_dir = os.path.dirname(infile)
n_in_dir = os.path.dirname(n_infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
shutil.copyfile(utils.get_test_data('test-fastq-n-reads.fq'), n_infile)
counting_ht = _make_counting(infile, K=17)
n_counting_ht = _make_counting(n_infile, K=17)
args = [counting_ht, infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
n_outfile = n_infile + '.abundfilt'
n_outfile2 = n_infile + '2.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 1, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
args = [n_counting_ht, n_infile]
utils.runscript(script, args, n_in_dir)
seqs = set([r.sequence for r in screed.open(n_infile)])
assert os.path.exists(n_outfile), n_outfile
args = [n_counting_ht, n_infile, '-o', n_outfile2]
utils.runscript(script, args, in_dir)
assert os.path.exists(n_outfile2), n_outfile2
def test_filter_abund_2():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
counting_ht = _make_counting(infile, K=17)
script = scriptpath('filter-abund.py')
args = ['-C', '1', counting_ht, infile, infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
# make sure that FASTQ records are retained.
def test_filter_abund_3_fq_retained():
infile = utils.get_temp_filename('test.fq')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fq'), infile)
counting_ht = _make_counting(infile, K=17)
script = scriptpath('filter-abund.py')
args = ['-C', '1', counting_ht, infile, infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
# check for 'quality' string.
quals = set([r.quality for r in screed.open(outfile)])
assert len(quals) == 2, quals
assert '##################' in quals
# make sure that FASTQ names are properly parsed, both formats.
def test_filter_abund_4_fq_casava_18():
infile = utils.get_temp_filename('test.fq')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired2.fq'),
infile)
counting_ht = _make_counting(infile, K=17)
script = scriptpath('filter-abund.py')
args = [counting_ht, infile, infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.name for r in screed.open(outfile, parse_description=False)])
assert 'pair:foo 1::N' in seqs, seqs
def test_filter_abund_1_singlefile():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('filter-abund-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', '-t', infile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert 'Total number of unique k-mers: 98' in err, err
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 1, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
def test_filter_abund_2_singlefile():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
tabfile = utils.get_temp_filename('test-savetable.ct')
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('filter-abund-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', '-t', '--savetable',
tabfile, infile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert 'Total number of unique k-mers: 98' in err, err
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 1, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
def test_filter_abund_2_singlefile_fq_casava_18():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired2.fq'),
infile)
script = scriptpath('filter-abund-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', infile]
(status, out, err) = utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.name for r in screed.open(outfile, parse_description=False)])
assert 'pair:foo 1::N' in seqs, seqs
def test_filter_abund_4_retain_low_abund():
# test that the -V option does not trim sequences that are low abundance
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
counting_ht = _make_counting(infile, K=17)
script = scriptpath('filter-abund.py')
args = ['-V', counting_ht, infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
# test that the -V option *does* trim sequences that are low abundance
def test_filter_abund_5_trim_high_abund():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-3.fa'), infile)
counting_ht = _make_counting(infile, K=17)
script = scriptpath('filter-abund.py')
args = ['-V', counting_ht, infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
# trimmed sequence @ error
assert 'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGC' in seqs
# test the -V/-Z setting - should not trim if -Z is set high enough.
def test_filter_abund_6_trim_high_abund_Z():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-3.fa'), infile)
counting_ht = _make_counting(infile, K=17)
script = scriptpath('filter-abund.py')
args = ['-V', '-Z', '25', counting_ht, infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.abundfilt'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
# untrimmed seq.
badseq = 'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCgtgCCGCAGCTGTCGTCAGGG' \
'GATTTCCGGGCGG'
assert badseq in seqs # should be there, untrimmed
def test_filter_stoptags():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
stopfile = utils.get_temp_filename('stoptags', in_dir)
# first, copy test-abund-read-2.fa to 'test.fa' in the temp dir.
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
# now, create a file with some stop tags in it --
K = 18
kh = khmer.new_hashbits(K, 1, 1)
kh.add_stop_tag('GTTGACGGGGCTCAGGGG')
kh.save_stop_tags(stopfile)
del kh
# finally, run filter-stoptags.
script = scriptpath('filter-stoptags.py')
args = ['-k', str(K), stopfile, infile, infile]
utils.runscript(script, args, in_dir)
# verify that the basic output file exists
outfile = infile + '.stopfilt'
assert os.path.exists(outfile), outfile
# it should contain only one unique sequence, because we've trimmed
# off everything after the beginning of the only long sequence in there.
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 1, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs, seqs
def test_filter_stoptags_fq():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
stopfile = utils.get_temp_filename('stoptags', in_dir)
# first, copy test-abund-read-2.fa to 'test.fa' in the temp dir.
shutil.copyfile(utils.get_test_data('test-abund-read-2.fq'), infile)
# now, create a file with some stop tags in it --
K = 18
kh = khmer.new_hashbits(K, 1, 1)
kh.add_stop_tag('GTTGACGGGGCTCAGGGG')
kh.save_stop_tags(stopfile)
del kh
# finally, run filter-stoptags.
script = scriptpath('filter-stoptags.py')
args = ['-k', str(K), stopfile, infile, infile]
utils.runscript(script, args, in_dir)
# verify that the basic output file exists
outfile = infile + '.stopfilt'
assert os.path.exists(outfile), outfile
# it should contain only one unique sequence, because we've trimmed
# off everything after the beginning of the only long sequence in there.
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 1, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs, seqs
# make sure that record names are carried through unparsed
names = [r.name for r in screed.open(outfile, parse_description=False)]
names = set(names)
assert 'seq 1::BAR' in names
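# Note on the two stoptag tests above: filter-stoptags truncates each read at
# the first k-mer found in the stop-tag set, which is why only the short
# prefix 'GGTTGACGGGGCTCAGGG' survives from the long input sequence.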
def test_normalize_by_median_indent():
infile = utils.get_test_data('paired-mixed.fa.pe')
hashfile = utils.get_test_data('normC20k20.ct')
outfile = utils.get_temp_filename('paired-mixed.fa.pe.keep')
script = scriptpath('normalize-by-median.py')
args = ['--loadtable', hashfile, '-o', outfile, infile]
(status, out, err) = utils.runscript(script, args)
assert status == 0, (out, err)
assert os.path.exists(outfile)
def test_normalize_by_median():
CUTOFF = '1'
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-k', '17', '-t', infile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert 'Total number of unique k-mers: 98' in err, err
outfile = infile + '.keep'
assert os.path.exists(outfile), outfile
seqs = [r.sequence for r in screed.open(outfile)]
assert len(seqs) == 1, seqs
assert seqs[0].startswith('GGTTGACGGGGCTCAGGGGG'), seqs
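# A sketch of the per-read decision normalize-by-median makes (illustrative
# only; assumes `ht` is a khmer counting hash and `cutoff` is the -C value):
def _sketch_diginorm_keep(ht, seq, cutoff):
    # get_median_count returns (median, average, stddev) over the read's
    # k-mer counts so far
    med, _, _ = ht.get_median_count(seq)
    if med < cutoff:
        ht.consume(seq)  # count the read's k-mers and keep the read
        return True
    return False  # estimated coverage already at the cutoff: discard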
def test_normalize_by_median_append():
outfile = utils.get_temp_filename('test.fa.keep')
shutil.copyfile(utils.get_test_data('test-abund-read.fa'), outfile)
in_dir = os.path.dirname(outfile)
CUTOFF = '1'
infile = utils.get_temp_filename('test.fa', in_dir)
shutil.copyfile(utils.get_test_data('test-abund-read-3.fa'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-k', '17', '-t', '-o', outfile, '--append', infile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert os.path.exists(outfile), outfile
seqs = [r.sequence for r in screed.open(outfile)]
assert len(seqs) == 2, seqs
assert 'GACAGCgtgCCGCA' in seqs[1], seqs
def test_normalize_by_median_overwrite():
outfile = utils.get_temp_filename('test.fa.keep')
shutil.copyfile(utils.get_test_data('test-abund-read.fa'), outfile)
in_dir = os.path.dirname(outfile)
CUTOFF = '1'
infile = utils.get_temp_filename('test.fa', in_dir)
shutil.copyfile(utils.get_test_data('test-abund-read-3.fa'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-k', '17', '-t', '-o', outfile, infile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert os.path.exists(outfile), outfile
seqs = [r.sequence for r in screed.open(outfile)]
assert len(seqs) == 1, seqs
assert 'GACAGCgtgCCGCA' in seqs[0], seqs
def test_normalize_by_median_version():
script = scriptpath('normalize-by-median.py')
args = ['--version']
status, out, err = utils.runscript(script, args)
errlines = err.splitlines()
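# skip the '||'-prefixed banner lines and blank lines; after the loop `err`
# is left bound to the first informative line, which should carry the
# version string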
for err in errlines:
if err.startswith('||') or \
not err.strip():
continue
break
print errlines
print err
assert err.startswith('khmer ')
def test_normalize_by_median_2():
CUTOFF = '2'
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-k', '17', infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.keep'
assert os.path.exists(outfile), outfile
seqs = [r.sequence for r in screed.open(outfile)]
assert len(seqs) == 2, seqs
assert seqs[0].startswith('GGTTGACGGGGCTCAGGGGG'), seqs
assert seqs[1] == 'GGTTGACGGGGCTCAGGG', seqs
def test_normalize_by_median_paired():
CUTOFF = '1'
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-paired.fa'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-p', '-k', '17', infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.keep'
assert os.path.exists(outfile), outfile
seqs = [r.sequence for r in screed.open(outfile)]
assert len(seqs) == 2, seqs
assert seqs[0].startswith('GGTTGACGGGGCTCAGGGGG'), seqs
assert seqs[1].startswith('GGTTGACGGGGCTCAGGG'), seqs
def test_normalize_by_median_paired_fq():
CUTOFF = '20'
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-paired.fq'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-p', '-k', '17', infile]
_, out, err = utils.runscript(script, args, in_dir)
print out
print err
outfile = infile + '.keep'
assert os.path.exists(outfile), outfile
seqs = [r.sequence for r in screed.open(outfile)]
assert len(seqs) == 6, len(seqs)
assert seqs[0].startswith('GGTTGACGGGGCTCAGGGGG'), seqs
assert seqs[1].startswith('GGTTGACGGGGCTCAGGG'), seqs
names = [r.name for r in screed.open(outfile, parse_description=False)]
assert len(names) == 6, names
assert '895:1:37:17593:9954 1::FOO' in names, names
assert '895:1:37:17593:9954 2::FOO' in names, names
def test_normalize_by_median_impaired():
CUTOFF = '1'
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-impaired.fa'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-p', '-k', '17', infile]
utils.runscript(script, args, in_dir, fail_ok=True)
def test_normalize_by_median_force():
CUTOFF = '1'
corrupt_infile = utils.get_temp_filename('test-corrupt.fq')
good_infile = utils.get_temp_filename('test-good.fq',
tempdir=os.path.dirname(
corrupt_infile))
in_dir = os.path.dirname(good_infile)
shutil.copyfile(utils.get_test_data('test-error-reads.fq'), corrupt_infile)
shutil.copyfile(utils.get_test_data('test-fastq-reads.fq'), good_infile)
script = scriptpath('normalize-by-median.py')
args = ['-f', '-C', CUTOFF, '-k', '17', corrupt_infile, good_infile]
(status, out, err) = utils.runscript(script, args, in_dir)
test_ht = khmer.load_counting_hash(corrupt_infile + '.ct.failed')
test_good_read = 'CAGGCGCCCACCACCGTGCCCTCCAACCTGATGGT'
test_good_read2 = 'TAGTATCATCAAGGTTCAAGATGTTAATGAATAACAATTGCGCAGCAA'
assert test_ht.count(test_good_read[:17]) > 0
assert test_ht.count(test_good_read2[:17]) > 0
assert os.path.exists(corrupt_infile + '.ct.failed')
assert '*** Skipping' in err
assert '** IOErrors' in err
def test_normalize_by_median_no_bigcount():
infile = utils.get_temp_filename('test.fa')
hashfile = utils.get_temp_filename('test-out.ct')
outfile = infile + '.keep'
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
counting_ht = _make_counting(infile, K=8)
script = scriptpath('normalize-by-median.py')
args = ['-C', '1000', '-k', '8', '--savetable', hashfile, infile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert status == 0, (out, err)
print(out, err)
assert os.path.exists(hashfile), hashfile
kh = khmer.load_counting_hash(hashfile)
assert kh.get('GGTTGACG') == 255
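# Background for the '== 255' assertion above: khmer stores counts in a
# single byte, so with bigcount disabled they saturate at 255. A minimal
# sketch (assumes the new_counting_hash/count/get API):
def _sketch_bigcount_saturation():
    kh = khmer.new_counting_hash(8, 10000, 2)
    kh.set_use_bigcount(False)
    for _ in range(1000):
        kh.count('GGTTGACG')
    assert kh.get('GGTTGACG') == 255  # capped at one byte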
def test_normalize_by_median_dumpfrequency():
CUTOFF = '1'
infiles = [utils.get_temp_filename('test-0.fq')]
in_dir = os.path.dirname(infiles[0])
for x in range(1, 5):
infiles.append(utils.get_temp_filename('test-{x}.fq'.format(x=x),
tempdir=in_dir))
for infile in infiles:
shutil.copyfile(utils.get_test_data('test-fastq-reads.fq'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-d', '2', '-C', CUTOFF, '-k', '17']
args.extend(infiles)
(status, out, err) = utils.runscript(script, args, in_dir)
test_ht = khmer.load_counting_hash(os.path.join(in_dir, 'backup.ct'))
test_good_read = 'CAGGCGCCCACCACCGTGCCCTCCAACCTGATGGT'
test_good_read2 = 'TAGTATCATCAAGGTTCAAGATGTTAATGAATAACAATTGCGCAGCAA'
assert test_ht.count(test_good_read[:17]) > 0
assert test_ht.count(test_good_read2[:17]) > 0
assert os.path.exists(os.path.join(in_dir, 'backup.ct'))
assert out.count('Backup: Saving') == 2
assert 'Nothing' in out
def test_normalize_by_median_empty():
CUTOFF = '1'
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-empty.fa'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-C', CUTOFF, '-k', '17', infile]
utils.runscript(script, args, in_dir)
outfile = infile + '.keep'
assert os.path.exists(outfile), outfile
def test_normalize_by_median_fpr():
MIN_TABLESIZE_PARAM = 1
infile = utils.get_temp_filename('test-fpr.fq')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-fastq-reads.fq'), infile)
script = scriptpath('normalize-by-median.py')
args = ['-f', '-k', '17', '-x', str(MIN_TABLESIZE_PARAM), infile]
(status, out, err) = utils.runscript(script, args, in_dir, fail_ok=True)
assert os.path.exists(infile + '.keep')
assert 'fp rate estimated to be' in out, out
assert '** ERROR: the k-mer counting table is too small' in err, err
def test_count_median():
infile = utils.get_temp_filename('test.fa')
outfile = infile + '.counts'
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
counting_ht = _make_counting(infile, K=8)
script = scriptpath('count-median.py')
args = [counting_ht, infile, outfile]
utils.runscript(script, args)
assert os.path.exists(outfile), outfile
data = [x.strip() for x in open(outfile)]
data = set(data)
assert len(data) == 2, data
assert 'seq 1001 1001.0 0.0 18' in data
assert '895:1:37:17593:9954/1 1 103.803741455 303.702941895 114' in data
def test_count_median_fq():
infile = utils.get_temp_filename('test.fa')
outfile = infile + '.counts'
shutil.copyfile(utils.get_test_data('test-abund-read-2.fq'), infile)
counting_ht = _make_counting(infile, K=8)
script = scriptpath('count-median.py')
args = [counting_ht, infile, outfile]
utils.runscript(script, args)
assert os.path.exists(outfile), outfile
data = [x.strip() for x in open(outfile)]
data = set(data)
assert len(data) == 2, data
assert 'seq 1001 1001.0 0.0 18' in data
assert '895:1:37:17593:9954 1 103.803741455 303.702941895 114' in data
def test_count_median_fq_csv():
infile = utils.get_temp_filename('test.fa')
outfile = infile + '.counts'
shutil.copyfile(utils.get_test_data('test-abund-read-2.fq'), infile)
counting_ht = _make_counting(infile, K=8)
script = scriptpath('count-median.py')
args = ['--csv', counting_ht, infile, outfile]
utils.runscript(script, args)
assert os.path.exists(outfile), outfile
data = [x.strip() for x in open(outfile)]
data = set(data)
assert len(data) == 4, data
assert 'name,median,average,stddev,seqlen' in data
assert 'seq,1001,1001.0,0.0,18' in data
# verify that sequence names remain unparsed with '--csv'
names = set([line.split(',')[0] for line in data])
assert '895:1:37:17593:9954 1::FOO' in names, names
#
def test_load_graph():
script = scriptpath('load-graph.py')
args = ['-x', '1e7', '-N', '2', '-k', '20', '-t']
outfile = utils.get_temp_filename('out')
infile = utils.get_test_data('random-20-a.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args)
assert 'Total number of unique k-mers: 3960' in err, err
ht_file = outfile + '.pt'
assert os.path.exists(ht_file), ht_file
tagset_file = outfile + '.tagset'
assert os.path.exists(tagset_file), tagset_file
ht = khmer.load_hashbits(ht_file)
ht.load_tagset(tagset_file)
# check to make sure we get the expected result for this data set
# upon partitioning (all in one partition). This is kind of a
# roundabout way of checking that load-graph worked :)
subset = ht.do_subset_partition(0, 0)
x = ht.subset_count_partitions(subset)
assert x == (1, 0), x
def test_load_graph_no_tags():
script = scriptpath('load-graph.py')
args = ['-x', '1e7', '-N', '2', '-k', '20', '-n']
outfile = utils.get_temp_filename('out')
infile = utils.get_test_data('random-20-a.fa')
args.extend([outfile, infile])
utils.runscript(script, args)
ht_file = outfile + '.pt'
assert os.path.exists(ht_file), ht_file
tagset_file = outfile + '.tagset'
assert not os.path.exists(tagset_file), tagset_file
assert khmer.load_hashbits(ht_file)
# can't think of a good way to make sure this worked, beyond just
# loading the ht file...
def test_load_graph_fail():
script = scriptpath('load-graph.py')
args = ['-x', '1e3', '-N', '2', '-k', '20'] # use small HT
outfile = utils.get_temp_filename('out')
infile = utils.get_test_data('random-20-a.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args, fail_ok=True)
assert status == 1, status
assert "ERROR:" in err
def test_load_graph_write_fp():
script = scriptpath('load-graph.py')
args = ['-x', '1e5', '-N', '2', '-k', '20', '-w'] # use small HT
outfile = utils.get_temp_filename('out')
infile = utils.get_test_data('random-20-a.fa')
args.extend([outfile, infile])
(status, out, err) = utils.runscript(script, args)
ht_file = outfile + '.pt'
assert os.path.exists(ht_file), ht_file
info_file = outfile + '.info'
assert os.path.exists(info_file), info_file
data = [x.strip() for x in open(info_file)]
data = set(data)
assert '3959 unique k-mers' in data
assert 'false positive rate estimated to be 0.002' in data
def test_load_graph_multithread():
script = scriptpath('load-graph.py')
outfile = utils.get_temp_filename('test')
infile = utils.get_test_data('test-reads.fa')
args = ['-N', '4', '-x', '1e7', '-T', '8', outfile, infile]
(status, out, err) = utils.runscript(script, args)
def _make_graph(infilename, min_hashsize=1e7, n_hashes=2, ksize=20,
do_partition=False,
annotate_partitions=False,
stop_big_traverse=False):
script = scriptpath('load-graph.py')
args = ['-x', str(min_hashsize), '-N', str(n_hashes), '-k', str(ksize)]
outfile = utils.get_temp_filename('out')
infile = infilename
args.extend([outfile, infile])
utils.runscript(script, args)
ht_file = outfile + '.pt'
assert os.path.exists(ht_file), ht_file
tagset_file = outfile + '.tagset'
assert os.path.exists(tagset_file), tagset_file
if do_partition:
script = scriptpath('partition-graph.py')
args = [outfile]
if stop_big_traverse:
args.insert(0, '--no-big-traverse')
utils.runscript(script, args)
script = scriptpath('merge-partitions.py')
args = [outfile, '-k', str(ksize)]
utils.runscript(script, args)
final_pmap_file = outfile + '.pmap.merged'
assert os.path.exists(final_pmap_file)
if annotate_partitions:
script = scriptpath('annotate-partitions.py')
args = ["-k", str(ksize), outfile, infilename]
in_dir = os.path.dirname(outfile)
utils.runscript(script, args, in_dir)
baseinfile = os.path.basename(infilename)
assert os.path.exists(os.path.join(in_dir, baseinfile + '.part'))
return outfile
def _DEBUG_make_graph(infilename, min_hashsize=1e7, n_hashes=2, ksize=20,
do_partition=False,
annotate_partitions=False,
stop_big_traverse=False):
script = scriptpath('load-graph.py')
args = ['-x', str(min_hashsize), '-N', str(n_hashes), '-k', str(ksize)]
outfile = utils.get_temp_filename('out')
infile = utils.get_test_data(infilename)
args.extend([outfile, infile])
utils.runscript(script, args)
ht_file = outfile + '.pt'
assert os.path.exists(ht_file), ht_file
tagset_file = outfile + '.tagset'
assert os.path.exists(tagset_file), tagset_file
if do_partition:
print ">>>> DEBUG: Partitioning <<<"
script = scriptpath('partition-graph.py')
args = [outfile]
if stop_big_traverse:
args.insert(0, '--no-big-traverse')
utils.runscript(script, args)
print ">>>> DEBUG: Merging Partitions <<<"
script = scriptpath('merge-partitions.py')
args = [outfile, '-k', str(ksize)]
utils.runscript(script, args)
final_pmap_file = outfile + '.pmap.merged'
assert os.path.exists(final_pmap_file)
if annotate_partitions:
print ">>>> DEBUG: Annotating Partitions <<<"
script = scriptpath('annotate-partitions.py')
args = ["-k", str(ksize), outfile, infilename]
in_dir = os.path.dirname(outfile)
utils.runscript(script, args, in_dir)
baseinfile = os.path.basename(infilename)
assert os.path.exists(os.path.join(in_dir, baseinfile + '.part'))
return outfile
def test_partition_graph_1():
graphbase = _make_graph(utils.get_test_data('random-20-a.fa'))
script = scriptpath('partition-graph.py')
args = [graphbase]
utils.runscript(script, args)
script = scriptpath('merge-partitions.py')
args = [graphbase, '-k', str(20)]
utils.runscript(script, args)
final_pmap_file = graphbase + '.pmap.merged'
assert os.path.exists(final_pmap_file)
ht = khmer.load_hashbits(graphbase + '.pt')
ht.load_tagset(graphbase + '.tagset')
ht.load_partitionmap(final_pmap_file)
x = ht.count_partitions()
assert x == (1, 0), x # should be exactly one partition.
def test_partition_graph_nojoin_k21():
# test with K=21
graphbase = _make_graph(utils.get_test_data('random-20-a.fa'), ksize=21)
script = scriptpath('partition-graph.py')
args = [graphbase]
utils.runscript(script, args)
script = scriptpath('merge-partitions.py')
args = [graphbase, '-k', str(21)]
utils.runscript(script, args)
final_pmap_file = graphbase + '.pmap.merged'
assert os.path.exists(final_pmap_file)
ht = khmer.load_hashbits(graphbase + '.pt')
ht.load_tagset(graphbase + '.tagset')
ht.load_partitionmap(final_pmap_file)
x = ht.count_partitions()
assert x == (99, 0), x # should be 99 partitions at K=21
def test_partition_graph_nojoin_stoptags():
# test with stoptags
graphbase = _make_graph(utils.get_test_data('random-20-a.fa'))
# add in some stop tags
ht = khmer.load_hashbits(graphbase + '.pt')
ht.add_stop_tag('TTGCATACGTTGAGCCAGCG')
stoptags_file = graphbase + '.stoptags'
ht.save_stop_tags(stoptags_file)
del ht
# run script with stoptags option
script = scriptpath('partition-graph.py')
args = ['--stoptags', stoptags_file, graphbase]
utils.runscript(script, args)
script = scriptpath('merge-partitions.py')
args = [graphbase, '-k', str(20)]
utils.runscript(script, args)
final_pmap_file = graphbase + '.pmap.merged'
assert os.path.exists(final_pmap_file)
ht = khmer.load_hashbits(graphbase + '.pt')
ht.load_tagset(graphbase + '.tagset')
ht.load_partitionmap(final_pmap_file)
x = ht.count_partitions()
assert x == (2, 0), x # should be 2 partitions
def test_partition_graph_big_traverse():
graphbase = _make_graph(utils.get_test_data('biglump-random-20-a.fa'),
do_partition=True, stop_big_traverse=False)
final_pmap_file = graphbase + '.pmap.merged'
assert os.path.exists(final_pmap_file)
ht = khmer.load_hashbits(graphbase + '.pt')
ht.load_tagset(graphbase + '.tagset')
ht.load_partitionmap(final_pmap_file)
x = ht.count_partitions()
assert x == (1, 0), x # should be exactly one partition.
def test_partition_graph_no_big_traverse():
# do NOT exhaustively traverse
graphbase = _make_graph(utils.get_test_data('biglump-random-20-a.fa'),
do_partition=True, stop_big_traverse=True)
final_pmap_file = graphbase + '.pmap.merged'
assert os.path.exists(final_pmap_file)
ht = khmer.load_hashbits(graphbase + '.pt')
ht.load_tagset(graphbase + '.tagset')
ht.load_partitionmap(final_pmap_file)
x = ht.count_partitions()
assert x[0] == 4, x # should be four partitions, broken at knot.
def test_annotate_partitions():
seqfile = utils.get_test_data('random-20-a.fa')
graphbase = _make_graph(seqfile, do_partition=True)
in_dir = os.path.dirname(graphbase)
# get the final pmap file
final_pmap_file = graphbase + '.pmap.merged'
assert os.path.exists(final_pmap_file)
script = scriptpath('annotate-partitions.py')
args = ["-k", "20", graphbase, seqfile]
utils.runscript(script, args, in_dir)
partfile = os.path.join(in_dir, 'random-20-a.fa.part')
parts = [r.name.split('\t')[1] for r in screed.open(partfile)]
parts = set(parts)
assert '2' in parts
assert len(parts) == 1
def test_annotate_partitions_2():
# test with K=21 (no joining of sequences)
seqfile = utils.get_test_data('random-20-a.fa')
graphbase = _make_graph(seqfile, do_partition=True,
ksize=21)
in_dir = os.path.dirname(graphbase)
# get the final pmap file
final_pmap_file = graphbase + '.pmap.merged'
assert os.path.exists(final_pmap_file)
script = scriptpath('annotate-partitions.py')
args = ["-k", "21", graphbase, seqfile]
utils.runscript(script, args, in_dir)
partfile = os.path.join(in_dir, 'random-20-a.fa.part')
parts = [r.name.split('\t')[1] for r in screed.open(partfile)]
parts = set(parts)
print parts
assert len(parts) == 99, len(parts)
def test_extract_partitions():
seqfile = utils.get_test_data('random-20-a.fa')
graphbase = _make_graph(
seqfile, do_partition=True, annotate_partitions=True)
in_dir = os.path.dirname(graphbase)
# get the final part file
partfile = os.path.join(in_dir, 'random-20-a.fa.part')
# ok, now run extract-partitions.
script = scriptpath('extract-partitions.py')
args = ['extracted', partfile]
utils.runscript(script, args, in_dir)
distfile = os.path.join(in_dir, 'extracted.dist')
groupfile = os.path.join(in_dir, 'extracted.group0000.fa')
assert os.path.exists(distfile)
assert os.path.exists(groupfile)
dist = open(distfile).readline()
assert dist.strip() == '99 1 1 99'
parts = [r.name.split('\t')[1] for r in screed.open(partfile)]
assert len(parts) == 99, len(parts)
parts = set(parts)
assert len(parts) == 1, len(parts)
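# Reading the .dist line asserted above as four columns: partition size,
# number of partitions of that size, cumulative partition count, cumulative
# sequence count. So '99 1 1 99' means one partition holding all 99 reads
# (interpretation based on the values checked in these tests).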
def test_extract_partitions_fq():
seqfile = utils.get_test_data('random-20-a.fq')
graphbase = _make_graph(
seqfile, do_partition=True, annotate_partitions=True)
in_dir = os.path.dirname(graphbase)
# get the final part file
partfile = os.path.join(in_dir, 'random-20-a.fq.part')
# ok, now run extract-partitions.
script = scriptpath('extract-partitions.py')
args = ['extracted', partfile]
utils.runscript(script, args, in_dir)
distfile = os.path.join(in_dir, 'extracted.dist')
groupfile = os.path.join(in_dir, 'extracted.group0000.fq')
assert os.path.exists(distfile)
assert os.path.exists(groupfile)
dist = open(distfile).readline()
assert dist.strip() == '99 1 1 99'
screed_iter = screed.open(partfile, parse_description=False)
names = [r.name.split('\t')[0] for r in screed_iter]
assert '35 1::FOO' in names
assert '46 1::FIZ' in names
screed_iter = screed.open(partfile, parse_description=False)
parts = [r.name.split('\t')[1] for r in screed_iter]
assert len(parts) == 99, len(parts)
parts = set(parts)
assert len(parts) == 1, len(parts)
quals = set([r.quality for r in screed.open(partfile)])
quals = list(quals)
assert quals[0], quals
def test_extract_partitions_output_unassigned():
seqfile = utils.get_test_data('random-20-a.fa')
graphbase = _make_graph(
seqfile, do_partition=True, annotate_partitions=True)
in_dir = os.path.dirname(graphbase)
# get the final part file
partfile = os.path.join(in_dir, 'random-20-a.fa.part')
# ok, now run extract-partitions.
script = scriptpath('extract-partitions.py')
args = ['-U', 'extracted', partfile]
utils.runscript(script, args, in_dir)
distfile = os.path.join(in_dir, 'extracted.dist')
groupfile = os.path.join(in_dir, 'extracted.group0000.fa')
unassigned_file = os.path.join(in_dir, 'extracted.unassigned.fa')
assert os.path.exists(distfile)
assert os.path.exists(groupfile)
assert os.path.exists(unassigned_file)
dist = open(distfile).readline()
assert dist.strip() == '99 1 1 99'
parts = [r.name.split('\t')[1] for r in screed.open(partfile)]
assert len(parts) == 99, len(parts)
parts = set(parts)
assert len(parts) == 1, len(parts)
def test_extract_partitions_no_output_groups():
seqfile = utils.get_test_data('random-20-a.fq')
graphbase = _make_graph(
seqfile, do_partition=True, annotate_partitions=True)
in_dir = os.path.dirname(graphbase)
# get the final part file
partfile = os.path.join(in_dir, 'random-20-a.fq.part')
# ok, now run extract-partitions.
script = scriptpath('extract-partitions.py')
args = ['-n', 'extracted', partfile]
# We expect a sys.exit -> we need the test to be tolerant
utils.runscript(script, args, in_dir, fail_ok=True)
# Group files are created after output_groups is
# checked. They should not exist in this scenario
groupfile = os.path.join(in_dir, 'extracted.group0000.fa')
assert not os.path.exists(groupfile)
def test_extract_partitions_pid_0():
basefile = utils.get_test_data('random-20-a.fa.part')
partfile = utils.get_temp_filename('random-20-a.fa.part')
shutil.copyfile(basefile, partfile)
in_dir = os.path.dirname(partfile)
# ok, now run extract-partitions.
script = scriptpath('extract-partitions.py')
args = ['-U', 'extracted', partfile]
utils.runscript(script, args, in_dir)
distfile = os.path.join(in_dir, 'extracted.dist')
groupfile = os.path.join(in_dir, 'extracted.group0000.fa')
unassigned_file = os.path.join(in_dir, 'extracted.unassigned.fa')
assert os.path.exists(distfile)
assert os.path.exists(groupfile)
assert os.path.exists(unassigned_file)
# Assert unassigned file not empty
unassigned_content = open(unassigned_file).readline()
assert unassigned_content.strip().split('\t')[0] != ''
def test_extract_partitions_multi_groups():
basefile = utils.get_test_data('random-20-a.fa.part')
partfile = utils.get_temp_filename('random-20-a.fa.part')
shutil.copyfile(basefile, partfile)
in_dir = os.path.dirname(partfile)
# ok, now run extract-partitions.
script = scriptpath('extract-partitions.py')
args = ['-m', '1', '-X', '1', 'extracted', partfile]
utils.runscript(script, args, in_dir)
# With -m 1 and -X 1, multiple group files should be created
groupfile1 = os.path.join(in_dir, 'extracted.group0000.fa')
groupfile2 = os.path.join(in_dir, 'extracted.group0001.fa')
groupfile3 = os.path.join(in_dir, 'extracted.group0002.fa')
assert os.path.exists(groupfile1)
assert os.path.exists(groupfile2)
assert os.path.exists(groupfile3)
def test_extract_partitions_no_groups():
empty_file = utils.get_temp_filename('empty-file')
basefile = utils.get_test_data('empty-file')
shutil.copyfile(basefile, empty_file)
in_dir = os.path.dirname(empty_file)
# ok, now run extract-partitions.
script = scriptpath('extract-partitions.py')
args = ['extracted', empty_file]
utils.runscript(script, args, in_dir, fail_ok=True)
# No group files should be created
groupfile = os.path.join(in_dir, 'extracted.group0000.fa')
assert not os.path.exists(groupfile)
def test_abundance_dist():
infile = utils.get_temp_filename('test.fa')
outfile = utils.get_temp_filename('test.dist')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
htfile = _make_counting(infile, K=17)
script = scriptpath('abundance-dist.py')
args = ['-z', htfile, infile, outfile]
utils.runscript(script, args, in_dir)
fp = iter(open(outfile))
line = fp.next().strip()
assert line == '1 96 96 0.98', line
line = fp.next().strip()
assert line == '1001 2 98 1.0', line
os.remove(outfile)
args = ['-z', '--csv', htfile, infile, outfile]
utils.runscript(script, args, in_dir)
fp = iter(open(outfile))
line = fp.next().strip()
assert (line == 'abundance,count,cumulative,cumulative_fraction'), line
line = fp.next().strip()
assert line == '1,96,96,0.98', line
line = fp.next().strip()
assert line == '1001,2,98,1.0', line
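# Per the CSV header asserted above, the space-separated variant's columns
# are: abundance, count, cumulative, cumulative_fraction.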
def test_abundance_dist_nobigcount():
infile = utils.get_temp_filename('test.fa')
outfile = utils.get_temp_filename('test.dist')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
htfile = _make_counting(infile, K=17, BIGCOUNT=False)
script = scriptpath('abundance-dist.py')
args = ['-z', htfile, infile, outfile]
utils.runscript(script, args, in_dir)
fp = iter(open(outfile))
line = fp.next().strip()
assert line == '1 96 96 0.98', line
line = fp.next().strip()
assert line == '255 2 98 1.0', line
def test_abundance_dist_single():
infile = utils.get_temp_filename('test.fa')
outfile = utils.get_temp_filename('test.dist')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('abundance-dist-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', '-z', '-t', infile,
outfile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert 'Total number of unique k-mers: 98' in err, err
fp = iter(open(outfile))
line = fp.next().strip()
assert line == '1 96 96 0.98', line
line = fp.next().strip()
assert line == '1001 2 98 1.0', line
def test_abundance_dist_single_csv():
infile = utils.get_temp_filename('test.fa')
outfile = utils.get_temp_filename('test.dist')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('abundance-dist-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', '-z', '--csv', infile,
outfile]
(status, out, err) = utils.runscript(script, args, in_dir)
fp = iter(open(outfile))
line = fp.next().strip()
assert (line == 'abundance,count,cumulative,cumulative_fraction'), line
line = fp.next().strip()
assert line == '1,96,96,0.98', line
line = fp.next().strip()
assert line == '1001,2,98,1.0', line
def test_abundance_dist_single_nobigcount():
infile = utils.get_temp_filename('test.fa')
outfile = utils.get_temp_filename('test.dist')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('abundance-dist-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', '-z', '-b', infile, outfile]
utils.runscript(script, args, in_dir)
fp = iter(open(outfile))
line = fp.next().strip()
assert line == '1 96 96 0.98', line
line = fp.next().strip()
assert line == '255 2 98 1.0', line
def test_abundance_dist_single_nosquash():
infile = utils.get_temp_filename('test.fa')
outfile = utils.get_temp_filename('test-abund-read-2.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('abundance-dist-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', '-z', '-t', infile, outfile]
utils.runscript(script, args, in_dir)
fp = iter(open(outfile))
line = fp.next().strip()
assert line == '1 96 96 0.98', line
line = fp.next().strip()
assert line == '1001 2 98 1.0', line
def test_abundance_dist_single_savetable():
infile = utils.get_temp_filename('test.fa')
outfile = utils.get_temp_filename('test.dist')
tabfile = utils.get_temp_filename('test-savetable.ct')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
script = scriptpath('abundance-dist-single.py')
args = ['-x', '1e7', '-N', '2', '-k', '17', '-z', '-t', '--savetable',
tabfile, infile, outfile]
utils.runscript(script, args, in_dir)
fp = iter(open(outfile))
line = fp.next().strip()
assert line == '1 96 96 0.98', line
line = fp.next().strip()
assert line == '1001 2 98 1.0', line
def test_do_partition():
seqfile = utils.get_test_data('random-20-a.fa')
graphbase = utils.get_temp_filename('out')
in_dir = os.path.dirname(graphbase)
script = scriptpath('do-partition.py')
args = ["-k", "20", graphbase, seqfile]
utils.runscript(script, args, in_dir)
partfile = os.path.join(in_dir, 'random-20-a.fa.part')
parts = [r.name.split('\t')[1] for r in screed.open(partfile)]
parts = set(parts)
assert '2' in parts
assert len(parts) == 1
def test_do_partition_2():
# test with K=21 (no joining of sequences)
seqfile = utils.get_test_data('random-20-a.fa')
graphbase = utils.get_temp_filename('out')
in_dir = os.path.dirname(graphbase)
script = scriptpath('do-partition.py')
args = ["-k", "21", graphbase, seqfile]
utils.runscript(script, args, in_dir)
partfile = os.path.join(in_dir, 'random-20-a.fa.part')
parts = [r.name.split('\t')[1] for r in screed.open(partfile)]
parts = set(parts)
assert len(parts) == 99, len(parts)
def test_do_partition_2_fq():
# test with K=21 (no joining of sequences)
seqfile = utils.get_test_data('random-20-a.fq')
graphbase = utils.get_temp_filename('out')
in_dir = os.path.dirname(graphbase)
script = scriptpath('do-partition.py')
args = ["-k", "21", graphbase, seqfile]
utils.runscript(script, args, in_dir)
partfile = os.path.join(in_dir, 'random-20-a.fq.part')
screed_iter = screed.open(partfile, parse_description=False)
names = [r.name.split('\t')[0] for r in screed_iter]
assert '35 1::FOO' in names
assert '46 1::FIZ' in names
def test_interleave_reads_1_fq():
# test input files
infile1 = utils.get_test_data('paired.fq.1')
infile2 = utils.get_test_data('paired.fq.2')
# correct output
ex_outfile = utils.get_test_data('paired.fq')
# actual output file
outfile = utils.get_temp_filename('out.fq')
script = scriptpath('interleave-reads.py')
args = [infile1, infile2, '-o', outfile]
utils.runscript(script, args)
r = open(ex_outfile).read()
q = open(outfile).read()
assert r == q, (r, q)
def test_interleave_reads_broken_fq():
# test input files
infile1 = utils.get_test_data('paired-broken.fq.1')
infile2 = utils.get_test_data('paired-broken.fq.2')
# actual output file
outfile = utils.get_temp_filename('out.fq')
script = scriptpath('interleave-reads.py')
args = [infile1, infile2, '-o', outfile]
status, out, err = utils.runscript(script, args, fail_ok=True)
assert status == 1
def test_interleave_reads_broken_fq_2():
# test input files
infile1 = utils.get_test_data('paired-broken2.fq.1')
infile2 = utils.get_test_data('paired-broken2.fq.2')
# actual output file
outfile = utils.get_temp_filename('out.fq')
script = scriptpath('interleave-reads.py')
args = [infile1, infile2, '-o', outfile]
status, out, err = utils.runscript(script, args, fail_ok=True)
assert status == 1
def test_interleave_reads_broken_fq_3():
# test input files
infile1 = utils.get_test_data('paired-broken3.fq.1')
infile2 = utils.get_test_data('paired-broken3.fq.2')
# actual output file
outfile = utils.get_temp_filename('out.fq')
script = scriptpath('interleave-reads.py')
args = [infile1, infile2, '-o', outfile]
status, out, err = utils.runscript(script, args, fail_ok=True)
assert status == 1
def test_interleave_reads_broken_fq_4():
# test input files
infile1 = utils.get_test_data('paired-mixed-broken.fq')
# actual output file
outfile = utils.get_temp_filename('out.fq')
script = scriptpath('interleave-reads.py')
args = [infile1, '-o', outfile]
status, out, err = utils.runscript(script, args, fail_ok=True)
assert status == 1
def test_interleave_reads_2_fa():
# test input files
infile1 = utils.get_test_data('paired.fa.1')
infile2 = utils.get_test_data('paired.fa.2')
# correct output
ex_outfile = utils.get_test_data('paired.fa')
# actual output file
outfile = utils.get_temp_filename('out.fa')
script = scriptpath('interleave-reads.py')
args = [infile1, infile2, '-o', outfile]
utils.runscript(script, args)
n = 0
for r, q in zip(screed.open(ex_outfile), screed.open(outfile)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert n > 0
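# A sketch of the name agreement that interleaving depends on (illustrative
# only; handles the '/1' '/2' suffix style used in these test files):
def _sketch_check_pair_names(infile1, infile2):
    for r1, r2 in zip(screed.open(infile1), screed.open(infile2)):
        base1 = r1.name.split('/')[0].split()[0]
        base2 = r2.name.split('/')[0].split()[0]
        # records at the same position must share a base name
        assert base1 == base2, (r1.name, r2.name)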
def test_make_initial_stoptags():
# generate the input files with load-graph.py -t rather than shipping them,
# to keep the test_data directory size down; this assumes load-graph itself
# works properly
bzinfile = utils.get_temp_filename('test-reads.fq.bz2')
shutil.copyfile(utils.get_test_data('test-reads.fq.bz2'), bzinfile)
in_dir = os.path.dirname(bzinfile)
genscript = scriptpath('load-graph.py')
genscriptargs = ['-t', 'test-reads', 'test-reads.fq.bz2']
utils.runscript(genscript, genscriptargs, in_dir)
# test input files generated by load-graph above
infile = utils.get_temp_filename('test-reads.pt')
infile2 = utils.get_temp_filename('test-reads.tagset', in_dir)
# get file to compare against
ex_outfile = utils.get_test_data('test-reads.stoptags')
# actual output file
outfile1 = utils.get_temp_filename('test-reads.stoptags', in_dir)
script = scriptpath('make-initial-stoptags.py')
# make-initial-stoptags has weird file argument syntax
# read the code before modifying
args = ['test-reads']
utils.runscript(script, args, in_dir)
assert os.path.exists(outfile1), outfile1
def test_extract_paired_reads_1_fa():
# test input file
infile = utils.get_test_data('paired-mixed.fa')
ex_outfile1 = utils.get_test_data('paired-mixed.fa.pe')
ex_outfile2 = utils.get_test_data('paired-mixed.fa.se')
# actual output files...
outfile1 = utils.get_temp_filename('paired-mixed.fa.pe')
in_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('paired-mixed.fa.se', in_dir)
script = scriptpath('extract-paired-reads.py')
args = [infile]
utils.runscript(script, args, in_dir)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1), screed.open(outfile1)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2), screed.open(outfile2)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert n > 0
def test_extract_paired_reads_2_fq():
# test input file
infile = utils.get_test_data('paired-mixed.fq')
ex_outfile1 = utils.get_test_data('paired-mixed.fq.pe')
ex_outfile2 = utils.get_test_data('paired-mixed.fq.se')
# actual output files...
outfile1 = utils.get_temp_filename('paired-mixed.fq.pe')
in_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('paired-mixed.fq.se', in_dir)
script = scriptpath('extract-paired-reads.py')
args = [infile]
utils.runscript(script, args, in_dir)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1, parse_description=False),
screed.open(outfile1, parse_description=False)):
n += 1
assert r.name == q.name, (r.name, q.name, n)
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2, parse_description=False),
screed.open(outfile2, parse_description=False)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
def test_split_paired_reads_1_fa():
# test input file
infile = utils.get_test_data('paired.fa')
ex_outfile1 = utils.get_test_data('paired.fa.1')
ex_outfile2 = utils.get_test_data('paired.fa.2')
# actual output files...
outfile1 = utils.get_temp_filename('paired.fa.1')
in_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('paired.fa.2', in_dir)
script = scriptpath('split-paired-reads.py')
args = [infile]
utils.runscript(script, args, in_dir)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1), screed.open(outfile1)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2), screed.open(outfile2)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert n > 0
def test_split_paired_reads_2_fq():
# test input file
infile = utils.get_test_data('paired.fq')
ex_outfile1 = utils.get_test_data('paired.fq.1')
ex_outfile2 = utils.get_test_data('paired.fq.2')
# actual output files...
outfile1 = utils.get_temp_filename('paired.fq.1')
in_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('paired.fq.2', in_dir)
script = scriptpath('split-paired-reads.py')
args = [infile]
utils.runscript(script, args, in_dir)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1), screed.open(outfile1)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2), screed.open(outfile2)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
def test_split_paired_reads_2_mixed_fq_require_pair():
# test input file
infile = utils.get_temp_filename('test.fq')
shutil.copyfile(utils.get_test_data('paired-mixed.fq'), infile)
in_dir = os.path.dirname(infile)
script = scriptpath('split-paired-reads.py')
args = ['-p', infile]
status, out, err = utils.runscript(script, args, in_dir, fail_ok=True)
assert status == 1
assert "is not part of a pair" in err
def test_split_paired_reads_2_mixed_fq():
# test input file
infile = utils.get_temp_filename('test.fq')
shutil.copyfile(utils.get_test_data('paired-mixed-2.fq'), infile)
in_dir = os.path.dirname(infile)
script = scriptpath('split-paired-reads.py')
args = [infile]
status, out, err = utils.runscript(script, args, in_dir)
assert status == 0
assert "split 11 sequences (7 left, 4 right)" in err, err
def test_split_paired_reads_2_mixed_fq_broken_pairing_format():
# test input file
infile = utils.get_temp_filename('test.fq')
shutil.copyfile(utils.get_test_data('paired-mixed-broken.fq'), infile)
in_dir = os.path.dirname(infile)
script = scriptpath('split-paired-reads.py')
args = [infile]
status, out, err = utils.runscript(script, args, in_dir, fail_ok=True)
assert status == 1
assert "Unrecognized format" in err
def test_split_paired_reads_3_output_dir():
# test input file
infile = utils.get_test_data('paired.fq')
ex_outfile1 = utils.get_test_data('paired.fq.1')
ex_outfile2 = utils.get_test_data('paired.fq.2')
# actual output files...
outfile1 = utils.get_temp_filename('paired.fq.1')
output_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('paired.fq.2', output_dir)
script = scriptpath('split-paired-reads.py')
args = ['--output-dir', output_dir, infile]
utils.runscript(script, args)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1), screed.open(outfile1)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2), screed.open(outfile2)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
def test_split_paired_reads_3_output_files():
# test input file
infile = utils.get_test_data('paired.fq')
ex_outfile1 = utils.get_test_data('paired.fq.1')
ex_outfile2 = utils.get_test_data('paired.fq.2')
# actual output files...
outfile1 = utils.get_temp_filename('xxx')
output_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('yyy', output_dir)
script = scriptpath('split-paired-reads.py')
args = ['-1', outfile1, '-2', outfile2, infile]
utils.runscript(script, args)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1), screed.open(outfile1)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2), screed.open(outfile2)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
def test_split_paired_reads_3_output_files_left():
# test input file
infile = utils.get_test_data('paired.fq')
ex_outfile1 = utils.get_test_data('paired.fq.1')
ex_outfile2 = utils.get_test_data('paired.fq.2')
# actual output files...
outfile1 = utils.get_temp_filename('xxx')
output_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('paired.fq.2', output_dir)
script = scriptpath('split-paired-reads.py')
args = ['-o', output_dir, '-1', outfile1, infile]
utils.runscript(script, args)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1), screed.open(outfile1)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2), screed.open(outfile2)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
def test_split_paired_reads_3_output_files_right():
# test input file
infile = utils.get_test_data('paired.fq')
ex_outfile1 = utils.get_test_data('paired.fq.1')
ex_outfile2 = utils.get_test_data('paired.fq.2')
# actual output files...
outfile1 = utils.get_temp_filename('paired.fq.1')
output_dir = os.path.dirname(outfile1)
outfile2 = utils.get_temp_filename('yyy', output_dir)
script = scriptpath('split-paired-reads.py')
args = ['-2', outfile2, '-o', output_dir, infile]
utils.runscript(script, args)
assert os.path.exists(outfile1), outfile1
assert os.path.exists(outfile2), outfile2
n = 0
for r, q in zip(screed.open(ex_outfile1), screed.open(outfile1)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
n = 0
for r, q in zip(screed.open(ex_outfile2), screed.open(outfile2)):
n += 1
assert r.name == q.name
assert r.sequence == q.sequence
assert r.quality == q.quality
assert n > 0
def test_sample_reads_randomly():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-reads.fa'), infile)
script = scriptpath('sample-reads-randomly.py')
# fix random number seed for reproducibility
args = ['-N', '10', '-M', '12000', '-R', '1']
args.append(infile)
utils.runscript(script, args, in_dir)
outfile = infile + '.subset'
assert os.path.exists(outfile), outfile
seqs = set([r.name for r in screed.open(outfile)])
assert seqs == set(['850:2:1:2691:14602/1', '850:2:1:1762:5439/1',
'850:2:1:2399:20086/2', '850:2:1:2503:4494/2',
'850:2:1:2084:17145/1', '850:2:1:2273:13309/1',
'850:2:1:2263:11143/2', '850:2:1:1984:7162/2',
'850:2:1:2065:16816/1', '850:2:1:1792:15774/2'])
def test_sample_reads_randomly_fq():
infile = utils.get_temp_filename('test.fq.gz')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-reads.fq.gz'), infile)
script = scriptpath('sample-reads-randomly.py')
# fix random number seed for reproducibility
args = ['-N', '10', '-M', '12000', '-R', '1']
args.append(infile)
utils.runscript(script, args, in_dir)
outfile = infile + '.subset'
assert os.path.exists(outfile), outfile
seqs = set([r.name for r in screed.open(outfile,
parse_description=False)])
assert seqs == set(['850:2:1:2399:20086/2',
'850:2:1:1762:5439 1::FOO',
'850:2:1:2065:16816/1', '850:2:1:2263:11143/2',
'850:2:1:1792:15774/2', '850:2:1:2691:14602/1',
'850:2:1:2503:4494 1::FOO',
'850:2:1:2084:17145/1', '850:2:1:1984:7162 1::FOO',
'850:2:1:2273:13309 1::FOO'])
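# The fixed -R seed is what makes the sampled name sets above reproducible.
# A generic reservoir-sampling sketch of this style of selection
# (illustrative only, not sample-reads-randomly.py's actual code):
def _sketch_reservoir_sample(reads, num, seed=1):
    import random
    random.seed(seed)
    reservoir = []
    for n, read in enumerate(reads):
        if n < num:
            reservoir.append(read)  # fill the reservoir first
        else:
            j = random.randint(0, n)
            if j < num:
                reservoir[j] = read  # replace with decreasing probability
    return reservoir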
def test_fastq_to_fasta():
script = scriptpath('fastq-to-fasta.py')
clean_infile = utils.get_temp_filename('test-clean.fq')
n_infile = utils.get_temp_filename('test-n.fq')
shutil.copyfile(utils.get_test_data('test-fastq-reads.fq'), clean_infile)
shutil.copyfile(utils.get_test_data('test-fastq-n-reads.fq'), n_infile)
clean_outfile = clean_infile + '.keep.fa'
n_outfile = n_infile + '.keep.fa'
in_dir = os.path.dirname(clean_infile)
in_dir_n = os.path.dirname(n_infile)
args = [clean_infile, '-n', '-o', clean_outfile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert len(out.splitlines()) == 2, len(out.splitlines())
assert "No lines dropped" in err
names = [r.name for r in screed.open(clean_outfile,
parse_description=False)]
assert '895:1:1:1246:14654 1:N:0:NNNNN' in names, names
args = [n_infile, '-n', '-o', n_outfile]
(status, out, err) = utils.runscript(script, args, in_dir_n)
assert len(out.splitlines()) == 2
assert "No lines dropped" in err
args = [clean_infile, '-o', clean_outfile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert len(out.splitlines()) == 2
assert "0 lines dropped" in err
args = [n_infile, '-o', n_outfile]
(status, out, err) = utils.runscript(script, args, in_dir_n)
assert len(out.splitlines()) == 2, out
assert "4 lines dropped" in err, err
args = [clean_infile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert len(out.splitlines()) > 2
assert "0 lines dropped" in err
args = [n_infile]
(status, out, err) = utils.runscript(script, args, in_dir_n)
assert len(out.splitlines()) > 2
assert "4 lines dropped" in err
def test_extract_long_sequences():
script = scriptpath('extract-long-sequences.py')
fq_infile = utils.get_temp_filename('test.fq')
fa_infile = utils.get_temp_filename('test.fa')
shutil.copyfile(utils.get_test_data('paired-mixed.fq'), fq_infile)
shutil.copyfile(utils.get_test_data('paired-mixed.fa'), fa_infile)
fq_outfile = fq_infile + '.keep.fq'
fa_outfile = fa_infile + '.keep.fa'
in_dir_fq = os.path.dirname(fq_infile)
in_dir_fa = os.path.dirname(fa_infile)
args = [fq_infile, '-l', '10', '-o', fq_outfile]
(status, out, err) = utils.runscript(script, args, in_dir_fq)
countlines = sum(1 for line in open(fq_outfile))
assert countlines == 44, countlines
names = [r.name for r in screed.open(fq_outfile, parse_description=False)]
assert "895:1:37:17593:9954 1::foo" in names
assert "895:1:37:17593:9954 2::foo" in names
args = [fa_infile, '-l', '10', '-o', fa_outfile]
(status, out, err) = utils.runscript(script, args, in_dir_fa)
countlines = sum(1 for line in open(fa_outfile))
assert countlines == 22, countlines
def test_sample_reads_randomly_S():
infile = utils.get_temp_filename('test.fq')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-fastq-reads.fq'), infile)
script = scriptpath('sample-reads-randomly.py')
# fix random number seed for reproducibility
args = ['-N', '10', '-R', '1', '-S', '3']
badargs = list(args)
badargs.extend(['-o', 'test', 'test.fq', 'test.fq'])
(status, out, err) = utils.runscript(script, badargs, in_dir, fail_ok=True)
assert status == 1, (status, out, err)
args.append('test.fq')
utils.runscript(script, args, in_dir)
outfile = infile + '.subset.0'
assert os.path.exists(outfile), outfile
seqs = set([r.name for r in screed.open(outfile)])
print seqs
assert seqs == set(['895:1:1:1298:13380', '895:1:1:1347:3237',
'895:1:1:1295:6189', '895:1:1:1342:11001',
'895:1:1:1252:19493', '895:1:1:1318:10532',
'895:1:1:1314:10430', '895:1:1:1347:8723',
'895:1:1:1381:4958', '895:1:1:1338:6614'])
outfile = infile + '.subset.1'
assert os.path.exists(outfile), outfile
seqs = set([r.name for r in screed.open(outfile)])
print seqs
assert seqs == set(['895:1:1:1384:20217', '895:1:1:1347:3237',
'895:1:1:1348:18672', '895:1:1:1290:11501',
'895:1:1:1386:7536', '895:1:1:1373:13994',
'895:1:1:1355:13535', '895:1:1:1303:6251',
'895:1:1:1381:4958', '895:1:1:1338:6614'])
outfile = infile + '.subset.2'
assert os.path.exists(outfile), outfile
seqs = set([r.name for r in screed.open(outfile)])
print seqs
assert seqs == set(['895:1:1:1326:7273', '895:1:1:1384:20217',
'895:1:1:1347:3237', '895:1:1:1353:6642',
'895:1:1:1340:19387', '895:1:1:1252:19493',
'895:1:1:1381:7062', '895:1:1:1383:3089',
'895:1:1:1342:20695', '895:1:1:1303:6251'])
def test_count_overlap():
seqfile1 = utils.get_temp_filename('test-overlap1.fa')
in_dir = os.path.dirname(seqfile1)
seqfile2 = utils.get_temp_filename('test-overlap2.fa', in_dir)
outfile = utils.get_temp_filename('overlap.out', in_dir)
curvefile = utils.get_temp_filename('overlap.out.curve', in_dir)
shutil.copy(utils.get_test_data('test-overlap1.fa'), seqfile1)
shutil.copy(utils.get_test_data('test-overlap2.fa'), seqfile2)
htfile = _make_graph(seqfile1, ksize=20)
script = scriptpath('count-overlap.py')
args = ['--ksize', '20', '--n_tables', '2', '--min-tablesize', '10000000',
htfile + '.pt', seqfile2, outfile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert status == 0
assert os.path.exists(outfile), outfile
data = [x.strip() for x in open(outfile)]
data = set(data)
assert '# of unique k-mers in dataset2: 759047' in data
assert '# of overlap unique k-mers: 245621' in data
assert os.path.exists(curvefile), curvefile
data = [x.strip() for x in open(curvefile)]
data = set(data)
assert '178633 1155' in data
assert '496285 2970' in data
assert '752053 238627' in data
def test_count_overlap_csv():
seqfile1 = utils.get_temp_filename('test-overlap1.fa')
in_dir = os.path.dirname(seqfile1)
seqfile2 = utils.get_temp_filename('test-overlap2.fa', in_dir)
outfile = utils.get_temp_filename('overlap.out', in_dir)
curvefile = utils.get_temp_filename('overlap.out.curve', in_dir)
shutil.copy(utils.get_test_data('test-overlap1.fa'), seqfile1)
shutil.copy(utils.get_test_data('test-overlap2.fa'), seqfile2)
htfile = _make_graph(seqfile1, ksize=20)
script = scriptpath('count-overlap.py')
args = ['--ksize', '20', '--n_tables', '2', '--min-tablesize',
'10000000', '--csv', htfile + '.pt', seqfile2, outfile]
(status, out, err) = utils.runscript(script, args, in_dir)
assert status == 0
assert os.path.exists(outfile), outfile
data = [x.strip() for x in open(outfile)]
data = set(data)
assert '# of unique k-mers in dataset2: 759047' in data
assert '# of overlap unique k-mers: 245621' in data
assert os.path.exists(curvefile), curvefile
data = [x.strip() for x in open(curvefile)]
data = set(data)
assert '178633,1155' in data
assert '496285,2970' in data
assert '752053,238627' in data
def execute_streaming_diginorm(ifilename):
'''Helper function for the matrix of streaming tests using screed via
normalize-by-median, i.e. uncompressed fasta, gzip fasta, bz2 fasta,
uncompressed fastq, etc.
This is not directly executed but is run by the tests themselves.
'''
# Get temp filenames, etc.
fifo = utils.get_temp_filename('fifo')
in_dir = os.path.dirname(fifo)
script = scriptpath('normalize-by-median.py')
args = ['-C', '1', '-k', '17', '-o', 'outfile', fifo]
# make a fifo to simulate streaming
os.mkfifo(fifo)
# FIFOs MUST BE OPENED FOR READING BEFORE THEY ARE WRITTEN TO
# If this isn't done, they will BLOCK and things will hang.
thread = threading.Thread(target=utils.runscript,
args=(script, args, in_dir))
thread.start()
ifile = io.open(ifilename, 'rb')
fifofile = io.open(fifo, 'wb')
# read binary to handle compressed files
chunk = ifile.read(8192)
while len(chunk) > 0:
fifofile.write(chunk)
chunk = ifile.read(8192)
fifofile.close()
thread.join()
return in_dir + '/outfile'
def execute_load_graph_streaming(filename):
'''Helper function for the matrix of streaming tests using the read parser
via load-graph, i.e. uncompressed fasta, gzip fasta, bz2 fasta,
uncompressed fastq, etc.
This is not directly executed but is run by the tests themselves.
'''
script = scriptpath('load-graph.py')
args = '-x 1e7 -N 2 -k 20 -t out -'
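# the trailing '-' is the input argument: runscriptredirect pipes `infile`
# into the script's stdin, simulating streaming input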
infile = utils.get_temp_filename('temp')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data(filename), infile)
(status, out, err) = utils.runscriptredirect(script, args, infile, in_dir)
if status != 0:
for line in out:
print line
for line in err:
print line
assert status == 0, status
err.seek(0)
err = err.read()
assert 'Total number of unique k-mers: 3960' in err, err
ht_file = os.path.join(in_dir, 'out.pt')
assert os.path.exists(ht_file), ht_file
tagset_file = os.path.join(in_dir, 'out.tagset')
assert os.path.exists(tagset_file), tagset_file
ht = khmer.load_hashbits(ht_file)
ht.load_tagset(tagset_file)
# check to make sure we get the expected result for this data set
# upon partitioning (all in one partition). This is kind of a
# roundabout way of checking that load-graph worked :)
subset = ht.do_subset_partition(0, 0)
x = ht.subset_count_partitions(subset)
assert x == (1, 0), x
@attr('known_failing')
def test_screed_streaming_ufa():
# uncompressed fa
o = execute_streaming_diginorm(utils.get_test_data('test-abund-read-2.fa'))
pathstat = os.stat(o)
seqs = [r.sequence for r in screed.open(o)]
assert len(seqs) == 1, seqs
assert seqs[0].startswith('GGTTGACGGGGCTCAGGGGG')
@attr('known_failing')
def test_screed_streaming_ufq():
# uncompressed fq
o = execute_streaming_diginorm(utils.get_test_data('test-fastq-reads.fq'))
seqs = [r.sequence for r in screed.open(o)]
assert seqs[0].startswith('CAGGCGCCCACCACCGTGCCCTCCAACCTGATGGT')
@attr('known_failing')
def test_screed_streaming_bzipfq():
# bzip compressed fq
o = execute_streaming_diginorm(utils.get_test_data('100-reads.fq.bz2'))
seqs = [r.sequence for r in screed.open(o)]
assert len(seqs) == 100, seqs
assert seqs[0].startswith('CAGGCGCCCACCACCGTGCCCTCCAACCTGATGGT'), seqs
@attr('known_failing')
def test_screed_streaming_bzipfa():
# bzip compressed fa
o = execute_streaming_diginorm(
utils.get_test_data('test-abund-read-2.fa.bz2'))
seqs = [r.sequence for r in screed.open(o)]
assert len(seqs) == 1, seqs
assert seqs[0].startswith('GGTTGACGGGGCTCAGGGGG')
@attr('known_failing')
def test_screed_streaming_gzipfq():
# gzip compressed fq
o = execute_streaming_diginorm(utils.get_test_data('100-reads.fq.gz'))
assert os.path.exists(o)
seqs = [r.sequence for r in screed.open(o)]
assert seqs[0].startswith('CAGGCGCCCACCACCGTGCCCTCCAACCTG')
@attr('known_failing')
def test_screed_streaming_gzipfa():
o = execute_streaming_diginorm(
utils.get_test_data('test-abund-read-2.fa.gz'))
assert os.path.exists(o)
seqs = [r.sequence for r in screed.open(o)]
assert seqs[0].startswith('GGTTGACGGGGCTCAGGGG')
def test_read_parser_streaming_ufa():
# uncompressed FASTA
execute_load_graph_streaming(utils.get_test_data('random-20-a.fa'))
def test_read_parser_streaming_ufq():
# uncompressed FASTQ
execute_load_graph_streaming(utils.get_test_data('random-20-a.fq'))
@attr('known_failing')
def test_read_parser_streaming_bzfq():
# bzip compressed FASTQ
execute_load_graph_streaming(utils.get_test_data('random-20-a.fq.bz2'))
def test_read_parser_streaming_gzfq():
# gzip compressed FASTQ
execute_load_graph_streaming(utils.get_test_data('random-20-a.fq.gz'))
@attr('known_failing')
def test_read_parser_streaming_bzfa():
# bzip compressed FASTA
execute_load_graph_streaming(utils.get_test_data('random-20-a.fa.bz2'))
def test_read_parser_streaming_gzfa():
# gzip compressed FASTA
execute_load_graph_streaming(utils.get_test_data('random-20-a.fa.gz'))
def test_readstats():
readstats_output = ("358 bp / 5 seqs; 71.6 average length",
"916 bp / 11 seqs; 83.3 average length")
args = [utils.get_test_data("test-sweep-reads.fq"),
utils.get_test_data("paired-mixed.fq")]
status, out, err = utils.runscript('readstats.py', args)
assert status == 0
for k in readstats_output:
assert k in out, (k, out)
def test_readstats_output():
readstats_output = ("358 bp / 5 seqs; 71.6 average length",
"916 bp / 11 seqs; 83.3 average length")
outfile = utils.get_temp_filename('output.txt')
args = ["-o", outfile,
utils.get_test_data("test-sweep-reads.fq"),
utils.get_test_data("paired-mixed.fq")]
status, _, _ = utils.runscript('readstats.py', args)
assert status == 0
out = open(outfile).read()
for k in readstats_output:
assert k in out, (k, out)
def test_readstats_empty():
expected_output = "No sequences found in 2 files"
args = [utils.get_test_data("test-empty.fa"),
utils.get_test_data("test-empty.fa.bz2")]
status, out, err = utils.runscript('readstats.py', args)
assert status == 0
assert expected_output in out
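# For reference, the figures asserted above are straightforward to reproduce:
# 358 bp over 5 sequences is 358 / 5 = 71.6 average length. A minimal sketch
# of that bookkeeping, using screed (already imported by this module):
def _basic_readstats(filename):
    total_bp = 0
    num_seqs = 0
    for record in screed.open(filename):
        total_bp += len(record.sequence)
        num_seqs += 1
    avg = float(total_bp) / num_seqs if num_seqs else 0.0
    return '%d bp / %d seqs; %.1f average length' % (total_bp, num_seqs, avg)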
def test_trim_low_abund_1():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 1, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
def test_trim_low_abund_1_duplicate_filename_err():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", '-C', '1', infile, infile]
try:
utils.runscript('trim-low-abund.py', args, in_dir)
raise Exception("should not reach this")
except AssertionError:
# an error should be raised by passing 'infile' twice.
pass
def test_trim_low_abund_2():
infile = utils.get_temp_filename('test.fa')
infile2 = utils.get_temp_filename('test2.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile2)
args = ["-k", "17", "-x", "1e7", "-N", "2", '-C', '1', infile, infile2]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
# make sure that FASTQ records are retained.
def test_trim_low_abund_3_fq_retained():
infile = utils.get_temp_filename('test.fq')
infile2 = utils.get_temp_filename('test2.fq')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fq'), infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fq'), infile2)
args = ["-k", "17", "-x", "1e7", "-N", "2", '-C', '1', infile, infile2]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
# check for 'quality' string.
seqs = set([r.quality for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
assert '##################' in seqs
# test that the -V option does not trim sequences that are low abundance
def test_trim_low_abund_4_retain_low_abund():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.fa'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", '-V', infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
assert 'GGTTGACGGGGCTCAGGG' in seqs
# test that the -V option *does* trim sequences that are high abundance
def test_trim_low_abund_5_trim_high_abund():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-3.fa'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", '-V', infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
# trimmed sequence @ error
assert 'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGC' in seqs
# test the -V/-Z interaction: sequences should not be trimmed if -Z is set high enough.
def test_trim_low_abund_6_trim_high_abund_Z():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-3.fa'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", '-V', '-Z', '25', infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = set([r.sequence for r in screed.open(outfile)])
assert len(seqs) == 2, seqs
# untrimmed seq.
badseq = 'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCgtgCCGCAGCTGTCGTCAGGG' \
'GATTTCCGGGCGG'
assert badseq in seqs # should be there, untrimmed
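# A simplified sketch of the trimming rule these -V/-Z tests exercise: scan a
# read's k-mers left to right and cut just before the first k-mer whose count
# falls below the cutoff. This illustrates the idea only; trim-low-abund.py's
# actual logic (including the variable-coverage -V behaviour) is more involved.
def _trim_at_low_abund_kmer(sequence, kmer_counts, ksize, cutoff):
    # kmer_counts is assumed to map k-mer string -> observed abundance.
    for start in range(len(sequence) - ksize + 1):
        if kmer_counts.get(sequence[start:start + ksize], 0) < cutoff:
            return sequence[:start + ksize - 1]
    return sequence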
def test_trim_low_abund_keep_paired():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired.fq'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", "-V", infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = [r.name for r in screed.open(outfile)]
assert seqs[-2:] == ['pair/1', 'pair/2'], seqs
def test_trim_low_abund_keep_paired_casava18():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired2.fq'),
infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", "-V", infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
seqs = [r.name for r in screed.open(outfile, parse_description=False)]
assert seqs[-2:] == ['pair:foo 1::N', 'pair:foo 2::N'], seqs
def test_trim_low_abund_highfpr():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired.fq'), infile)
args = ["-k", "17", "-x", "1", "-N", "1", "-V", infile]
code, out, err = utils.runscript('trim-low-abund.py', args, in_dir,
fail_ok=True)
assert code == 1
print out
assert "ERROR: the k-mer counting table is too small" in err
def test_trim_low_abund_trimtest():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired.fq'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", "-Z", "2", "-C", "1",
"-V", infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
for record in screed.open(outfile):
if record.name == 'seqtrim/1':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCAGCC'
elif record.name == 'seqtrim/2':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCAGCCGC'
elif record.name == 'seqtrim2/1':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCA'
def test_trim_low_abund_trimtest_after_load():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
saved_table = utils.get_temp_filename('save.ct')
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired.fq'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2", saved_table, infile]
utils.runscript('load-into-counting.py', args, in_dir)
args = ["-Z", "2", "-C", "2", "-V", '--loadtable', saved_table, infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
for record in screed.open(outfile):
if record.name == 'seqtrim/1':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCAGCC'
elif record.name == 'seqtrim/2':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCAGCCGC'
elif record.name == 'seqtrim2/1':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCA'
def test_trim_low_abund_trimtest_savetable():
infile = utils.get_temp_filename('test.fa')
in_dir = os.path.dirname(infile)
saved_table = utils.get_temp_filename('save.ct')
shutil.copyfile(utils.get_test_data('test-abund-read-2.paired.fq'), infile)
args = ["-k", "17", "-x", "1e7", "-N", "2",
"-Z", "2", "-C", "2", "-V", '--savetable', saved_table, infile]
utils.runscript('trim-low-abund.py', args, in_dir)
outfile = infile + '.abundtrim'
assert os.path.exists(outfile), outfile
assert os.path.exists(saved_table)
for record in screed.open(outfile):
if record.name == 'seqtrim/1':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCAGCC'
elif record.name == 'seqtrim/2':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCAGCCGC'
elif record.name == 'seqtrim2/1':
print record.name, record.sequence
assert record.sequence == \
'GGTTGACGGGGCTCAGGGGGCGGCTGACTCCGAGAGACAGCA'
def test_roundtrip_casava_format_1():
# check to make sure that extract-paired-reads produces a file identical
# to the input file when only paired data is given.
infile = utils.get_temp_filename('test.fq')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('casava_18-pe.fq'), infile)
_, out, err = utils.runscript('extract-paired-reads.py', [infile], in_dir)
r = open(infile).read()
outfile = infile + '.pe'
r2 = open(outfile).read()
assert r == r2, (r, r2)
def test_roundtrip_casava_format_2():
# check that split-paired-reads -> interleave-reads produces a file
# identical to input, when only paired reads are given.
infile = utils.get_temp_filename('test.fq')
outfile = utils.get_temp_filename('test2.fq')
in_dir = os.path.dirname(infile)
shutil.copyfile(utils.get_test_data('casava_18-pe.fq'), infile)
_, out, err = utils.runscript('split-paired-reads.py', [infile], in_dir)
utils.runscript('interleave-reads.py', [infile + '.1',
infile + '.2',
'-o', outfile], in_dir)
r = open(infile).read()
r2 = open(outfile).read()
assert r == r2, (r, r2)
| 32.237132
| 79
| 0.654506
| 12,856
| 90,812
| 4.477287
| 0.056238
| 0.041696
| 0.033357
| 0.044475
| 0.874548
| 0.847151
| 0.822985
| 0.802919
| 0.774705
| 0.751703
| 0
| 0.032666
| 0.202407
| 90,812
| 2,816
| 80
| 32.24858
| 0.762022
| 0.05692
| 0
| 0.681597
| 0
| 0
| 0.158208
| 0.036976
| 0
| 0
| 0
| 0
| 0.219104
| 0
| null | null | 0.00054
| 0.008095
| null | null | 0.012952
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98e7e0665269f28f1ce7647e77301d9fd246eb02
| 2,890
|
py
|
Python
|
bach/tests/unit/sql_models/test_util.py
|
objectiv/objectiv-analytics
|
86ec1508f71c2d61ea7d67479800e4dc417a46e1
|
[
"Apache-2.0"
] | 23
|
2021-11-10T21:37:42.000Z
|
2022-03-30T11:46:19.000Z
|
bach/tests/unit/sql_models/test_util.py
|
objectiv/objectiv-analytics
|
86ec1508f71c2d61ea7d67479800e4dc417a46e1
|
[
"Apache-2.0"
] | 163
|
2021-11-10T10:11:26.000Z
|
2022-03-31T16:04:27.000Z
|
bach/tests/unit/sql_models/test_util.py
|
objectiv/objectiv-analytics
|
86ec1508f71c2d61ea7d67479800e4dc417a46e1
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2021 Objectiv B.V.
"""
import pytest
from sql_models.util import extract_format_fields, quote_identifier, quote_string, is_postgres,\
is_bigquery
@pytest.mark.db_independent
def test_extract_format_fields():
assert extract_format_fields('{test}') == {'test'}
assert extract_format_fields('{test} more text {test}') == {'test'}
assert extract_format_fields('text{test} more {{text}} {test2} te{x}t{test}') == {'test', 'test2', 'x'}
@pytest.mark.db_independent
def test_extract_format_fields_nested():
# assert extract_format_fields('{test}', 2) == set()
# assert extract_format_fields('{test} more text {test}', 2) == set()
# assert extract_format_fields('text{test} more {{text}} {test2} te{x}t{test}', 2) == {'text'}
assert extract_format_fields('{x} {{y}} {{{{z}}}} {a}', 1) == {'x', 'a'}
assert extract_format_fields('{x} {{y}} {{{{z}}}} {a}', 2) == {'y'}
assert extract_format_fields('{x} {{y}} {{{{z}}}} {a}', 3) == {'z'}
assert extract_format_fields('{x} {{y}} {{{{z}}}}', 3) == {'z'}
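# A minimal sketch of the brace-nesting rule the cases above rely on: every
# pass of str.format() strips one level of braces, so '{{y}}' only becomes a
# real field after one round of formatting. string.Formatter.parse exposes
# the fields at the current level, which is assumed here to approximate
# extract_format_fields(text, 1):
def _format_fields_one_level(text: str) -> set:
    from string import Formatter
    return {field for _, field, _, _ in Formatter().parse(text) if field}

# e.g. _format_fields_one_level('{x} {{y}} {{{{z}}}} {a}') == {'x', 'a'}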
def test_quote_identifier(dialect):
if is_postgres(dialect):
# https://www.postgresql.org/docs/14/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
assert quote_identifier(dialect, 'test') == '"test"'
assert quote_identifier(dialect, 'te"st') == '"te""st"'
assert quote_identifier(dialect, '"te""st"') == '"""te""""st"""'
assert quote_identifier(dialect, '`te`st`') == '"`te`st`"'
assert quote_identifier(dialect, 'te%st') == '"te%st"'
elif is_bigquery(dialect):
# https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers
assert quote_identifier(dialect, 'test') == '`test`'
assert quote_identifier(dialect, 'te"st') == '`te"st`'
assert quote_identifier(dialect, '"te""st"') == r'`"te""st"`'
assert quote_identifier(dialect, '`te`st`') == r'`\`te\`st\``'
assert quote_identifier(dialect, 'te%st') == '`te%st`'
else:
# if we add more dialects, we should not forget to extend this test
        raise Exception(f'quote_identifier test does not cover dialect: {dialect}')
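# A hedged sketch of the escaping conventions the assertions above encode.
# This is an illustration of the two dialects' rules, not the actual
# sql_models.util implementation:
def _quote_identifier_sketch(dialect_name: str, name: str) -> str:
    if dialect_name == 'postgresql':
        # Postgres doubles embedded double quotes inside a "..." identifier.
        return '"' + name.replace('"', '""') + '"'
    if dialect_name == 'bigquery':
        # BigQuery backslash-escapes backslashes and backticks in `...`.
        return '`' + name.replace('\\', '\\\\').replace('`', '\\`') + '`'
    raise ValueError(f'unsupported dialect: {dialect_name}')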
def test_quote_string(dialect):
if is_postgres(dialect):
# https://www.postgresql.org/docs/14/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS
assert quote_string(dialect, "test") == "'test'"
assert quote_string(dialect, "te'st") == "'te''st'"
assert quote_string(dialect, "'te''st'") == "'''te''''st'''"
elif is_bigquery(dialect):
# https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#string_and_bytes_literals
assert quote_string(dialect, "test") == '"""test"""'
assert quote_string(dialect, "te'st") == '"""te\'st"""'
assert quote_string(dialect, "'te''st'") == '"""\'te\'\'st\'"""'
else:
# if we add more dialects, we should not forget to extend this test
        raise Exception(f'quote_string test does not cover dialect: {dialect}')
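# The string-literal counterpart, again as an illustrative sketch only. Note
# that a real implementation must also handle values containing the delimiter
# sequence itself (e.g. embedded triple quotes for BigQuery):
def _quote_string_sketch(dialect_name: str, value: str) -> str:
    if dialect_name == 'postgresql':
        # Postgres doubles embedded single quotes inside a '...' literal.
        return "'" + value.replace("'", "''") + "'"
    if dialect_name == 'bigquery':
        # BigQuery triple-quoted strings may contain bare single quotes.
        return '"""' + value + '"""'
    raise ValueError(f'unsupported dialect: {dialect_name}')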
| 46.612903
| 107
| 0.623529
| 376
| 2,890
| 4.625
| 0.202128
| 0.055204
| 0.142036
| 0.143761
| 0.87119
| 0.842438
| 0.835538
| 0.812536
| 0.715354
| 0.658999
| 0
| 0.007541
| 0.174048
| 2,890
| 62
| 108
| 46.612903
| 0.720989
| 0.245329
| 0
| 0.25
| 0
| 0
| 0.212766
| 0.009713
| 0
| 0
| 0
| 0
| 0.575
| 1
| 0.1
| false
| 0
| 0.05
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98e9991f3761a6c08c8e85393e6a669407e6c239
| 205
|
py
|
Python
|
araElectra/pytorch/main.py
|
HydroxideX/SOQAL
|
345ae5858f23f06c693148c2e2df8202d09cff6d
|
[
"MIT"
] | null | null | null |
araElectra/pytorch/main.py
|
HydroxideX/SOQAL
|
345ae5858f23f06c693148c2e2df8202d09cff6d
|
[
"MIT"
] | null | null | null |
araElectra/pytorch/main.py
|
HydroxideX/SOQAL
|
345ae5858f23f06c693148c2e2df8202d09cff6d
|
[
"MIT"
] | null | null | null |
from data_splitter import combine_json_files
combine_json_files(["D:/GraduationProjectTesting/SOQAL/data/Arabic-SQuAD.json",
"D:/GraduationProjectTesting/SOQAL/data/arcd-train.json"])
| 41
| 79
| 0.756098
| 24
| 205
| 6.25
| 0.583333
| 0.146667
| 0.213333
| 0.453333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131707
| 205
| 4
| 80
| 51.25
| 0.842697
| 0
| 0
| 0
| 0
| 0
| 0.536585
| 0.536585
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c70ec1d9c80ed4fd590763cb1ef06295d644cb03
| 20,808
|
py
|
Python
|
packages/ratreleases.py
|
kalpanasingh/xsnoed
|
0baaf4bb968e8c7c88a956a3814e7840806c6a9c
|
[
"MIT"
] | null | null | null |
packages/ratreleases.py
|
kalpanasingh/xsnoed
|
0baaf4bb968e8c7c88a956a3814e7840806c6a9c
|
[
"MIT"
] | null | null | null |
packages/ratreleases.py
|
kalpanasingh/xsnoed
|
0baaf4bb968e8c7c88a956a3814e7840806c6a9c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# RatRelease6, RatRelease5, RatRelease4Post4, RatRelease4Post1, RatRelease4Pre2,
# RatRelease3, RatRelease2, RatRelease0and1
#
# Base classes for the various rat releases, oldest at the bottom
# RAT-2 is the first release fetched via curl and bzip2
# RAT-3 adds avalanche, xerces and zeromq as extra dependencies
# RAT-4 slightly changes the geant dependency
# RAT-4.2 removes the zeromq & avalanche dependencies
# RAT-4.5 removes the xercesc and clhep dependencies
#
# Author P G Jones - 21/06/2012 <p.g.jones@qmul.ac.uk> : First revision
# Author P G Jones - 23/09/2012 <p.g.jones@qmul.ac.uk> : Major refactor of snoing.
####################################################################################################
import os
import rat
class RatRelease6(rat.RatRelease):
""" Base installer for rat relase 6.0.0 onwards."""
def __init__(self, name, system, root_dep, tar_name, postgres=False):
""" Initlaise, take extra dependencies."""
super(RatRelease6, self).__init__(name, system, root_dep, "geant4.10.0.p02", "scons-2.1.0",
tar_name)
self._curl_dep = "curl-7.26.0"
self._bzip_dep = "bzip2-1.0.6"
self._postgres_dep = "postgresql-9.5.2"
self._require_postgres = postgres
def _get_dependencies(self):
""" Return the extra dependencies."""
if self._require_postgres is True:
return [self._curl_dep, self._bzip_dep, self._postgres_dep]
else:
return [self._curl_dep, self._bzip_dep]
def _write_env_file(self):
""" Diff geant env file and no need to patch rat."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "bin/geant4")
self._env_file.append_path(os.path.join(self._dependency_paths[self._geant_dep], "bin"))
if self._dependency_paths[self._curl_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._curl_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._curl_dep], "lib"))
if self._dependency_paths[self._bzip_dep] is not None: # Conditional Package
self._env_file.add_environment("BZIPROOT", self._dependency_paths[self._bzip_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._bzip_dep],
"lib"))
if self._require_postgres is True and self._dependency_paths[self._postgres_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._postgres_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._postgres_dep], "lib"))
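# A minimal sketch of the env-file abstraction these installers drive. The
# real snoing EnvFile class lives elsewhere; the shell-export behaviour below
# is an assumption inferred from how its methods are called in this file:
class _EnvFileSketch(object):
    """ Collects shell lines that activate an installed package set."""
    def __init__(self):
        self._lines = []
    def add_source(self, path, script):
        self._lines.append('source %s' % os.path.join(path, script))
    def add_environment(self, name, value):
        self._lines.append('export %s=%s' % (name, value))
    def append_path(self, path):
        self._lines.append('export PATH=%s:$PATH' % path)
    def append_library_path(self, path):
        self._lines.append('export LD_LIBRARY_PATH=%s:$LD_LIBRARY_PATH' % path)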
class RatRelease5(rat.RatRelease):
""" Base installer for rat relase 5.0.0 onwards."""
def __init__(self, name, system, root_dep, tar_name):
""" Initlaise, take extra dependencies."""
super(RatRelease5, self).__init__(name, system, root_dep, "geant4.10.0.p02", "scons-2.1.0",
tar_name)
self._curl_dep = "curl-7.26.0"
self._bzip_dep = "bzip2-1.0.6"
def _get_dependencies(self):
""" Return the extra dependencies."""
return [self._curl_dep, self._bzip_dep]
def _write_env_file(self):
""" Diff geant env file and no need to patch rat."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "bin/geant4")
self._env_file.append_path(os.path.join(self._dependency_paths[self._geant_dep], "bin"))
if self._dependency_paths[self._curl_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._curl_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._curl_dep], "lib"))
if self._dependency_paths[self._bzip_dep] is not None: # Conditional Package
self._env_file.add_environment("BZIPROOT", self._dependency_paths[self._bzip_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._bzip_dep],
"lib"))
class RatRelease4Post4(rat.RatRelease):
""" Base installer for rat relase 4.4.0 onwards."""
def __init__(self, name, system, root_dep, tar_name):
""" Initlaise, take extra dependencies."""
super(RatRelease4Post4, self).__init__(name, system, root_dep, "geant4.9.6.p02", "scons-2.1.0",
tar_name)
self._curl_dep = "curl-7.26.0"
self._bzip_dep = "bzip2-1.0.6"
def _get_dependencies(self):
""" Return the extra dependencies."""
return [self._curl_dep, self._bzip_dep]
def _write_env_file(self):
""" Diff geant env file and no need to patch rat."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "bin/geant4")
self._env_file.append_path(os.path.join(self._dependency_paths[self._geant_dep], "bin"))
if self._dependency_paths[self._curl_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._curl_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._curl_dep], "lib"))
if self._dependency_paths[self._bzip_dep] is not None: # Conditional Package
self._env_file.add_environment("BZIPROOT", self._dependency_paths[self._bzip_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._bzip_dep],
"lib"))
class RatRelease4Post1(rat.RatRelease):
""" Base installer for rat relase 4.20 onwards."""
def __init__(self, name, system, root_dep, tar_name):
""" Initlaise, take extra dependencies."""
super(RatRelease4Post1, self).__init__(name, system, root_dep, "geant4.9.5.p01", "scons-2.1.0",
tar_name)
self._clhep_dep = "clhep-2.1.1.0"
self._curl_dep = "curl-7.26.0"
self._bzip_dep = "bzip2-1.0.6"
self._xercesc_dep = "xerces-c-3.1.1"
def _get_dependencies(self):
""" Return the extra dependencies."""
return [self._clhep_dep, self._curl_dep, self._bzip_dep, self._xercesc_dep]
def _write_env_file(self):
""" Diff geant env file and no need to patch rat."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "bin/geant4")
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._clhep_dep],
"lib"))
if self._dependency_paths[self._xercesc_dep] is not None: # Conditional Package
self._env_file.add_environment("XERCESCROOT", self._dependency_paths[self._xercesc_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._xercesc_dep],
"lib"))
self._env_file.append_path(os.path.join(self._dependency_paths[self._clhep_dep], "bin"))
self._env_file.append_path(os.path.join(self._dependency_paths[self._geant_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._clhep_dep],
"lib"))
if self._dependency_paths[self._curl_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._curl_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._curl_dep], "lib"))
if self._dependency_paths[self._bzip_dep] is not None: # Conditional Package
self._env_file.add_environment("BZIPROOT", self._dependency_paths[self._bzip_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._bzip_dep],
"lib"))
class RatRelease4Pre2(rat.RatRelease):
""" Base installer for rat release 4.00 and 4.10."""
def __init__(self, name, system, root_dep, tar_name):
""" Initlaise, take extra dependencies."""
super(RatRelease4Pre2, self).__init__(name, system, root_dep, "geant4.9.5.p01", "scons-2.1.0",
tar_name)
self._clhep_dep = "clhep-2.1.1.0"
self._curl_dep = "curl-7.26.0"
self._bzip_dep = "bzip2-1.0.6"
self._avalanche_dep = "avalanche-1"
self._zeromq_dep = "zeromq-2.2.0"
self._xercesc_dep = "xerces-c-3.1.1"
def _get_dependencies(self):
""" Return the extra dependencies."""
return [self._clhep_dep, self._curl_dep, self._bzip_dep, self._avalanche_dep,
self._zeromq_dep, self._xercesc_dep]
def _is_installed(self):
""" Rat releases and dev share a common install check."""
# Check rat, root, RATLib and RATDSLib
sys = os.uname()[0]
return self._system.file_exists('rat_%s-g++' % sys,
os.path.join(self.get_install_path(), "bin")) \
and self._system.file_exists('root',
os.path.join(self.get_install_path(), "bin")) \
and self._system.library_exists("librat_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib")) \
and self._system.library_exists("libRATEvent_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib"))
def _write_env_file(self):
""" Diff geant env file and no need to patch rat."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "bin/geant4")
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._clhep_dep],
"lib"))
self._env_file.add_environment("AVALANCHEROOT", self._dependency_paths[self._avalanche_dep])
if self._dependency_paths[self._zeromq_dep] is not None: # Conditional Package
self._env_file.add_environment("ZEROMQROOT", self._dependency_paths[self._zeromq_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._zeromq_dep],
"lib"))
if self._dependency_paths[self._xercesc_dep] is not None: # Conditional Package
self._env_file.add_environment("XERCESCROOT", self._dependency_paths[self._xercesc_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._xercesc_dep],
"lib"))
self._env_file.append_path(os.path.join(self._dependency_paths[self._clhep_dep], "bin"))
self._env_file.append_path(os.path.join(self._dependency_paths[self._geant_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._clhep_dep],
"lib"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._avalanche_dep],
"lib/cpp"))
if self._dependency_paths[self._curl_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._curl_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._curl_dep], "lib"))
if self._dependency_paths[self._bzip_dep] is not None: # Conditional Package
self._env_file.add_environment("BZIPROOT", self._dependency_paths[self._bzip_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._bzip_dep],
"lib"))
class RatRelease3(rat.RatRelease):
""" Base package installer for rat release 3."""
def __init__(self, name, system, tar_name):
""" Initlaise, take extra dependencies."""
super(RatRelease3, self).__init__(name, system, "root-5.32.04", "geant4.9.4.p01", "scons-2.1.0",
tar_name)
self._clhep_dep = "clhep-2.1.0.1"
self._curl_dep = "curl-7.26.0"
self._bzip_dep = "bzip2-1.0.6"
self._avalanche_dep = "avalanche-1"
self._zeromq_dep = "zeromq-2.2.0"
self._xercesc_dep = "xerces-c-3.1.1"
def _is_installed(self):
""" Rat releases and dev share a common install check."""
# Check rat, root, RATLib and RATDSLib
sys = os.uname()[0]
return self._system.file_exists('rat_%s-g++' % sys,
os.path.join(self.get_install_path(), "bin")) \
and self._system.file_exists('root',
os.path.join(self.get_install_path(), "bin")) \
and self._system.library_exists("librat_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib")) \
and self._system.library_exists("libRATEvent_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib"))
def _get_dependencies(self):
""" Return the extra dependencies."""
return [self._clhep_dep, self._curl_dep, self._bzip_dep, self._avalanche_dep, \
self._zeromq_dep, self._xercesc_dep]
def _write_env_file(self):
""" Add the extra info to the env file."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "env")
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._clhep_dep],
"lib"))
self._env_file.add_environment("AVALANCHEROOT", self._dependency_paths[self._avalanche_dep])
if self._dependency_paths[self._zeromq_dep] is not None: # Conditional Package
self._env_file.add_environment("ZEROMQROOT", self._dependency_paths[self._zeromq_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._zeromq_dep],
"lib"))
if self._dependency_paths[self._xercesc_dep] is not None: # Conditional Package
self._env_file.add_environment("XERCESCROOT", self._dependency_paths[self._xercesc_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._xercesc_dep],
"lib"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._avalanche_dep],
"lib/cpp"))
if self._dependency_paths[self._curl_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._curl_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._curl_dep], "lib"))
if self._dependency_paths[self._bzip_dep] is not None: # Conditional Package
self._env_file.add_environment("BZIPROOT", self._dependency_paths[self._bzip_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._bzip_dep],
"lib"))
# Must patch the rat config/EXTERNALS file if BZIPROOT is present
externals_file = open(os.path.join(self.get_install_path(), "config/EXTERNAL.scons"), "r")
text = externals_file.read()
externals_file.close()
externals_file = open(os.path.join(self.get_install_path(), "config/EXTERNAL.scons"), "w")
text = text.replace("ext_deps['bz2']['path'] = None",
"ext_deps['bz2']['path'] = os.environ['BZIPROOT']")
externals_file.write(text)
externals_file.close()
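# The read-modify-write patch above can be expressed more defensively with
# context managers; an equivalent sketch for comparison:
def _patch_externals_sketch(install_path):
    scons_path = os.path.join(install_path, "config/EXTERNAL.scons")
    with open(scons_path, "r") as externals_file:
        text = externals_file.read()
    text = text.replace("ext_deps['bz2']['path'] = None",
                        "ext_deps['bz2']['path'] = os.environ['BZIPROOT']")
    with open(scons_path, "w") as externals_file:
        externals_file.write(text)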
class RatRelease2(rat.RatRelease):
""" Base package installer for rat release 2."""
def __init__(self, name, system, tar_name):
""" Initlaise, take extra dependencies."""
super(RatRelease2, self).__init__(name, system, "root-5.28.00", "geant4.9.4.p01", "scons-2.1.0",
tar_name)
self._clhep_dep = "clhep-2.1.0.1"
self._curl_dep = "curl-7.26.0"
self._bzip_dep = "bzip2-1.0.6"
def _is_installed(self):
""" Rat releases and dev share a common install check."""
# Check rat, root, RATLib and RATDSLib
sys = os.uname()[0]
return self._system.file_exists('rat_%s-g++' % sys,
os.path.join(self.get_install_path(), "bin")) \
and self._system.file_exists('root',
os.path.join(self.get_install_path(), "bin")) \
and self._system.library_exists("librat_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib")) \
and self._system.library_exists("libRATEvent_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib"))
def _get_dependencies(self):
""" Return the extra dependencies."""
return [self._clhep_dep, self._curl_dep, self._bzip_dep]
def _write_env_file(self):
""" Add the extra info to the env file."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "env")
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._clhep_dep],
"lib"))
if self._dependency_paths[self._curl_dep] is not None: # Conditional Package
self._env_file.append_path(os.path.join(self._dependency_paths[self._curl_dep], "bin"))
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._curl_dep], "lib"))
if self._dependency_paths[self._bzip_dep] is not None: # Conditional Package
self._env_file.add_environment("BZIPROOT", self._dependency_paths[self._bzip_dep])
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._bzip_dep],
"lib"))
        # Must patch the rat config/EXTERNALS file if BZIPROOT is present
externals_file = open(os.path.join(self.get_install_path(), "config/EXTERNAL.scons"), "r")
text = externals_file.read()
        externals_file.close()
externals_file = open(os.path.join(self.get_install_path(), "config/EXTERNAL.scons"), "w")
text = text.replace("ext_deps['bz2']['path'] = None",
"ext_deps['bz2']['path'] = os.environ['BZIPROOT']")
externals_file.write(text)
externals_file.close()
class RatRelease0and1(rat.RatRelease):
""" Base package installer for rat releases 0, 1."""
def __init__(self, name, system, tar_name):
""" Initlaise, take extra dependencies."""
super(RatRelease0and1, self).__init__(name, system, "root-5.24.00", "geant4.9.2.p02",
"scons-1.2.0", tar_name)
self._clhep_dep = "clhep-2.0.4.2"
def _is_installed(self):
""" Rat releases and dev share a common install check."""
# Check rat, root, RATLib and RATDSLib
sys = os.uname()[0]
return self._system.file_exists('rat_%s-g++' % sys,
os.path.join(self.get_install_path(), "bin")) \
and self._system.file_exists('root',
os.path.join(self.get_install_path(), "bin")) \
and self._system.library_exists("librat_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib")) \
and self._system.library_exists("libRATEvent_%s-g++" % sys,
os.path.join(self.get_install_path(), "lib"))
def _get_dependencies(self):
""" Return the extra dependencies."""
return [self._clhep_dep]
def _write_env_file(self):
""" Add the extra info to the env file."""
self._env_file.add_source(self._dependency_paths[self._geant_dep], "env")
self._env_file.append_library_path(os.path.join(self._dependency_paths[self._clhep_dep],
"lib"))
| 65.22884
| 123
| 0.603085
| 2,611
| 20,808
| 4.451551
| 0.065875
| 0.103588
| 0.140583
| 0.17018
| 0.932117
| 0.929106
| 0.913103
| 0.897187
| 0.873441
| 0.872838
| 0
| 0.017365
| 0.272155
| 20,808
| 318
| 124
| 65.433962
| 0.750083
| 0.125433
| 0
| 0.862205
| 0
| 0
| 0.076613
| 0.01233
| 0
| 0
| 0
| 0
| 0
| 1
| 0.110236
| false
| 0
| 0.007874
| 0
| 0.200787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c74149919833aca52f6026a70e9c345df4e08ee7
| 1,938
|
py
|
Python
|
rroll.py
|
scp-localhost/perimeterPig
|
ed53842713d39d492f8fe08a208a79c8a5af3e8f
|
[
"MIT"
] | null | null | null |
rroll.py
|
scp-localhost/perimeterPig
|
ed53842713d39d492f8fe08a208a79c8a5af3e8f
|
[
"MIT"
] | null | null | null |
rroll.py
|
scp-localhost/perimeterPig
|
ed53842713d39d492f8fe08a208a79c8a5af3e8f
|
[
"MIT"
] | null | null | null |
rroll = ["","We're no strangers to love",
"You know the rules and so do I",
"A full commitment's what I'm thinking of",
"You wouldn't get this from any other guy",
"I just wanna tell you how I'm feeling",
"Gotta make you understand",
"Never gonna give you up",
"Never gonna let you down",
"Never gonna run around and desert you",
"Never gonna make you cry",
"Never gonna say goodbye",
"Never gonna tell a lie and hurt you",
"We've known each other for so long",
"Your heart's been aching but you're too shy to say it",
"Inside we both know what's been going on",
"We know the game and we're gonna play it",
"And if you ask me how I'm feeling",
"Don't tell me you're too blind to see",
"Never gonna give you up",
"Never gonna let you down",
"Never gonna run around and desert you",
"Never gonna make you cry",
"Never gonna say goodbye",
"Never gonna tell a lie and hurt you",
"Never gonna give you up",
"Never gonna let you down",
"Never gonna run around and desert you",
"Never gonna make you cry",
"Never gonna say goodbye",
"Never gonna tell a lie and hurt you",
"Never gonna give, never gonna give",
"(Give you up)",
"(Ooh) Never gonna give, never gonna give",
"(Give you up)",
"We've known each other for so long",
"Your heart's been aching but you're too shy to say it",
"Inside we both know what's been going on",
"We know the game and we're gonna play it",
"I just wanna tell you how I'm feeling",
"Gotta make you understand",
"Never gonna give you up",
"Never gonna let you down",
"Never gonna run around and desert you",
"Never gonna make you cry",
"Never gonna say goodbye",
"Never gonna tell a lie and hurt you",
"Never gonna give you up",
"Never gonna let you down",
"Never gonna run around and desert you",
"Never gonna make you cry",
"Never gonna say goodbye",
"Never gonna tell a lie and hurt you",
"Never gonna give you up",
"Never gonna let you down",
"Never gonna run around and desert you",
"Never gonna make you cry"]
| 34
| 56
| 0.715686
| 369
| 1,938
| 3.758808
| 0.214092
| 0.273973
| 0.100937
| 0.07354
| 0.878154
| 0.878154
| 0.878154
| 0.878154
| 0.878154
| 0.834895
| 0
| 0
| 0.181115
| 1,938
| 56
| 57
| 34.607143
| 0.873976
| 0
| 0
| 0.839286
| 0
| 0
| 0.878225
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c78af98c2c6a282ce4776cb9d0c32a3209929eeb
| 14,770
|
py
|
Python
|
google/ads/google_ads/v6/proto/services/keyword_plan_service_pb2_grpc.py
|
jphanwebstaurant/google-ads-python
|
600812b2afcc4d57f00b47dfe436620ce50bfe9b
|
[
"Apache-2.0"
] | 1
|
2021-04-09T04:28:47.000Z
|
2021-04-09T04:28:47.000Z
|
google/ads/google_ads/v6/proto/services/keyword_plan_service_pb2_grpc.py
|
jphanwebstaurant/google-ads-python
|
600812b2afcc4d57f00b47dfe436620ce50bfe9b
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v6/proto/services/keyword_plan_service_pb2_grpc.py
|
jphanwebstaurant/google-ads-python
|
600812b2afcc4d57f00b47dfe436620ce50bfe9b
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.ads.google_ads.v6.proto.resources import keyword_plan_pb2 as google_dot_ads_dot_googleads__v6_dot_proto_dot_resources_dot_keyword__plan__pb2
from google.ads.google_ads.v6.proto.services import keyword_plan_service_pb2 as google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2
class KeywordPlanServiceStub(object):
"""Proto file describing the keyword plan service.
Service to manage keyword plans.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetKeywordPlan = channel.unary_unary(
'/google.ads.googleads.v6.services.KeywordPlanService/GetKeywordPlan',
request_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GetKeywordPlanRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_resources_dot_keyword__plan__pb2.KeywordPlan.FromString,
)
self.MutateKeywordPlans = channel.unary_unary(
'/google.ads.googleads.v6.services.KeywordPlanService/MutateKeywordPlans',
request_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.MutateKeywordPlansRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.MutateKeywordPlansResponse.FromString,
)
self.GenerateForecastCurve = channel.unary_unary(
'/google.ads.googleads.v6.services.KeywordPlanService/GenerateForecastCurve',
request_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastCurveRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastCurveResponse.FromString,
)
self.GenerateForecastTimeSeries = channel.unary_unary(
'/google.ads.googleads.v6.services.KeywordPlanService/GenerateForecastTimeSeries',
request_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastTimeSeriesRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastTimeSeriesResponse.FromString,
)
self.GenerateForecastMetrics = channel.unary_unary(
'/google.ads.googleads.v6.services.KeywordPlanService/GenerateForecastMetrics',
request_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastMetricsRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastMetricsResponse.FromString,
)
self.GenerateHistoricalMetrics = channel.unary_unary(
'/google.ads.googleads.v6.services.KeywordPlanService/GenerateHistoricalMetrics',
request_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateHistoricalMetricsRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateHistoricalMetricsResponse.FromString,
)
class KeywordPlanServiceServicer(object):
"""Proto file describing the keyword plan service.
Service to manage keyword plans.
"""
def GetKeywordPlan(self, request, context):
"""Returns the requested plan in full detail.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def MutateKeywordPlans(self, request, context):
"""Creates, updates, or removes keyword plans. Operation statuses are
returned.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GenerateForecastCurve(self, request, context):
"""Returns the requested Keyword Plan forecast curve.
Only the bidding strategy is considered for generating forecast curve.
The bidding strategy value specified in the plan is ignored.
To generate a forecast at a value specified in the plan, use
KeywordPlanService.GenerateForecastMetrics.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GenerateForecastTimeSeries(self, request, context):
"""Returns a forecast in the form of a time series for the Keyword Plan over
the next 52 weeks.
(1) Forecasts closer to the current date are generally more accurate than
further out.
(2) The forecast reflects seasonal trends using current and
prior traffic patterns. The forecast period of the plan is ignored.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GenerateForecastMetrics(self, request, context):
"""Returns the requested Keyword Plan forecasts.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GenerateHistoricalMetrics(self, request, context):
"""Returns the requested Keyword Plan historical metrics.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_KeywordPlanServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetKeywordPlan': grpc.unary_unary_rpc_method_handler(
servicer.GetKeywordPlan,
request_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GetKeywordPlanRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_resources_dot_keyword__plan__pb2.KeywordPlan.SerializeToString,
),
'MutateKeywordPlans': grpc.unary_unary_rpc_method_handler(
servicer.MutateKeywordPlans,
request_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.MutateKeywordPlansRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.MutateKeywordPlansResponse.SerializeToString,
),
'GenerateForecastCurve': grpc.unary_unary_rpc_method_handler(
servicer.GenerateForecastCurve,
request_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastCurveRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastCurveResponse.SerializeToString,
),
'GenerateForecastTimeSeries': grpc.unary_unary_rpc_method_handler(
servicer.GenerateForecastTimeSeries,
request_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastTimeSeriesRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastTimeSeriesResponse.SerializeToString,
),
'GenerateForecastMetrics': grpc.unary_unary_rpc_method_handler(
servicer.GenerateForecastMetrics,
request_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastMetricsRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastMetricsResponse.SerializeToString,
),
'GenerateHistoricalMetrics': grpc.unary_unary_rpc_method_handler(
servicer.GenerateHistoricalMetrics,
request_deserializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateHistoricalMetricsRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateHistoricalMetricsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.ads.googleads.v6.services.KeywordPlanService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
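# Example client-side use of the stub above (standard gRPC wiring; the
# address is a placeholder, and real Google Ads calls also need a secure
# channel plus auth metadata and populated request fields, omitted here):
def _example_get_keyword_plan(address='localhost:50051'):
    channel = grpc.insecure_channel(address)
    stub = KeywordPlanServiceStub(channel)
    request = google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GetKeywordPlanRequest()
    return stub.GetKeywordPlan(request)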
# This class is part of an EXPERIMENTAL API.
class KeywordPlanService(object):
"""Proto file describing the keyword plan service.
Service to manage keyword plans.
"""
@staticmethod
def GetKeywordPlan(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.KeywordPlanService/GetKeywordPlan',
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GetKeywordPlanRequest.SerializeToString,
google_dot_ads_dot_googleads__v6_dot_proto_dot_resources_dot_keyword__plan__pb2.KeywordPlan.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def MutateKeywordPlans(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.KeywordPlanService/MutateKeywordPlans',
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.MutateKeywordPlansRequest.SerializeToString,
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.MutateKeywordPlansResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GenerateForecastCurve(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.KeywordPlanService/GenerateForecastCurve',
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastCurveRequest.SerializeToString,
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastCurveResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GenerateForecastTimeSeries(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.KeywordPlanService/GenerateForecastTimeSeries',
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastTimeSeriesRequest.SerializeToString,
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastTimeSeriesResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GenerateForecastMetrics(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.KeywordPlanService/GenerateForecastMetrics',
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastMetricsRequest.SerializeToString,
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateForecastMetricsResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GenerateHistoricalMetrics(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.KeywordPlanService/GenerateHistoricalMetrics',
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateHistoricalMetricsRequest.SerializeToString,
google_dot_ads_dot_googleads__v6_dot_proto_dot_services_dot_keyword__plan__service__pb2.GenerateHistoricalMetricsResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
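# Example server-side wiring for the servicer above (standard gRPC pattern;
# the port and thread count are placeholder choices, and a real service
# would register a subclass that overrides the UNIMPLEMENTED methods):
def _example_serve(port=50051):
    from concurrent import futures
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    add_KeywordPlanServiceServicer_to_server(KeywordPlanServiceServicer(), server)
    server.add_insecure_port('[::]:%d' % port)
    server.start()
    server.wait_for_termination()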
| 58.149606
| 181
| 0.748206
| 1,495
| 14,770
| 6.84214
| 0.111037
| 0.054844
| 0.044579
| 0.055724
| 0.819239
| 0.814742
| 0.806628
| 0.753837
| 0.73976
| 0.702806
| 0
| 0.008176
| 0.19675
| 14,770
| 253
| 182
| 58.379447
| 0.854012
| 0.089506
| 0
| 0.481283
| 1
| 0
| 0.101572
| 0.078295
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074866
| false
| 0
| 0.016043
| 0.032086
| 0.139037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c793647d27d578231ba4eae29800bc905e9e300d
| 24,173
|
py
|
Python
|
util/data/gen/cfgmgr32.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
util/data/gen/cfgmgr32.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
util/data/gen/cfgmgr32.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
symbols = []
exports = [{'type': 'function', 'name': 'CMP_GetBlockedDriverInfo', 'address': '0x7ffb3bf37f80'}, {'type': 'function', 'name': 'CMP_GetServerSideDeviceInstallFlags', 'address': '0x7ffb3bf37ff0'}, {'type': 'function', 'name': 'CMP_Init_Detection', 'address': '0x7ffb3bf38060'}, {'type': 'function', 'name': 'CMP_RegisterServiceNotification', 'address': '0x7ffb3bf38700'}, {'type': 'function', 'name': 'CMP_Register_Notification', 'address': '0x7ffb3bf24590'}, {'type': 'function', 'name': 'CMP_Report_LogOn', 'address': '0x7ffb3bf38080'}, {'type': 'function', 'name': 'CMP_WaitNoPendingInstallEvents', 'address': '0x7ffb3bf31740'}, {'type': 'function', 'name': 'CMP_WaitServicesAvailable', 'address': '0x7ffb3bf380a0'}, {'type': 'function', 'name': 'CM_Add_Driver_PackageW', 'address': '0x7ffb3bf38720'}, {'type': 'function', 'name': 'CM_Add_Driver_Package_ExW', 'address': '0x7ffb3bf38860'}, {'type': 'function', 'name': 'CM_Add_Empty_Log_Conf', 'address': '0x7ffb3bf38db0'}, {'type': 'function', 'name': 'CM_Add_Empty_Log_Conf_Ex', 'address': '0x7ffb3bf38f00'}, {'type': 'function', 'name': 'CM_Add_IDA', 'address': '0x7ffb3bf395e0'}, {'type': 'function', 'name': 'CM_Add_IDW', 'address': '0x7ffb3bf395f0'}, {'type': 'function', 'name': 'CM_Add_ID_ExA', 'address': '0x7ffb3bf396f0'}, {'type': 'function', 'name': 'CM_Add_ID_ExW', 'address': '0x7ffb3bf39790'}, {'type': 'function', 'name': 'CM_Add_Range', 'address': '0x7ffb3bf3ab50'}, {'type': 'function', 'name': 'CM_Add_Res_Des', 'address': '0x7ffb3bf3c070'}, {'type': 'function', 'name': 'CM_Add_Res_Des_Ex', 'address': '0x7ffb3bf3c320'}, {'type': 'function', 'name': 'CM_Apply_PowerScheme', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Connect_MachineA', 'address': '0x7ffb3bf380c0'}, {'type': 'function', 'name': 'CM_Connect_MachineW', 'address': '0x7ffb3bf38140'}, {'type': 'function', 'name': 'CM_Create_DevNodeA', 'address': '0x7ffb3bf397c0'}, {'type': 'function', 'name': 'CM_Create_DevNodeW', 'address': '0x7ffb3bf397e0'}, {'type': 'function', 'name': 'CM_Create_DevNode_ExA', 'address': '0x7ffb3bf399c0'}, {'type': 'function', 'name': 'CM_Create_DevNode_ExW', 'address': '0x7ffb3bf39a70'}, {'type': 'function', 'name': 'CM_Create_Range_List', 'address': '0x7ffb3bf3ac60'}, {'type': 'function', 'name': 'CM_Delete_Class_Key', 'address': '0x7ffb3bf3d390'}, {'type': 'function', 'name': 'CM_Delete_Class_Key_Ex', 'address': '0x7ffb3bf3d400'}, {'type': 'function', 'name': 'CM_Delete_DevNode_Key', 'address': '0x7ffb3bf3d430'}, {'type': 'function', 'name': 'CM_Delete_DevNode_Key_Ex', 'address': '0x7ffb3bf3d540'}, {'type': 'function', 'name': 'CM_Delete_Device_Interface_KeyA', 'address': '0x7ffb3bf3d570'}, {'type': 'function', 'name': 'CM_Delete_Device_Interface_KeyW', 'address': '0x7ffb3bf3d580'}, {'type': 'function', 'name': 'CM_Delete_Device_Interface_Key_ExA', 'address': '0x7ffb3bf3d5e0'}, {'type': 'function', 'name': 'CM_Delete_Device_Interface_Key_ExW', 'address': '0x7ffb3bf3d670'}, {'type': 'function', 'name': 'CM_Delete_Driver_PackageW', 'address': '0x7ffb3bf38980'}, {'type': 'function', 'name': 'CM_Delete_Driver_Package_ExW', 'address': '0x7ffb3bf38a80'}, {'type': 'function', 'name': 'CM_Delete_PowerScheme', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Delete_Range', 'address': '0x7ffb3bf3ad70'}, {'type': 'function', 'name': 'CM_Detect_Resource_Conflict', 'address': '0x7ffb3bf3c370'}, {'type': 'function', 'name': 'CM_Detect_Resource_Conflict_Ex', 'address': '0x7ffb3bf3c3a0'}, {'type': 'function', 'name': 'CM_Disable_DevNode', 
'address': '0x7ffb3bf39ab0'}, {'type': 'function', 'name': 'CM_Disable_DevNode_Ex', 'address': '0x7ffb3bf39be0'}, {'type': 'function', 'name': 'CM_Disconnect_Machine', 'address': '0x7ffb3bf381c0'}, {'type': 'function', 'name': 'CM_Dup_Range_List', 'address': '0x7ffb3bf3af50'}, {'type': 'function', 'name': 'CM_Duplicate_PowerScheme', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Enable_DevNode', 'address': '0x7ffb3bf39c10'}, {'type': 'function', 'name': 'CM_Enable_DevNode_Ex', 'address': '0x7ffb3bf39d30'}, {'type': 'function', 'name': 'CM_Enumerate_Classes', 'address': '0x7ffb3bf3d6a0'}, {'type': 'function', 'name': 'CM_Enumerate_Classes_Ex', 'address': '0x7ffb3bf3d710'}, {'type': 'function', 'name': 'CM_Enumerate_EnumeratorsA', 'address': '0x7ffb3bf3fa80'}, {'type': 'function', 'name': 'CM_Enumerate_EnumeratorsW', 'address': '0x7ffb3bf3faa0'}, {'type': 'function', 'name': 'CM_Enumerate_Enumerators_ExA', 'address': '0x7ffb3bf3fb10'}, {'type': 'function', 'name': 'CM_Enumerate_Enumerators_ExW', 'address': '0x7ffb3bf3fbc0'}, {'type': 'function', 'name': 'CM_Find_Range', 'address': '0x7ffb3bf3b090'}, {'type': 'function', 'name': 'CM_First_Range', 'address': '0x7ffb3bf3b2a0'}, {'type': 'function', 'name': 'CM_Free_Log_Conf', 'address': '0x7ffb3bf38f40'}, {'type': 'function', 'name': 'CM_Free_Log_Conf_Ex', 'address': '0x7ffb3bf39090'}, {'type': 'function', 'name': 'CM_Free_Log_Conf_Handle', 'address': '0x7ffb3bf390c0'}, {'type': 'function', 'name': 'CM_Free_Range_List', 'address': '0x7ffb3bf3b3c0'}, {'type': 'function', 'name': 'CM_Free_Res_Des', 'address': '0x7ffb3bf3c590'}, {'type': 'function', 'name': 'CM_Free_Res_Des_Ex', 'address': '0x7ffb3bf3c750'}, {'type': 'function', 'name': 'CM_Free_Res_Des_Handle', 'address': '0x7ffb3bf3c780'}, {'type': 'function', 'name': 'CM_Free_Resource_Conflict_Handle', 'address': '0x7ffb3bf402a0'}, {'type': 'function', 'name': 'CM_Get_Child', 'address': '0x7ffb3bf22c70'}, {'type': 'function', 'name': 'CM_Get_Child_Ex', 'address': '0x7ffb3bf3fc00'}, {'type': 'function', 'name': 'CM_Get_Class_Key_NameA', 'address': '0x7ffb3bf3d740'}, {'type': 'function', 'name': 'CM_Get_Class_Key_NameW', 'address': '0x7ffb3bf3d760'}, {'type': 'function', 'name': 'CM_Get_Class_Key_Name_ExA', 'address': '0x7ffb3bf3d810'}, {'type': 'function', 'name': 'CM_Get_Class_Key_Name_ExW', 'address': '0x7ffb3bf3d8b0'}, {'type': 'function', 'name': 'CM_Get_Class_NameA', 'address': '0x7ffb3bf3d8c0'}, {'type': 'function', 'name': 'CM_Get_Class_NameW', 'address': '0x7ffb3bf3d8e0'}, {'type': 'function', 'name': 'CM_Get_Class_Name_ExA', 'address': '0x7ffb3bf3da00'}, {'type': 'function', 'name': 'CM_Get_Class_Name_ExW', 'address': '0x7ffb3bf3dab0'}, {'type': 'function', 'name': 'CM_Get_Class_PropertyW', 'address': '0x7ffb3bf27270'}, {'type': 'function', 'name': 'CM_Get_Class_Property_ExW', 'address': '0x7ffb3bf27230'}, {'type': 'function', 'name': 'CM_Get_Class_Property_Keys', 'address': '0x7ffb3bf40ce0'}, {'type': 'function', 'name': 'CM_Get_Class_Property_Keys_Ex', 'address': '0x7ffb3bf40dc0'}, {'type': 'function', 'name': 'CM_Get_Class_Registry_PropertyA', 'address': '0x7ffb3bf3daf0'}, {'type': 'function', 'name': 'CM_Get_Class_Registry_PropertyW', 'address': '0x7ffb3bf3dc90'}, {'type': 'function', 'name': 'CM_Get_Depth', 'address': '0x7ffb3bf3fc30'}, {'type': 'function', 'name': 'CM_Get_Depth_Ex', 'address': '0x7ffb3bf3fd30'}, {'type': 'function', 'name': 'CM_Get_DevNode_Custom_PropertyA', 'address': '0x7ffb3bf3dfa0'}, {'type': 'function', 'name': 
'CM_Get_DevNode_Custom_PropertyW', 'address': '0x7ffb3bf26d80'}, {'type': 'function', 'name': 'CM_Get_DevNode_Custom_Property_ExA', 'address': '0x7ffb3bf21150'}, {'type': 'function', 'name': 'CM_Get_DevNode_Custom_Property_ExW', 'address': '0x7ffb3bf21400'}, {'type': 'function', 'name': 'CM_Get_DevNode_PropertyW', 'address': '0x7ffb3bf28ea0'}, {'type': 'function', 'name': 'CM_Get_DevNode_Property_ExW', 'address': '0x7ffb3bf27960'}, {'type': 'function', 'name': 'CM_Get_DevNode_Property_Keys', 'address': '0x7ffb3bf40e00'}, {'type': 'function', 'name': 'CM_Get_DevNode_Property_Keys_Ex', 'address': '0x7ffb3bf40f20'}, {'type': 'function', 'name': 'CM_Get_DevNode_Registry_PropertyA', 'address': '0x7ffb3bf3dfd0'}, {'type': 'function', 'name': 'CM_Get_DevNode_Registry_PropertyW', 'address': '0x7ffb3bf285e0'}, {'type': 'function', 'name': 'CM_Get_DevNode_Registry_Property_ExA', 'address': '0x7ffb3bf3e000'}, {'type': 'function', 'name': 'CM_Get_DevNode_Registry_Property_ExW', 'address': '0x7ffb3bf31530'}, {'type': 'function', 'name': 'CM_Get_DevNode_Status', 'address': '0x7ffb3bf26520'}, {'type': 'function', 'name': 'CM_Get_DevNode_Status_Ex', 'address': '0x7ffb3bf39d60'}, {'type': 'function', 'name': 'CM_Get_Device_IDA', 'address': '0x7ffb3bf26820'}, {'type': 'function', 'name': 'CM_Get_Device_IDW', 'address': '0x7ffb3bf307b0'}, {'type': 'function', 'name': 'CM_Get_Device_ID_ExA', 'address': '0x7ffb3bf3fd60'}, {'type': 'function', 'name': 'CM_Get_Device_ID_ExW', 'address': '0x7ffb3bf26ba0'}, {'type': 'function', 'name': 'CM_Get_Device_ID_ListA', 'address': '0x7ffb3bf3fe30'}, {'type': 'function', 'name': 'CM_Get_Device_ID_ListW', 'address': '0x7ffb3bf2de10'}, {'type': 'function', 'name': 'CM_Get_Device_ID_List_ExA', 'address': '0x7ffb3bf3fe50'}, {'type': 'function', 'name': 'CM_Get_Device_ID_List_ExW', 'address': '0x7ffb3bf2dde0'}, {'type': 'function', 'name': 'CM_Get_Device_ID_List_SizeA', 'address': '0x7ffb3bf3ffa0'}, {'type': 'function', 'name': 'CM_Get_Device_ID_List_SizeW', 'address': '0x7ffb3bf2e410'}, {'type': 'function', 'name': 'CM_Get_Device_ID_List_Size_ExA', 'address': '0x7ffb3bf3ffb0'}, {'type': 'function', 'name': 'CM_Get_Device_ID_List_Size_ExW', 'address': '0x7ffb3bf40070'}, {'type': 'function', 'name': 'CM_Get_Device_ID_Size', 'address': '0x7ffb3bf400a0'}, {'type': 'function', 'name': 'CM_Get_Device_ID_Size_Ex', 'address': '0x7ffb3bf2fc20'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_AliasA', 'address': '0x7ffb3bf3e1d0'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_AliasW', 'address': '0x7ffb3bf30400'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_Alias_ExA', 'address': '0x7ffb3bf3e200'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_Alias_ExW', 'address': '0x7ffb3bf303d0'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_ListA', 'address': '0x7ffb3bf3e3b0'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_ListW', 'address': '0x7ffb3bf2eac0'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_List_ExA', 'address': '0x7ffb3bf3e3e0'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_List_ExW', 'address': '0x7ffb3bf2ea90'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_List_SizeA', 'address': '0x7ffb3bf3e540'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_List_SizeW', 'address': '0x7ffb3bf2f800'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_List_Size_ExA', 'address': '0x7ffb3bf3e560'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_List_Size_ExW', 'address': '0x7ffb3bf2f7d0'}, {'type': 
'function', 'name': 'CM_Get_Device_Interface_PropertyW', 'address': '0x7ffb3bf27820'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_Property_ExW', 'address': '0x7ffb3bf277e0'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_Property_KeysW', 'address': '0x7ffb3bf40f60'}, {'type': 'function', 'name': 'CM_Get_Device_Interface_Property_Keys_ExW', 'address': '0x7ffb3bf40ff0'}, {'type': 'function', 'name': 'CM_Get_First_Log_Conf', 'address': '0x7ffb3bf217c0'}, {'type': 'function', 'name': 'CM_Get_First_Log_Conf_Ex', 'address': '0x7ffb3bf39150'}, {'type': 'function', 'name': 'CM_Get_Global_State', 'address': '0x7ffb3bf381e0'}, {'type': 'function', 'name': 'CM_Get_Global_State_Ex', 'address': '0x7ffb3bf38270'}, {'type': 'function', 'name': 'CM_Get_HW_Prof_FlagsA', 'address': '0x7ffb3bf41180'}, {'type': 'function', 'name': 'CM_Get_HW_Prof_FlagsW', 'address': '0x7ffb3bf411a0'}, {'type': 'function', 'name': 'CM_Get_HW_Prof_Flags_ExA', 'address': '0x7ffb3bf41270'}, {'type': 'function', 'name': 'CM_Get_HW_Prof_Flags_ExW', 'address': '0x7ffb3bf41310'}, {'type': 'function', 'name': 'CM_Get_Hardware_Profile_InfoA', 'address': '0x7ffb3bf41350'}, {'type': 'function', 'name': 'CM_Get_Hardware_Profile_InfoW', 'address': '0x7ffb3bf41360'}, {'type': 'function', 'name': 'CM_Get_Hardware_Profile_Info_ExA', 'address': '0x7ffb3bf413d0'}, {'type': 'function', 'name': 'CM_Get_Hardware_Profile_Info_ExW', 'address': '0x7ffb3bf414a0'}, {'type': 'function', 'name': 'CM_Get_Log_Conf_Priority', 'address': '0x7ffb3bf39180'}, {'type': 'function', 'name': 'CM_Get_Log_Conf_Priority_Ex', 'address': '0x7ffb3bf392d0'}, {'type': 'function', 'name': 'CM_Get_Next_Log_Conf', 'address': '0x7ffb3bf39300'}, {'type': 'function', 'name': 'CM_Get_Next_Log_Conf_Ex', 'address': '0x7ffb3bf39460'}, {'type': 'function', 'name': 'CM_Get_Next_Res_Des', 'address': '0x7ffb3bf3c810'}, {'type': 'function', 'name': 'CM_Get_Next_Res_Des_Ex', 'address': '0x7ffb3bf3c9d0'}, {'type': 'function', 'name': 'CM_Get_Parent', 'address': '0x7ffb3bf27430'}, {'type': 'function', 'name': 'CM_Get_Parent_Ex', 'address': '0x7ffb3bf400b0'}, {'type': 'function', 'name': 'CM_Get_Res_Des_Data', 'address': '0x7ffb3bf3ca10'}, {'type': 'function', 'name': 'CM_Get_Res_Des_Data_Ex', 'address': '0x7ffb3bf3cbd0'}, {'type': 'function', 'name': 'CM_Get_Res_Des_Data_Size', 'address': '0x7ffb3bf3cc10'}, {'type': 'function', 'name': 'CM_Get_Res_Des_Data_Size_Ex', 'address': '0x7ffb3bf3cdd0'}, {'type': 'function', 'name': 'CM_Get_Resource_Conflict_Count', 'address': '0x7ffb3bf40300'}, {'type': 'function', 'name': 'CM_Get_Resource_Conflict_DetailsA', 'address': '0x7ffb3bf40370'}, {'type': 'function', 'name': 'CM_Get_Resource_Conflict_DetailsW', 'address': '0x7ffb3bf404e0'}, {'type': 'function', 'name': 'CM_Get_Sibling', 'address': '0x7ffb3bf31a00'}, {'type': 'function', 'name': 'CM_Get_Sibling_Ex', 'address': '0x7ffb3bf400e0'}, {'type': 'function', 'name': 'CM_Get_Version', 'address': '0x7ffb3bf382a0'}, {'type': 'function', 'name': 'CM_Get_Version_Ex', 'address': '0x7ffb3bf382b0'}, {'type': 'function', 'name': 'CM_Import_PowerScheme', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Install_DevNodeW', 'address': '0x7ffb3bf41940'}, {'type': 'function', 'name': 'CM_Install_DevNode_ExW', 'address': '0x7ffb3bf41ad0'}, {'type': 'function', 'name': 'CM_Install_DriverW', 'address': '0x7ffb3bf38b60'}, {'type': 'function', 'name': 'CM_Intersect_Range_List', 'address': '0x7ffb3bf3b4e0'}, {'type': 'function', 'name': 'CM_Invert_Range_List', 'address': 
'0x7ffb3bf3b740'}, {'type': 'function', 'name': 'CM_Is_Dock_Station_Present', 'address': '0x7ffb3bf414d0'}, {'type': 'function', 'name': 'CM_Is_Dock_Station_Present_Ex', 'address': '0x7ffb3bf415a0'}, {'type': 'function', 'name': 'CM_Is_Version_Available', 'address': '0x7ffb3bf382d0'}, {'type': 'function', 'name': 'CM_Is_Version_Available_Ex', 'address': '0x7ffb3bf38300'}, {'type': 'function', 'name': 'CM_Locate_DevNodeA', 'address': '0x7ffb3bf40110'}, {'type': 'function', 'name': 'CM_Locate_DevNodeW', 'address': '0x7ffb3bf27980'}, {'type': 'function', 'name': 'CM_Locate_DevNode_ExA', 'address': '0x7ffb3bf40120'}, {'type': 'function', 'name': 'CM_Locate_DevNode_ExW', 'address': '0x7ffb3bf27940'}, {'type': 'function', 'name': 'CM_MapCrToSpErr', 'address': '0x7ffb3bf30370'}, {'type': 'function', 'name': 'CM_MapCrToWin32Err', 'address': '0x7ffb3bf302f0'}, {'type': 'function', 'name': 'CM_Merge_Range_List', 'address': '0x7ffb3bf3b900'}, {'type': 'function', 'name': 'CM_Modify_Res_Des', 'address': '0x7ffb3bf3ce00'}, {'type': 'function', 'name': 'CM_Modify_Res_Des_Ex', 'address': '0x7ffb3bf3d0b0'}, {'type': 'function', 'name': 'CM_Move_DevNode', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Move_DevNode_Ex', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Next_Range', 'address': '0x7ffb3bf3bc30'}, {'type': 'function', 'name': 'CM_Open_Class_KeyA', 'address': '0x7ffb3bf3e6f0'}, {'type': 'function', 'name': 'CM_Open_Class_KeyW', 'address': '0x7ffb3bf22270'}, {'type': 'function', 'name': 'CM_Open_Class_Key_ExA', 'address': '0x7ffb3bf3e720'}, {'type': 'function', 'name': 'CM_Open_Class_Key_ExW', 'address': '0x7ffb3bf3e7e0'}, {'type': 'function', 'name': 'CM_Open_DevNode_Key', 'address': '0x7ffb3bf268f0'}, {'type': 'function', 'name': 'CM_Open_DevNode_Key_Ex', 'address': '0x7ffb3bf241b0'}, {'type': 'function', 'name': 'CM_Open_Device_Interface_KeyA', 'address': '0x7ffb3bf3e830'}, {'type': 'function', 'name': 'CM_Open_Device_Interface_KeyW', 'address': '0x7ffb3bf30af0'}, {'type': 'function', 'name': 'CM_Open_Device_Interface_Key_ExA', 'address': '0x7ffb3bf3e860'}, {'type': 'function', 'name': 'CM_Open_Device_Interface_Key_ExW', 'address': '0x7ffb3bf30ac0'}, {'type': 'function', 'name': 'CM_Query_And_Remove_SubTreeA', 'address': '0x7ffb3bf39da0'}, {'type': 'function', 'name': 'CM_Query_And_Remove_SubTreeW', 'address': '0x7ffb3bf39dd0'}, {'type': 'function', 'name': 'CM_Query_And_Remove_SubTree_ExA', 'address': '0x7ffb3bf39f40'}, {'type': 'function', 'name': 'CM_Query_And_Remove_SubTree_ExW', 'address': '0x7ffb3bf3a080'}, {'type': 'function', 'name': 'CM_Query_Arbitrator_Free_Data', 'address': '0x7ffb3bf38320'}, {'type': 'function', 'name': 'CM_Query_Arbitrator_Free_Data_Ex', 'address': '0x7ffb3bf38440'}, {'type': 'function', 'name': 'CM_Query_Arbitrator_Free_Size', 'address': '0x7ffb3bf38480'}, {'type': 'function', 'name': 'CM_Query_Arbitrator_Free_Size_Ex', 'address': '0x7ffb3bf385a0'}, {'type': 'function', 'name': 'CM_Query_Remove_SubTree', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Query_Remove_SubTree_Ex', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Query_Resource_Conflict_List', 'address': '0x7ffb3bf40820'}, {'type': 'function', 'name': 'CM_Reenumerate_DevNode', 'address': '0x7ffb3bf3a0c0'}, {'type': 'function', 'name': 'CM_Reenumerate_DevNode_Ex', 'address': '0x7ffb3bf3a1f0'}, {'type': 'function', 'name': 'CM_Register_Device_Driver', 'address': '0x7ffb3bf3a220'}, {'type': 'function', 'name': 'CM_Register_Device_Driver_Ex', 
'address': '0x7ffb3bf3a320'}, {'type': 'function', 'name': 'CM_Register_Device_InterfaceA', 'address': '0x7ffb3bf3e910'}, {'type': 'function', 'name': 'CM_Register_Device_InterfaceW', 'address': '0x7ffb3bf3e940'}, {'type': 'function', 'name': 'CM_Register_Device_Interface_ExA', 'address': '0x7ffb3bf3ea80'}, {'type': 'function', 'name': 'CM_Register_Device_Interface_ExW', 'address': '0x7ffb3bf3ec40'}, {'type': 'function', 'name': 'CM_Register_Notification', 'address': '0x7ffb3bf23a30'}, {'type': 'function', 'name': 'CM_Remove_SubTree', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Remove_SubTree_Ex', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Request_Device_EjectA', 'address': '0x7ffb3bf3a350'}, {'type': 'function', 'name': 'CM_Request_Device_EjectW', 'address': '0x7ffb3bf3a380'}, {'type': 'function', 'name': 'CM_Request_Device_Eject_ExA', 'address': '0x7ffb3bf3a4e0'}, {'type': 'function', 'name': 'CM_Request_Device_Eject_ExW', 'address': '0x7ffb3bf3a620'}, {'type': 'function', 'name': 'CM_Request_Eject_PC', 'address': '0x7ffb3bf415d0'}, {'type': 'function', 'name': 'CM_Request_Eject_PC_Ex', 'address': '0x7ffb3bf416a0'}, {'type': 'function', 'name': 'CM_RestoreAll_DefaultPowerSchemes', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Restore_DefaultPowerScheme', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Run_Detection', 'address': '0x7ffb3bf385e0'}, {'type': 'function', 'name': 'CM_Run_Detection_Ex', 'address': '0x7ffb3bf38600'}, {'type': 'function', 'name': 'CM_Set_ActiveScheme', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'CM_Set_Class_PropertyW', 'address': '0x7ffb3bf41030'}, {'type': 'function', 'name': 'CM_Set_Class_Property_ExW', 'address': '0x7ffb3bf41130'}, {'type': 'function', 'name': 'CM_Set_Class_Registry_PropertyA', 'address': '0x7ffb3bf3ec90'}, {'type': 'function', 'name': 'CM_Set_Class_Registry_PropertyW', 'address': '0x7ffb3bf3ee00'}, {'type': 'function', 'name': 'CM_Set_DevNode_Problem', 'address': '0x7ffb3bf3a660'}, {'type': 'function', 'name': 'CM_Set_DevNode_Problem_Ex', 'address': '0x7ffb3bf3a7a0'}, {'type': 'function', 'name': 'CM_Set_DevNode_PropertyW', 'address': '0x7ffb3bf32a40'}, {'type': 'function', 'name': 'CM_Set_DevNode_Property_ExW', 'address': '0x7ffb3bf32a00'}, {'type': 'function', 'name': 'CM_Set_DevNode_Registry_PropertyA', 'address': '0x7ffb3bf3f1a0'}, {'type': 'function', 'name': 'CM_Set_DevNode_Registry_PropertyW', 'address': '0x7ffb3bf3f1d0'}, {'type': 'function', 'name': 'CM_Set_DevNode_Registry_Property_ExA', 'address': '0x7ffb3bf3f5b0'}, {'type': 'function', 'name': 'CM_Set_DevNode_Registry_Property_ExW', 'address': '0x7ffb3bf3f740'}, {'type': 'function', 'name': 'CM_Set_Device_Interface_PropertyW', 'address': '0x7ffb3bf32cc0'}, {'type': 'function', 'name': 'CM_Set_Device_Interface_Property_ExW', 'address': '0x7ffb3bf32c80'}, {'type': 'function', 'name': 'CM_Set_HW_Prof', 'address': '0x7ffb3bf416d0'}, {'type': 'function', 'name': 'CM_Set_HW_Prof_Ex', 'address': '0x7ffb3bf416f0'}, {'type': 'function', 'name': 'CM_Set_HW_Prof_FlagsA', 'address': '0x7ffb3bf41720'}, {'type': 'function', 'name': 'CM_Set_HW_Prof_FlagsW', 'address': '0x7ffb3bf41740'}, {'type': 'function', 'name': 'CM_Set_HW_Prof_Flags_ExA', 'address': '0x7ffb3bf41860'}, {'type': 'function', 'name': 'CM_Set_HW_Prof_Flags_ExW', 'address': '0x7ffb3bf41900'}, {'type': 'function', 'name': 'CM_Setup_DevNode', 'address': '0x7ffb3bf3a7d0'}, {'type': 'function', 'name': 'CM_Setup_DevNode_Ex', 'address': 
'0x7ffb3bf3a900'}, {'type': 'function', 'name': 'CM_Test_Range_Available', 'address': '0x7ffb3bf3bd50'}, {'type': 'function', 'name': 'CM_Uninstall_DevNode', 'address': '0x7ffb3bf3a930'}, {'type': 'function', 'name': 'CM_Uninstall_DevNode_Ex', 'address': '0x7ffb3bf3aa20'}, {'type': 'function', 'name': 'CM_Uninstall_DriverW', 'address': '0x7ffb3bf38c70'}, {'type': 'function', 'name': 'CM_Unregister_Device_InterfaceA', 'address': '0x7ffb3bf3f780'}, {'type': 'function', 'name': 'CM_Unregister_Device_InterfaceW', 'address': '0x7ffb3bf3f790'}, {'type': 'function', 'name': 'CM_Unregister_Device_Interface_ExA', 'address': '0x7ffb3bf3f7f0'}, {'type': 'function', 'name': 'CM_Unregister_Device_Interface_ExW', 'address': '0x7ffb3bf3f8c0'}, {'type': 'function', 'name': 'CM_Unregister_Notification', 'address': '0x7ffb3bf23720'}, {'type': 'function', 'name': 'CM_Write_UserPowerKey', 'address': '0x7ffb3bf38710'}, {'type': 'function', 'name': 'DevCloseObjectQuery', 'address': '0x7ffb3bf30620'}, {'type': 'function', 'name': 'DevCreateObjectQuery', 'address': '0x7ffb3bf23b30'}, {'type': 'function', 'name': 'DevCreateObjectQueryEx', 'address': '0x7ffb3bf22190'}, {'type': 'function', 'name': 'DevCreateObjectQueryFromId', 'address': '0x7ffb3bf32f20'}, {'type': 'function', 'name': 'DevCreateObjectQueryFromIdEx', 'address': '0x7ffb3bf32f90'}, {'type': 'function', 'name': 'DevCreateObjectQueryFromIds', 'address': '0x7ffb3bf4bf50'}, {'type': 'function', 'name': 'DevCreateObjectQueryFromIdsEx', 'address': '0x7ffb3bf4bfc0'}, {'type': 'function', 'name': 'DevFindProperty', 'address': '0x7ffb3bf2fb40'}, {'type': 'function', 'name': 'DevFreeObjectProperties', 'address': '0x7ffb3bf2f9c0'}, {'type': 'function', 'name': 'DevFreeObjects', 'address': '0x7ffb3bf30270'}, {'type': 'function', 'name': 'DevGetObjectProperties', 'address': '0x7ffb3bf24eb0'}, {'type': 'function', 'name': 'DevGetObjectPropertiesEx', 'address': '0x7ffb3bf4c0e0'}, {'type': 'function', 'name': 'DevGetObjects', 'address': '0x7ffb3bf21c10'}, {'type': 'function', 'name': 'DevGetObjectsEx', 'address': '0x7ffb3bf21c70'}, {'type': 'function', 'name': 'DevSetObjectProperties', 'address': '0x7ffb3bf307f0'}, {'type': 'function', 'name': 'SwDeviceClose', 'address': '0x7ffb3bf330d0'}, {'type': 'function', 'name': 'SwDeviceCreate', 'address': '0x7ffb3bf31e00'}, {'type': 'function', 'name': 'SwDeviceGetLifetime', 'address': '0x7ffb3bf4d2b0'}, {'type': 'function', 'name': 'SwDeviceInterfacePropertySet', 'address': '0x7ffb3bf328a0'}, {'type': 'function', 'name': 'SwDeviceInterfaceRegister', 'address': '0x7ffb3bf32630'}, {'type': 'function', 'name': 'SwDeviceInterfaceSetState', 'address': '0x7ffb3bf324f0'}, {'type': 'function', 'name': 'SwDevicePropertySet', 'address': '0x7ffb3bf31cc0'}, {'type': 'function', 'name': 'SwDeviceSetLifetime', 'address': '0x7ffb3bf33080'}, {'type': 'function', 'name': 'SwMemFree', 'address': '0x7ffb3bf238f0'}]
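# hedged usage sketch (editor addition): the dump above is a flat table of
# {'type', 'name', 'address'} records; a minimal way to index it for
# name -> address lookups, with one spot-check against the data itself:
addr_by_name = {e['name']: int(e['address'], 16) for e in exports}
assert addr_by_name['CM_Get_Parent'] == 0x7ffb3bf27430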
| 12,086.5
| 24,160
| 0.700616
| 2,567
| 24,173
| 6.24815
| 0.174133
| 0.207993
| 0.277324
| 0.276077
| 0.500218
| 0.457385
| 0.348775
| 0.160858
| 0.021198
| 0
| 0
| 0.08533
| 0.069168
| 24,173
| 2
| 24,160
| 12,086.5
| 0.627483
| 0
| 0
| 0
| 0
| 0
| 0.700008
| 0.212625
| 0
| 0
| 0.160999
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
401e47b75f88d664d3b7917c802b1de28dbac134
| 4,340
|
py
|
Python
|
iv_plan/examples/recognition_sample.py
|
ryhanai/iv-plan-hironx
|
2f89293a55df4608cb35e6a9676db97b9e486e7d
|
[
"BSD-3-Clause"
] | null | null | null |
iv_plan/examples/recognition_sample.py
|
ryhanai/iv-plan-hironx
|
2f89293a55df4608cb35e6a9676db97b9e486e7d
|
[
"BSD-3-Clause"
] | null | null | null |
iv_plan/examples/recognition_sample.py
|
ryhanai/iv-plan-hironx
|
2f89293a55df4608cb35e6a9676db97b9e486e7d
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
def detect(name='box0'):
if rr:
Tleye_cb = rr.detect(camera='leye')
bl,bh,bw = 97,66,57
Tcb_box = FRAME(xyzabc=[12,-8,-bw/2.0,0,0,pi])
Tleye_box = Tleye_cb*Tcb_box
r.set_joint_angles(rr.get_joint_angles())
print 'leye->target:', Tleye_box
Twld_box = r.get_link('HEAD_JOINT1_Link').where()*r.Thd_leye*Tleye_box
print 'world->target:', Twld_box
# visualize the recognized pose
env.delete_object(name)
bx = visual.box(length=bl, height=bh, width=bw, color=(1,0,1))
obj = PartsObjectWithName(vbody=bx,name=name)
env.insert_object(obj, Twld_box, env.get_world())
return Twld_box
else:
obj = env.get_object(name)
if obj:
frm2 = obj.where()
print "world->target:", frm2
return frm2
else:
print "not detected"
return None
def detect_rhand():
'''Recognize boxes tagged with AR markers (handles multiple boxes).'''
if rr:
res = rr.detect(camera='rhand')
bl,bh,bw = 97,66,57
Tmk_box = FRAME(xyzabc=[0,0,-bh/2.0,pi/2,0,0])
frms = []
r.set_joint_angles(rr.get_joint_angles())
for objnum,Tcam_mk in res:
Tcam_box = Tcam_mk*Tmk_box
print 'rhand->target:', Tcam_box
Twld_box = r.get_link('RARM_JOINT5_Link').where()*r.Trh_cam*Tcam_box
print 'world->target:', Twld_box
frms.append((objnum,Twld_box))
# visualize the recognized pose
name = 'box'+str(objnum)
env.delete_object(name)
bx = visual.box(length=bl, height=bh, width=bw, color=(1,0,1))
obj = PartsObjectWithName(vbody=bx,name=name)
env.insert_object(obj, Twld_box, env.get_world())
return frms
else:
# detect objects whose names match box<number>
# (the part after "box" is the marker number)
def detected(obj):
x,y,z = obj.where().vec
return z > 700 and re.match('box[0-9]+', obj.name)
detected_objs = [x for x in env.get_objects() if detected(x)]
return [(int(re.sub('box', '', x.name)), x.where()) for x in detected_objs]
def detect_rhand2():
'''Recognize boxes tagged with AR markers (handles multiple boxes).'''
if rr:
res = rr.detect(camera='rhand')
bl,bh,bw = 97,66,57
Tmk_box = FRAME(xyzabc=[0,0,-bw/2.0,0,0,0])
frms = []
r.set_joint_angles(rr.get_joint_angles())
for objnum,Tcam_mk in res:
Tcam_box = Tcam_mk*Tmk_box
print 'rhand->target:', Tcam_box
Twld_box = r.get_link('RARM_JOINT5_Link').where()*r.Trh_cam*Tcam_box
print 'world->target:', Twld_box
frms.append((objnum,Twld_box))
# visualize the recognized pose
name = 'box'+str(objnum)
env.delete_object(name)
bx = visual.box(length=bl, height=bh, width=bw, color=(1,0,1))
obj = PartsObjectWithName(vbody=bx,name=name)
env.insert_object(obj, Twld_box, env.get_world())
return frms
else:
# detect objects whose names match box<number>
# (the part after "box" is the marker number)
def detected(obj):
x,y,z = obj.where().vec
return z > 700 and re.match('box[0-9]+', obj.name)
detected_objs = [x for x in env.get_objects() if detected(x)]
return [(int(re.sub('box', '', x.name)), x.where()) for x in detected_objs]
def look_for_boxes(name='box0'):
'''Search for the boxes (markers) by changing the right-hand orientation.'''
f0 = r.fk()
objfrms = [None,None]
for i in range(1,2)+range(2,-4,-1):
f = f0 * FRAME(xyzabc=[0,0,0,0,0,pi/16*i])
js = r.ik(f)[0]
r.set_arm_joint_angles(js)
sync(duration=1.5)
for objnum, objfrm in detect_rhand():
print 'marker %d found'%objnum
if objnum < 2:
objfrms[objnum] = objfrm
print objfrms
if objfrms[0] and objfrms[1]:
return objfrms
return None
def look_for_boxes2(num):
'''Search for the boxes (markers) by changing the right-hand orientation.'''
f0 = r.fk()
objfrms = [None,None]
for i in range(1,2)+range(2,-4,-1):
f = f0 * FRAME(xyzabc=[0,0,0,0,0,pi/16*i])
js = r.ik(f)[0]
r.set_arm_joint_angles(js)
sync(duration=1.5)
for objnum, objfrm in detect_rhand2():
print 'marker %d found'%objnum
if objnum == num:
print objfrm
return objfrm
return None
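# hedged usage sketch (editor addition): how the helpers above appear to be
# driven, assuming the surrounding iv_plan session already provides r, rr,
# env and sync() exactly as the functions above do; guarded so importing the
# sample stays side-effect free.
if __name__ == '__main__':
frm = detect('box0') # left-eye detection of a single box
if frm:
print 'box0 pose:', frm
frms = look_for_boxes() # sweep the right hand for markers 0 and 1
if frms:
print 'both markers found:', frms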
| 32.38806
| 83
| 0.547005
| 617
| 4,340
| 3.706645
| 0.192869
| 0.013992
| 0.011806
| 0.008745
| 0.798426
| 0.793616
| 0.76913
| 0.74202
| 0.728465
| 0.728465
| 0
| 0.034425
| 0.310599
| 4,340
| 133
| 84
| 32.631579
| 0.729947
| 0.024424
| 0
| 0.721154
| 0
| 0
| 0.055596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.115385
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4064f6078d85a087105e863af864b409aa634284
| 16,784
|
py
|
Python
|
sdk/python/pulumi_gcp/sourcerepo/repository.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/sourcerepo/repository.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/sourcerepo/repository.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['RepositoryArgs', 'Repository']
@pulumi.input_type
class RepositoryArgs:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_configs: Optional[pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]]] = None):
"""
The set of arguments for constructing a Repository resource.
:param pulumi.Input[str] name: Resource name of the repository, of the form `{{repo}}`.
The repo name may contain slashes. eg, `name/with/slash`
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]] pubsub_configs: How this repository publishes a change in the repository through Cloud Pub/Sub.
Keyed by the topic names.
Structure is documented below.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if pubsub_configs is not None:
pulumi.set(__self__, "pubsub_configs", pubsub_configs)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name of the repository, of the form `{{repo}}`.
The repo name may contain slashes. eg, `name/with/slash`
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="pubsubConfigs")
def pubsub_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]]]:
"""
How this repository publishes a change in the repository through Cloud Pub/Sub.
Keyed by the topic names.
Structure is documented below.
"""
return pulumi.get(self, "pubsub_configs")
@pubsub_configs.setter
def pubsub_configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]]]):
pulumi.set(self, "pubsub_configs", value)
@pulumi.input_type
class _RepositoryState:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_configs: Optional[pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]]] = None,
size: Optional[pulumi.Input[int]] = None,
url: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Repository resources.
:param pulumi.Input[str] name: Resource name of the repository, of the form `{{repo}}`.
The repo name may contain slashes. eg, `name/with/slash`
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]] pubsub_configs: How this repository publishes a change in the repository through Cloud Pub/Sub.
Keyed by the topic names.
Structure is documented below.
:param pulumi.Input[int] size: The disk usage of the repo, in bytes.
:param pulumi.Input[str] url: URL to clone the repository from Google Cloud Source Repositories.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if pubsub_configs is not None:
pulumi.set(__self__, "pubsub_configs", pubsub_configs)
if size is not None:
pulumi.set(__self__, "size", size)
if url is not None:
pulumi.set(__self__, "url", url)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name of the repository, of the form `{{repo}}`.
The repo name may contain slashes. eg, `name/with/slash`
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="pubsubConfigs")
def pubsub_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]]]:
"""
How this repository publishes a change in the repository through Cloud Pub/Sub.
Keyed by the topic names.
Structure is documented below.
"""
return pulumi.get(self, "pubsub_configs")
@pubsub_configs.setter
def pubsub_configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RepositoryPubsubConfigArgs']]]]):
pulumi.set(self, "pubsub_configs", value)
@property
@pulumi.getter
def size(self) -> Optional[pulumi.Input[int]]:
"""
The disk usage of the repo, in bytes.
"""
return pulumi.get(self, "size")
@size.setter
def size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "size", value)
@property
@pulumi.getter
def url(self) -> Optional[pulumi.Input[str]]:
"""
URL to clone the repository from Google Cloud Source Repositories.
"""
return pulumi.get(self, "url")
@url.setter
def url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "url", value)
class Repository(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RepositoryPubsubConfigArgs']]]]] = None,
__props__=None):
"""
A repository (or repo) is a Git repository storing versioned source content.
To get more information about Repository, see:
* [API documentation](https://cloud.google.com/source-repositories/docs/reference/rest/v1/projects.repos)
* How-to Guides
* [Official Documentation](https://cloud.google.com/source-repositories/)
## Example Usage
### Sourcerepo Repository Basic
```python
import pulumi
import pulumi_gcp as gcp
my_repo = gcp.sourcerepo.Repository("my-repo")
```
### Sourcerepo Repository Full
```python
import pulumi
import pulumi_gcp as gcp
test_account = gcp.service_account.Account("testAccount",
account_id="my-account",
display_name="Test Service Account")
topic = gcp.pubsub.Topic("topic")
my_repo = gcp.sourcerepo.Repository("my-repo", pubsub_configs=[gcp.sourcerepo.RepositoryPubsubConfigArgs(
topic=topic.id,
message_format="JSON",
service_account_email=test_account.email,
)])
```
## Import
Repository can be imported using any of these accepted formats
```sh
$ pulumi import gcp:sourcerepo/repository:Repository default projects/{{project}}/repos/{{name}}
```
```sh
$ pulumi import gcp:sourcerepo/repository:Repository default {{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] name: Resource name of the repository, of the form `{{repo}}`.
The repo name may contain slashes. eg, `name/with/slash`
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RepositoryPubsubConfigArgs']]]] pubsub_configs: How this repository publishes a change in the repository through Cloud Pub/Sub.
Keyed by the topic names.
Structure is documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[RepositoryArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A repository (or repo) is a Git repository storing versioned source content.
To get more information about Repository, see:
* [API documentation](https://cloud.google.com/source-repositories/docs/reference/rest/v1/projects.repos)
* How-to Guides
* [Official Documentation](https://cloud.google.com/source-repositories/)
## Example Usage
### Sourcerepo Repository Basic
```python
import pulumi
import pulumi_gcp as gcp
my_repo = gcp.sourcerepo.Repository("my-repo")
```
### Sourcerepo Repository Full
```python
import pulumi
import pulumi_gcp as gcp
test_account = gcp.service_account.Account("testAccount",
account_id="my-account",
display_name="Test Service Account")
topic = gcp.pubsub.Topic("topic")
my_repo = gcp.sourcerepo.Repository("my-repo", pubsub_configs=[gcp.sourcerepo.RepositoryPubsubConfigArgs(
topic=topic.id,
message_format="JSON",
service_account_email=test_account.email,
)])
```
## Import
Repository can be imported using any of these accepted formats
```sh
$ pulumi import gcp:sourcerepo/repository:Repository default projects/{{project}}/repos/{{name}}
```
```sh
$ pulumi import gcp:sourcerepo/repository:Repository default {{name}}
```
:param str resource_name: The name of the resource.
:param RepositoryArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RepositoryArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RepositoryPubsubConfigArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RepositoryArgs.__new__(RepositoryArgs)
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["pubsub_configs"] = pubsub_configs
__props__.__dict__["size"] = None
__props__.__dict__["url"] = None
super(Repository, __self__).__init__(
'gcp:sourcerepo/repository:Repository',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RepositoryPubsubConfigArgs']]]]] = None,
size: Optional[pulumi.Input[int]] = None,
url: Optional[pulumi.Input[str]] = None) -> 'Repository':
"""
Get an existing Repository resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] name: Resource name of the repository, of the form `{{repo}}`.
The repo name may contain slashes. eg, `name/with/slash`
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RepositoryPubsubConfigArgs']]]] pubsub_configs: How this repository publishes a change in the repository through Cloud Pub/Sub.
Keyed by the topic names.
Structure is documented below.
:param pulumi.Input[int] size: The disk usage of the repo, in bytes.
:param pulumi.Input[str] url: URL to clone the repository from Google Cloud Source Repositories.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RepositoryState.__new__(_RepositoryState)
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["pubsub_configs"] = pubsub_configs
__props__.__dict__["size"] = size
__props__.__dict__["url"] = url
return Repository(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name of the repository, of the form `{{repo}}`.
The repo name may contain slashes. eg, `name/with/slash`
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="pubsubConfigs")
def pubsub_configs(self) -> pulumi.Output[Optional[Sequence['outputs.RepositoryPubsubConfig']]]:
"""
How this repository publishes a change in the repository through Cloud Pub/Sub.
Keyed by the topic names.
Structure is documented below.
"""
return pulumi.get(self, "pubsub_configs")
@property
@pulumi.getter
def size(self) -> pulumi.Output[int]:
"""
The disk usage of the repo, in bytes.
"""
return pulumi.get(self, "size")
@property
@pulumi.getter
def url(self) -> pulumi.Output[str]:
"""
URL to clone the repository from Google Cloud Source Repositories.
"""
return pulumi.get(self, "url")
| 40.346154
| 195
| 0.630779
| 1,918
| 16,784
| 5.355579
| 0.108446
| 0.072819
| 0.064739
| 0.047118
| 0.823403
| 0.804809
| 0.782321
| 0.77463
| 0.769665
| 0.759833
| 0
| 0.000244
| 0.266742
| 16,784
| 415
| 196
| 40.443373
| 0.834403
| 0.427073
| 0
| 0.633333
| 1
| 0
| 0.096653
| 0.036381
| 0
| 0
| 0
| 0
| 0
| 1
| 0.155556
| false
| 0.005556
| 0.038889
| 0
| 0.288889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4094a2713fcccf917d7487e398807e3f8c2da23d
| 162
|
py
|
Python
|
src/ctc/db/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 94
|
2022-02-15T19:34:49.000Z
|
2022-03-26T19:26:22.000Z
|
src/ctc/db/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-03-03T02:58:47.000Z
|
2022-03-11T18:41:05.000Z
|
src/ctc/db/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-02-15T17:53:07.000Z
|
2022-03-17T19:14:17.000Z
|
from .management import *
from .schemas import *
from .connect_utils import *
from .intake_utils import *
from .query_utils import *
from .schema_utils import *
| 20.25
| 28
| 0.771605
| 22
| 162
| 5.5
| 0.409091
| 0.413223
| 0.371901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154321
| 162
| 7
| 29
| 23.142857
| 0.883212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
40ac2bb6f3a5c73bc1a829f3d78cddcaa1a6ce3d
| 214,349
|
py
|
Python
|
multiple-languages/python/ros-cdk-polardb-1.0.4/src/ros_cdk_polardb/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
multiple-languages/python/ros-cdk-polardb-1.0.4/src/ros_cdk_polardb/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
multiple-languages/python/ros-cdk-polardb-1.0.4/src/ros_cdk_polardb/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
'''
## Aliyun ROS POLARDB Construct Library
This module is part of the AliCloud ROS Cloud Development Kit (ROS CDK) project.
```python
import ros_cdk_polardb as POLARDB
```
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from ._jsii import *
import ros_cdk_core
class Account(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.Account",
):
'''A ROS resource type: ``ALIYUN::POLARDB::Account``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "AccountProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::Account``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
class AccountPrivilege(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.AccountPrivilege",
):
'''A ROS resource type: ``ALIYUN::POLARDB::AccountPrivilege``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "AccountPrivilegeProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::AccountPrivilege``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.AccountPrivilegeProps",
jsii_struct_bases=[],
name_mapping={
"account_name": "accountName",
"account_privilege": "accountPrivilege",
"db_cluster_id": "dbClusterId",
"db_name": "dbName",
},
)
class AccountPrivilegeProps:
def __init__(
self,
*,
account_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_privilege: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::AccountPrivilege``.
:param account_name: Property accountName: The name of the database account to be granted access permissions.
:param account_privilege: Property accountPrivilege: The permissions of the database account on the database. Valid values: - ReadWrite: has read and write permissions on the database. - ReadOnly: has the read-only permission on the database. - DMLOnly: runs only data manipulation language (DML) statements. - DDLOnly: runs only data definition language (DDL) statements. The number of account permissions specified by the AccountPrivilege parameter must be the same as that of database names specified by the DBName parameter. Each account permission must correspond to a database name in sequence. Separate multiple permissions with a comma (,).
:param db_cluster_id: Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which a database account belongs.
:param db_name: Property dbName: The name of the database whose access permissions are to be granted to the database account. You can grant access permissions on one or more databases to the database account. Separate multiple databases with a comma (,).
'''
self._values: typing.Dict[str, typing.Any] = {
"account_name": account_name,
"account_privilege": account_privilege,
"db_cluster_id": db_cluster_id,
"db_name": db_name,
}
@builtins.property
def account_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property accountName: The name of the database account to be granted access permissions.'''
result = self._values.get("account_name")
assert result is not None, "Required property 'account_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_privilege(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property accountPrivilege: The permissions of the database account on the database.
Valid values:
- ReadWrite: has read and write permissions on the database.
- ReadOnly: has the read-only permission on the database.
- DMLOnly: runs only data manipulation language (DML) statements.
- DDLOnly: runs only data definition language (DDL) statements.
The number of account permissions specified by the AccountPrivilege parameter must be the same as that of database names specified by the DBName parameter. Each account permission must correspond to a database name in sequence.
Separate multiple permissions with a comma (,).
'''
result = self._values.get("account_privilege")
assert result is not None, "Required property 'account_privilege' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which a database account belongs.'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbName: The name of the database whose access permissions are to be granted to the database account.
You can grant access permissions on one or more databases to the database account.
Separate multiple databases with a comma (,).
'''
result = self._values.get("db_name")
assert result is not None, "Required property 'db_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AccountPrivilegeProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
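# hedged usage sketch (editor addition): the four required keyword-only fields
# of AccountPrivilegeProps map one-to-one onto the name_mapping above; the
# values here are illustrative only.
def _example_account_privilege_props():
props = AccountPrivilegeProps(
account_name="reader",
account_privilege="ReadOnly",
db_cluster_id="pc-example",
db_name="db1")
return repr(props)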
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.AccountProps",
jsii_struct_bases=[],
name_mapping={
"account_name": "accountName",
"account_password": "accountPassword",
"db_cluster_id": "dbClusterId",
"account_description": "accountDescription",
"account_privilege": "accountPrivilege",
"account_type": "accountType",
"db_name": "dbName",
},
)
class AccountProps:
def __init__(
self,
*,
account_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_password: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
account_privilege: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
account_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::Account``.
:param account_name: Property accountName: The name of the database account. The name must comply with the following rules: - It must start with a lowercase letter and consist of lowercase letters, digits, and underscores (_). - It can be up to 16 characters in length.
:param account_password: Property accountPassword: The password of the database account. The password must comply with the following rules: - It must consist of uppercase letters, lowercase letters, digits, and special characters. - Special characters include exclamation points (!), number signs (#), dollar signs ($), percent signs (%), carets (^), ampersands (&), asterisks (*), parentheses (()), underscores (_), plus signs (+), hyphens (-), and equal signs (=). - It must be 8 to 32 characters in length.
:param db_cluster_id: Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database account is to be created.
:param account_description: Property accountDescription: The description of the database account. The description must comply with the following rules: - It cannot start with http:// or https://. - It must be 2 to 256 characters in length.
:param account_privilege: Property accountPrivilege: The permissions of the database account on the database. Valid values: ReadWrite: has read and write permissions on the database. ReadOnly: has the read-only permission on the database. DMLOnly: runs only data manipulation language (DML) statements. DDLOnly: runs only data definition language (DDL) statements. Default value: ReadWrite. Separate multiple permissions with a comma (,).
:param account_type: Property accountType: The type of the database account. Valid values: - Normal: standard account - Super: privileged account Default value: Super. Currently, POLARDB for PostgreSQL and POLARDB compatible with Oracle do not support standard accounts. You can create only one privileged account for an ApsaraDB for POLARDB cluster.
:param db_name: Property dbName: The name of the database whose access permissions are to be granted to the database account. Separate multiple databases with a comma (,).
'''
self._values: typing.Dict[str, typing.Any] = {
"account_name": account_name,
"account_password": account_password,
"db_cluster_id": db_cluster_id,
}
if account_description is not None:
self._values["account_description"] = account_description
if account_privilege is not None:
self._values["account_privilege"] = account_privilege
if account_type is not None:
self._values["account_type"] = account_type
if db_name is not None:
self._values["db_name"] = db_name
@builtins.property
def account_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property accountName: The name of the database account.
The name must comply with the following rules:
- It must start with a lowercase letter and consist of lowercase letters, digits, and underscores (_).
- It can be up to 16 characters in length.
'''
result = self._values.get("account_name")
assert result is not None, "Required property 'account_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_password(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property accountPassword: The password of the database account.
The password must comply with the following rules:
- It must consist of uppercase letters, lowercase letters, digits, and special characters.
- Special characters include exclamation points (!), number signs (#), dollar signs ($), percent signs (%), carets (^), ampersands (&), asterisks (*), parentheses (()), underscores (_), plus signs (+), hyphens (-), and equal signs (=).
- It must be 8 to 32 characters in length.
'''
result = self._values.get("account_password")
assert result is not None, "Required property 'account_password' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database account is to be created.'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property accountDescription: The description of the database account.
The description must comply with the following rules:
- It cannot start with http:// or https://.
- It must be 2 to 256 characters in length.
'''
result = self._values.get("account_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def account_privilege(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property accountPrivilege: The permissions of the database account on the database.
Valid values:
ReadWrite: has read and write permissions on the database.
ReadOnly: has the read-only permission on the database.
DMLOnly: runs only data manipulation language (DML) statements.
DDLOnly: runs only data definition language (DDL) statements.
Default value: ReadWrite.
Separate multiple permissions with a comma (,).
'''
result = self._values.get("account_privilege")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def account_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property accountType: The type of the database account.
Valid values:
- Normal: standard account
- Super: privileged account
Default value: Super.
Currently, POLARDB for PostgreSQL and POLARDB compatible with Oracle do not support standard accounts.
You can create only one privileged account for an ApsaraDB for POLARDB cluster.
'''
result = self._values.get("account_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property dbName: The name of the database whose access permissions are to be granted to the database account.
Separate multiple databases with a comma (,).
'''
result = self._values.get("db_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AccountProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class DBCluster(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.DBCluster",
):
'''A ROS resource type: ``ALIYUN::POLARDB::DBCluster``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "DBClusterProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBCluster``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrClusterConnectionString")
def attr_cluster_connection_string(self) -> ros_cdk_core.IResolvable:
'''Attribute ClusterConnectionString: The cluster connection string of the db cluster.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrClusterConnectionString"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrClusterEndpointId")
def attr_cluster_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ClusterEndpointId: The cluster endpoint ID of the db cluster.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrClusterEndpointId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrCustomConnectionStrings")
def attr_custom_connection_strings(self) -> ros_cdk_core.IResolvable:
'''Attribute CustomConnectionStrings: The custom connection strings of the db cluster.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrCustomConnectionStrings"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrCustomEndpointIds")
def attr_custom_endpoint_ids(self) -> ros_cdk_core.IResolvable:
'''Attribute CustomEndpointIds: The custom endpoint IDs of the db cluster.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrCustomEndpointIds"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbClusterId")
def attr_db_cluster_id(self) -> ros_cdk_core.IResolvable:
'''Attribute DBClusterId: The ID of the ApsaraDB for POLARDB cluster.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbClusterId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbNodeIds")
def attr_db_node_ids(self) -> ros_cdk_core.IResolvable:
'''Attribute DBNodeIds: The ID list of cluster nodes.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbNodeIds"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrOrderId")
def attr_order_id(self) -> ros_cdk_core.IResolvable:
'''Attribute OrderId: The Order ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrOrderId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrPrimaryConnectionString")
def attr_primary_connection_string(self) -> ros_cdk_core.IResolvable:
'''Attribute PrimaryConnectionString: The primary connection string of the db cluster.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrPrimaryConnectionString"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrPrimaryEndpointId")
def attr_primary_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''Attribute PrimaryEndpointId: The primary endpoint ID of the db cluster.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrPrimaryEndpointId"))
class DBClusterAccessWhiteList(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.DBClusterAccessWhiteList",
):
'''A ROS resource type: ``ALIYUN::POLARDB::DBClusterAccessWhiteList``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "DBClusterAccessWhiteListProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBClusterAccessWhiteList``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbClusterId")
def attr_db_cluster_id(self) -> ros_cdk_core.IResolvable:
'''Attribute DBClusterId: The ID of the ApsaraDB for POLARDB cluster whose IP address whitelist is to be modified.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbClusterId"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.DBClusterAccessWhiteListProps",
jsii_struct_bases=[],
name_mapping={
"db_cluster_id": "dbClusterId",
"security_ips": "securityIps",
"db_cluster_ip_array_name": "dbClusterIpArrayName",
},
)
class DBClusterAccessWhiteListProps:
def __init__(
self,
*,
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
security_ips: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_ip_array_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining an ``ALIYUN::POLARDB::DBClusterAccessWhiteList``.
:param db_cluster_id: Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster whose IP address whitelist is to be modified.
:param security_ips: Property securityIps: The IP addresses to be added to the IP address whitelist group to be modified. Each whitelist group can contain a maximum of 1,000 IP addresses. Separate multiple IP addresses with a comma (,). The following two formats are supported: IP address: for example, 10.23.12.24. Classless inter-domain routing (CIDR) block: for example, 10.23.12.24/24, where the suffix /24 indicates the number of bits for the prefix of the IP address. The suffix ranges from 1 to 32.
:param db_cluster_ip_array_name: Property dbClusterIpArrayName: The name of the IP address whitelist group. If you do not specify this parameter, the Default whitelist group is modified by default. Note You can create up to 50 whitelist groups for an ApsaraDB for POLARDB cluster.
'''
self._values: typing.Dict[str, typing.Any] = {
"db_cluster_id": db_cluster_id,
"security_ips": security_ips,
}
if db_cluster_ip_array_name is not None:
self._values["db_cluster_ip_array_name"] = db_cluster_ip_array_name
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster whose IP address whitelist is to be modified.'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def security_ips(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property securityIps: The IP addresses to be added to the IP address whitelist group to be modified.
Each
whitelist group can contain a maximum of 1,000 IP addresses. Separate multiple IP
addresses with a comma (,). The following two formats are supported:
IP address: for example, 10.23.12.24.
Classless inter-domain routing (CIDR) block: for example, 10.23.12.24/24, where the
suffix /24 indicates the number of bits for the prefix of the IP address. The suffix
ranges from 1 to 32.
'''
result = self._values.get("security_ips")
assert result is not None, "Required property 'security_ips' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_ip_array_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property dbClusterIpArrayName: The name of the IP address whitelist group.
If you do not specify this parameter,
the Default whitelist group is modified by default.
Note You can create up to 50 whitelist groups for an ApsaraDB for POLARDB cluster.
'''
result = self._values.get("db_cluster_ip_array_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DBClusterAccessWhiteListProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
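# Usage sketch (illustrative): granting client IP ranges access to an existing
# cluster through its IP address whitelist. The cluster ID and addresses are
# placeholders; securityIps accepts comma-separated IPs and CIDR blocks.
def _example_whitelist(scope: ros_cdk_core.Construct) -> "DBClusterAccessWhiteList":
    props = DBClusterAccessWhiteListProps(
        db_cluster_id="pc-xxxxxxxx",  # placeholder cluster ID
        security_ips="10.23.12.24,10.23.12.0/24",  # plain IP plus a CIDR block
    )
    return DBClusterAccessWhiteList(scope, "ExampleWhiteList", props)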
class DBClusterEndpoint(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.DBClusterEndpoint",
):
'''A ROS resource type: ``ALIYUN::POLARDB::DBClusterEndpoint``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "DBClusterEndpointProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBClusterEndpoint``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAddresses")
def attr_addresses(self) -> ros_cdk_core.IResolvable:
'''Attribute Addresses: The address items of the db cluster endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAddresses"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrConnectionString")
def attr_connection_string(self) -> ros_cdk_core.IResolvable:
'''Attribute ConnectionString: The first connection string of the db cluster endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrConnectionString"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbEndpointId")
def attr_db_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''Attribute DBEndpointId: DB cluster endpoint ID.
E.g. pe-xxxxxxxx.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbEndpointId"))
class DBClusterEndpointAddress(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.DBClusterEndpointAddress",
):
'''A ROS resource type: ``ALIYUN::POLARDB::DBClusterEndpointAddress``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "DBClusterEndpointAddressProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBClusterEndpointAddress``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAddress")
def attr_address(self) -> ros_cdk_core.IResolvable:
'''Attribute Address: The details of the endpoint address.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAddress"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrConnectionString")
def attr_connection_string(self) -> ros_cdk_core.IResolvable:
'''Attribute ConnectionString: The connection string of the endpoint address.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrConnectionString"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.DBClusterEndpointAddressProps",
jsii_struct_bases=[],
name_mapping={
"db_cluster_id": "dbClusterId",
"db_endpoint_id": "dbEndpointId",
"connection_string_prefix": "connectionStringPrefix",
"net_type": "netType",
},
)
class DBClusterEndpointAddressProps:
def __init__(
self,
*,
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_endpoint_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
connection_string_prefix: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
net_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining an ``ALIYUN::POLARDB::DBClusterEndpointAddress``.
:param db_cluster_id: Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a public connection point is to be created.
:param db_endpoint_id: Property dbEndpointId: The ID of the cluster connection point.
:param connection_string_prefix: Property connectionStringPrefix: The prefix of the connection string. The prefix must comply with the following rules: It must start with a letter and consist of lowercase letters, digits, and hyphens (-), and cannot end with a hyphen. It must be 6 to 30 characters in length.
:param net_type: Property netType: The network type of the connection string. If set to Public, ROS will create, modify and delete a public address for you. If set to Private, ROS will only modify the private address for you. Default value: Public.
'''
self._values: typing.Dict[str, typing.Any] = {
"db_cluster_id": db_cluster_id,
"db_endpoint_id": db_endpoint_id,
}
if connection_string_prefix is not None:
self._values["connection_string_prefix"] = connection_string_prefix
if net_type is not None:
self._values["net_type"] = net_type
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a public connection point is to be created.'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_endpoint_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbEndpointId: The ID of the cluster connection point.'''
result = self._values.get("db_endpoint_id")
assert result is not None, "Required property 'db_endpoint_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def connection_string_prefix(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property connectionStringPrefix: The prefix of the connection string.
The prefix must comply with the following rules:
It must start with a letter and consist of lowercase letters, digits, and hyphens (-), and cannot end with a hyphen.
It must be 6 to 30 characters in length.
'''
result = self._values.get("connection_string_prefix")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def net_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property netType: The network type of the connection string.
If set to Public, ROS will create, modify and delete a public address for you.
If set to Private, ROS will only modify the private address for you.
Default value: Public.
'''
result = self._values.get("net_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DBClusterEndpointAddressProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
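# Usage sketch (illustrative): exposing an existing cluster endpoint through a
# public address. Both IDs and the prefix are placeholders; the prefix must be
# 6 to 30 characters of lowercase letters, digits, and hyphens, starting with
# a letter and not ending with a hyphen, per the property docs above.
def _example_endpoint_address(scope: ros_cdk_core.Construct) -> "DBClusterEndpointAddress":
    props = DBClusterEndpointAddressProps(
        db_cluster_id="pc-xxxxxxxx",   # placeholder cluster ID
        db_endpoint_id="pe-xxxxxxxx",  # placeholder endpoint ID
        connection_string_prefix="myapp-polardb",
        net_type="Public",             # also the default when omitted
    )
    return DBClusterEndpointAddress(scope, "ExampleEndpointAddress", props)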
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.DBClusterEndpointProps",
jsii_struct_bases=[],
name_mapping={
"db_cluster_id": "dbClusterId",
"auto_add_new_nodes": "autoAddNewNodes",
"endpoint_config": "endpointConfig",
"endpoint_type": "endpointType",
"nodes": "nodes",
"read_write_mode": "readWriteMode",
},
)
class DBClusterEndpointProps:
def __init__(
self,
*,
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
auto_add_new_nodes: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
endpoint_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBClusterEndpoint.EndpointConfigProperty"]] = None,
endpoint_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
nodes: typing.Optional[typing.Union[typing.Sequence[typing.Any], ros_cdk_core.IResolvable]] = None,
read_write_mode: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining an ``ALIYUN::POLARDB::DBClusterEndpoint``.
:param db_cluster_id: Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a custom connection point is to be created.
:param auto_add_new_nodes: Property autoAddNewNodes: Specifies whether a newly added node is automatically added to this connection point. Valid values: Enable, Disable. Default value: Disable.
:param endpoint_config: Property endpointConfig:.
:param endpoint_type: Property endpointType: The type of the cluster connection point. Set this parameter to Custom.
:param nodes: Property nodes: The nodes to be added to this connection point to process read requests from this connection point. Add at least two nodes. If you do not specify this parameter, all nodes of the cluster are added to this connection point by default.
:param read_write_mode: Property readWriteMode: The read/write mode of the cluster connection point. Valid values: ReadWrite: receives and forwards read and write requests (automatic read-write splitting). ReadOnly: receives and forwards only read requests. Default value: ReadOnly.
'''
self._values: typing.Dict[str, typing.Any] = {
"db_cluster_id": db_cluster_id,
}
if auto_add_new_nodes is not None:
self._values["auto_add_new_nodes"] = auto_add_new_nodes
if endpoint_config is not None:
self._values["endpoint_config"] = endpoint_config
if endpoint_type is not None:
self._values["endpoint_type"] = endpoint_type
if nodes is not None:
self._values["nodes"] = nodes
if read_write_mode is not None:
self._values["read_write_mode"] = read_write_mode
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a custom connection point is to be created.'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def auto_add_new_nodes(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property autoAddNewNodes: Specifies whether a newly added node is automatically added to this connection point.
Valid values: Enable, Disable.
Default value: Disable.
'''
result = self._values.get("auto_add_new_nodes")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def endpoint_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBClusterEndpoint.EndpointConfigProperty"]]:
'''Property endpointConfig:.'''
result = self._values.get("endpoint_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBClusterEndpoint.EndpointConfigProperty"]], result)
@builtins.property
def endpoint_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property endpointType: The type of the cluster connection point.
Set this parameter to Custom.
'''
result = self._values.get("endpoint_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def nodes(
self,
) -> typing.Optional[typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]]:
'''Property nodes: The nodes to be added to this connection point to process read requests from this connection point.
Add at least two nodes.
If you do not specify this parameter, all nodes of the cluster are added to this connection point by default.
'''
result = self._values.get("nodes")
return typing.cast(typing.Optional[typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]], result)
@builtins.property
def read_write_mode(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property readWriteMode: The read/write mode of the cluster connection point.
Valid values:
ReadWrite: receives and forwards read and write requests (automatic read-write splitting).
ReadOnly: receives and forwards only read requests.
Default value: ReadOnly.
'''
result = self._values.get("read_write_mode")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DBClusterEndpointProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
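# Usage sketch (illustrative): adding a read-only custom connection point that
# automatically picks up newly added nodes. The cluster ID is a placeholder;
# endpointType is set to Custom as the property docs above require.
def _example_custom_endpoint(scope: ros_cdk_core.Construct) -> "DBClusterEndpoint":
    props = DBClusterEndpointProps(
        db_cluster_id="pc-xxxxxxxx",  # placeholder cluster ID
        endpoint_type="Custom",
        read_write_mode="ReadOnly",   # forwards only read requests
        auto_add_new_nodes="Enable",  # new nodes join this connection point
    )
    return DBClusterEndpoint(scope, "ExampleCustomEndpoint", props)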
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.DBClusterProps",
jsii_struct_bases=[],
name_mapping={
"db_node_class": "dbNodeClass",
"db_type": "dbType",
"db_version": "dbVersion",
"pay_type": "payType",
"auto_renew_period": "autoRenewPeriod",
"backup_retention_policy_on_cluster_deletion": "backupRetentionPolicyOnClusterDeletion",
"clone_data_point": "cloneDataPoint",
"cluster_network_type": "clusterNetworkType",
"creation_category": "creationCategory",
"creation_option": "creationOption",
"db_cluster_description": "dbClusterDescription",
"db_cluster_parameters": "dbClusterParameters",
"default_time_zone": "defaultTimeZone",
"gdn_id": "gdnId",
"lower_case_table_names": "lowerCaseTableNames",
"maintain_time": "maintainTime",
"period": "period",
"renewal_status": "renewalStatus",
"resource_group_id": "resourceGroupId",
"security_group_ids": "securityGroupIds",
"security_ip_list": "securityIpList",
"source_resource_id": "sourceResourceId",
"tags": "tags",
"tde_status": "tdeStatus",
"vpc_id": "vpcId",
"v_switch_id": "vSwitchId",
"zone_id": "zoneId",
},
)
class DBClusterProps:
def __init__(
self,
*,
db_node_class: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_version: typing.Union[builtins.str, ros_cdk_core.IResolvable],
pay_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
auto_renew_period: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
backup_retention_policy_on_cluster_deletion: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
clone_data_point: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
cluster_network_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
creation_category: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
creation_option: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_cluster_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_cluster_parameters: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBCluster.DBClusterParametersProperty"]] = None,
default_time_zone: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
gdn_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
lower_case_table_names: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
maintain_time: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
period: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
renewal_status: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
resource_group_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
security_group_ids: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]] = None,
security_ip_list: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_resource_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
tags: typing.Optional[typing.Sequence["RosDBCluster.TagsProperty"]] = None,
tde_status: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
vpc_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
v_switch_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zone_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining an ``ALIYUN::POLARDB::DBCluster``.
:param db_node_class: Property dbNodeClass: The node specifications of the cluster. For more information, see Specifications and pricing.
:param db_type: Property dbType: The database type. Valid values: MySQL, PostgreSQL, Oracle.
:param db_version: Property dbVersion: The version of the database. Valid values: MySQL: 5.6, 5.7 or 8.0; PostgreSQL: 11; Oracle: 11.
:param pay_type: Property payType: The billing method of the cluster. Valid values: Postpaid (pay-as-you-go), Prepaid (subscription).
:param auto_renew_period: Property autoRenewPeriod: Set the cluster auto-renewal time. Valid values: 1, 2, 3, 6, 12, 24, 36. Default value: 1.
:param backup_retention_policy_on_cluster_deletion: Property backupRetentionPolicyOnClusterDeletion: The backup set retention policy used when a cluster is deleted. Valid values: ALL: permanently keeps all backups. LATEST: permanently keeps the latest backup (the automatic backup created before deletion). NONE: does not retain any backup sets when the cluster is deleted. When creating a cluster, the default value is NONE, that is, no backup sets are retained when the cluster is deleted. Note: This parameter takes effect only when the value of DBType is MySQL.
:param clone_data_point: Property cloneDataPoint: The time point of data to be cloned. Valid values: LATEST: clones data of the latest time point. A backup set ID: clones historical backup data; specify the ID of the specific backup set. A timestamp: clones data of a historical time point; specify the time in the yyyy-MM-ddTHH:mm:ssZ format. The time must be in UTC. Default value: LATEST. Note: This parameter takes effect only when the DBType parameter is set to MySQL, the DBVersion parameter is set to 5.6, and the CreationOption parameter is set to CloneFromRDS or CloneFromPolarDB. If the CreationOption parameter is set to CloneFromRDS, the value of this parameter must be LATEST.
:param cluster_network_type: Property clusterNetworkType: The network type of the cluster. Currently, only VPC is supported. Default value: VPC.
:param creation_category: Property creationCategory: Cluster series. The value could be Normal (standard version).
:param creation_option: Property creationOption: The method for creating an ApsaraDB for POLARDB cluster. Valid values: Normal: creates an ApsaraDB for POLARDB cluster. CloneFromPolarDB: clones data from an existing ApsaraDB for POLARDB cluster to a new ApsaraDB for POLARDB cluster. CloneFromRDS: clones data from an existing ApsaraDB for RDS instance to a new ApsaraDB for POLARDB cluster. MigrationFromRDS: migrates data from an existing ApsaraDB for RDS instance to a new ApsaraDB for POLARDB cluster. The created ApsaraDB for POLARDB cluster is in read-only mode and has binary logs enabled by default. CreateGdnStandby: Create a secondary cluster. Default value: Normal. Note: When DBType is MySQL and DBVersion is 5.6, this parameter can be specified as CloneFromRDS or MigrationFromRDS. When DBType is MySQL and DBVersion is 8.0, this parameter can be specified as CreateGdnStandby.
:param db_cluster_description: Property dbClusterDescription: The description of the cluster. The description must comply with the following rules: It must start with a Chinese character or an English letter. It can contain Chinese and English characters, digits, underscores (_), and hyphens (-). It cannot start with http:// or https://. It must be 2 to 256 characters in length.
:param db_cluster_parameters: Property dbClusterParameters: Modifies the parameters of the PolarDB cluster.
:param default_time_zone: Property defaultTimeZone: Set up a time zone (UTC). Valid values: System: the same time zone as the region where the cluster is located. This is the default value. Other valid values range from -12:00 to +13:00, for example, 00:00. Note: This parameter takes effect only when DBType is MySQL.
:param gdn_id: Property gdnId: The ID of the Global Database Network (GDN). Note: This parameter is required when the CreationOption is CreateGdnStandby.
:param lower_case_table_names: Property lowerCaseTableNames: Specifies whether table names are case-sensitive. Valid values: 1: not case-sensitive; 0: case-sensitive. Default value: 1. Note: This parameter takes effect only when the value of DBType is MySQL.
:param maintain_time: Property maintainTime: The maintenance window of the cluster, in the format HH:mmZ-HH:mmZ. Example: 16:00Z-17:00Z, which means the cluster can be maintained from 00:00 to 01:00 (UTC+08:00).
:param period: Property period: The subscription period of the cluster in month. Valid values: 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36.
:param renewal_status: Property renewalStatus: The auto-renewal status of the cluster. Valid values: AutoRenewal: automatically renews the cluster. Normal: manually renews the cluster. NotRenewal: does not renew the cluster. Default value: Normal. Note: If this parameter is set to NotRenewal, the system does not send a reminder for expiration, but only sends an SMS message three days before the cluster expires to remind you that the cluster is not renewed.
:param resource_group_id: Property resourceGroupId: The ID of the resource group.
:param security_group_ids: Property securityGroupIds: The IDs of the security groups. You can add up to three security groups to a cluster.
:param security_ip_list: Property securityIpList: The whitelist of the Apsara PolarDB cluster.
:param source_resource_id: Property sourceResourceId: The ID of the source RDS instance or source POLARDB cluster. Note This parameter takes effect only when the DBType parameter is set to MySQL and the DBVersion parameter is set to 5.6. This parameter is required if the CreationOption parameter is not set to Normal.
:param tags: Property tags: Tags to attach to the instance. A maximum of 20 tags can be added when the instance is created. Each tag has two properties, Key and Value; Key is required.
:param tde_status: Property tdeStatus: Specifies whether to enable Transparent Data Encryption (TDE). Valid values: true: enable TDE false: disable TDE (default) Note: The parameter takes effect only when DBType is PostgreSQL or Oracle. You cannot disable TDE after it is enabled.
:param vpc_id: Property vpcId: The ID of the VPC to connect to.
:param v_switch_id: Property vSwitchId: The ID of the VSwitch to connect to.
:param zone_id: Property zoneId: The zone ID of the cluster. You can call the DescribeRegions operation to query available zones.
'''
self._values: typing.Dict[str, typing.Any] = {
"db_node_class": db_node_class,
"db_type": db_type,
"db_version": db_version,
"pay_type": pay_type,
}
if auto_renew_period is not None:
self._values["auto_renew_period"] = auto_renew_period
if backup_retention_policy_on_cluster_deletion is not None:
self._values["backup_retention_policy_on_cluster_deletion"] = backup_retention_policy_on_cluster_deletion
if clone_data_point is not None:
self._values["clone_data_point"] = clone_data_point
if cluster_network_type is not None:
self._values["cluster_network_type"] = cluster_network_type
if creation_category is not None:
self._values["creation_category"] = creation_category
if creation_option is not None:
self._values["creation_option"] = creation_option
if db_cluster_description is not None:
self._values["db_cluster_description"] = db_cluster_description
if db_cluster_parameters is not None:
self._values["db_cluster_parameters"] = db_cluster_parameters
if default_time_zone is not None:
self._values["default_time_zone"] = default_time_zone
if gdn_id is not None:
self._values["gdn_id"] = gdn_id
if lower_case_table_names is not None:
self._values["lower_case_table_names"] = lower_case_table_names
if maintain_time is not None:
self._values["maintain_time"] = maintain_time
if period is not None:
self._values["period"] = period
if renewal_status is not None:
self._values["renewal_status"] = renewal_status
if resource_group_id is not None:
self._values["resource_group_id"] = resource_group_id
if security_group_ids is not None:
self._values["security_group_ids"] = security_group_ids
if security_ip_list is not None:
self._values["security_ip_list"] = security_ip_list
if source_resource_id is not None:
self._values["source_resource_id"] = source_resource_id
if tags is not None:
self._values["tags"] = tags
if tde_status is not None:
self._values["tde_status"] = tde_status
if vpc_id is not None:
self._values["vpc_id"] = vpc_id
if v_switch_id is not None:
self._values["v_switch_id"] = v_switch_id
if zone_id is not None:
self._values["zone_id"] = zone_id
@builtins.property
def db_node_class(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbNodeClass: The node specifications of the cluster.
For more information, see Specifications and pricing.
'''
result = self._values.get("db_node_class")
assert result is not None, "Required property 'db_node_class' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbType: The database type. Valid values: MySQL, PostgreSQL, Oracle.'''
result = self._values.get("db_type")
assert result is not None, "Required property 'db_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_version(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbVersion: The version of the database.
Valid values:
MySQL: 5.6, 5.7 or 8.0
PostgreSQL: 11
Oracle: 11
'''
result = self._values.get("db_version")
assert result is not None, "Required property 'db_version' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def pay_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property payType: The billing method of the cluster.
Valid values:
Postpaid: pay-as-you-go
Prepaid: subscription
'''
result = self._values.get("pay_type")
assert result is not None, "Required property 'pay_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def auto_renew_period(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property autoRenewPeriod: Set the cluster auto renewal time.
Valid values: 1, 2, 3, 6, 12, 24, 36. Default value: 1.
'''
result = self._values.get("auto_renew_period")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def backup_retention_policy_on_cluster_deletion(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property backupRetentionPolicyOnClusterDeletion: The backup set retention policy used when a cluster is deleted.
Valid values:
ALL: permanently keeps all backups.
LATEST: permanently keeps the latest backup (the automatic backup created before deletion).
NONE: does not retain any backup sets when the cluster is deleted.
When creating a cluster, the default value is NONE, that is, no backup sets are retained when the cluster is deleted.
Note: This parameter takes effect only when the value of DBType is MySQL.
'''
result = self._values.get("backup_retention_policy_on_cluster_deletion")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def clone_data_point(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property cloneDataPoint: The time point of data to be cloned.
Valid values:
LATEST: clones data of the latest time point.
A backup set ID: clones historical backup data. Specify the ID of the specific backup set.
A timestamp: clones data of a historical time point. Specify the specific time in
the yyyy-MM-ddTHH:mm:ssZ format. The time must be in UTC.
Default value: LATEST.
Note
This parameter takes effect only when the DBType parameter is set to MySQL, the DBVersion parameter is set to 5.6, and the CreationOption parameter is set to CloneFromRDS or CloneFromPolarDB.
If the CreationOption parameter is set to CloneFromRDS, the value of this parameter must be LATEST.
'''
result = self._values.get("clone_data_point")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def cluster_network_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property clusterNetworkType: The network type of the cluster.
Currently, only VPC is supported. Default value: VPC.
'''
result = self._values.get("cluster_network_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def creation_category(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property creationCategory: Cluster series.
The value could be Normal (standard version).
'''
result = self._values.get("creation_category")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def creation_option(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property creationOption: The method for creating an ApsaraDB for POLARDB cluster.
Valid values:
Normal: creates an ApsaraDB for POLARDB cluster.
CloneFromPolarDB: clones data from an existing ApsaraDB for POLARDB cluster to a new ApsaraDB for POLARDB cluster.
CloneFromRDS: clones data from an existing ApsaraDB for RDS instance to a new ApsaraDB
for POLARDB cluster.
MigrationFromRDS: migrates data from an existing ApsaraDB for RDS instance to a new ApsaraDB for POLARDB cluster. The created ApsaraDB for POLARDB cluster is in read-only mode and has binary logs enabled by default.
CreateGdnStandby: Create a secondary cluster.
Default value: Normal.
Note:
When DBType is MySQL and DBVersion is 5.6, this parameter can be specified as CloneFromRDS or MigrationFromRDS.
When DBType is MySQL and DBVersion is 8.0, this parameter can be specified as CreateGdnStandby.
'''
result = self._values.get("creation_option")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_cluster_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property dbClusterDescription: The description of the cluster.
The description must comply with the following rules:
It must start with a Chinese character or an English letter.
It can contain Chinese and English characters, digits, underscores (_), and hyphens (-).
It cannot start with http:// or https://.
It must be 2 to 256 characters in length.
'''
result = self._values.get("db_cluster_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_cluster_parameters(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBCluster.DBClusterParametersProperty"]]:
'''Property dbClusterParameters: Modifies the parameters of the PolarDB cluster.'''
result = self._values.get("db_cluster_parameters")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBCluster.DBClusterParametersProperty"]], result)
@builtins.property
def default_time_zone(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property defaultTimeZone: Set up a time zone (UTC).
Valid values:
System: the same time zone as the region where the cluster is located. This is the default value.
Other valid values range from -12:00 to +13:00, for example, 00:00.
Note: This parameter takes effect only when DBType is MySQL.
'''
result = self._values.get("default_time_zone")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def gdn_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property gdnId: The ID of the Global Database Network (GDN).
Note: This parameter is required when the CreationOption is CreateGdnStandby.
'''
result = self._values.get("gdn_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def lower_case_table_names(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property lowerCaseTableNames: Specifies whether table names are case-sensitive. Valid values: 1: not case-sensitive; 0: case-sensitive. Default value: 1.
Note: This parameter takes effect only when the value of DBType is MySQL.
'''
result = self._values.get("lower_case_table_names")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def maintain_time(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property maintainTime: The maintenance window of the cluster, in the format HH:mmZ-HH:mmZ.
Example: 16:00Z-17:00Z, which means the cluster can be maintained from 00:00 to 01:00 (UTC+08:00).
'''
result = self._values.get("maintain_time")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def period(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property period: The subscription period of the cluster in month.
Valid values: 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36.
'''
result = self._values.get("period")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def renewal_status(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property renewalStatus: The auto-renewal status of the cluster. Valid values: AutoRenewal: automatically renews the cluster.
Normal: manually renews the cluster.
NotRenewal: does not renew the cluster.
Default value: Normal.
Note If this parameter is set to NotRenewal, the system does not send a reminder for expiration,
but only sends an SMS message three days before the cluster expires to remind you
that the cluster is not renewed.
'''
result = self._values.get("renewal_status")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def resource_group_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property resourceGroupId: The ID of the resource group.'''
result = self._values.get("resource_group_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def security_group_ids(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]]:
'''Property securityGroupIds: The IDs of the security groups.
You can add up to three security groups to a cluster.
'''
result = self._values.get("security_group_ids")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]], result)
@builtins.property
def security_ip_list(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property securityIpList: The whitelist of the Apsara PolarDB cluster.'''
result = self._values.get("security_ip_list")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_resource_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property sourceResourceId: The ID of the source RDS instance or source POLARDB cluster.
Note
This parameter takes effect only when the DBType parameter is set to MySQL and the DBVersion parameter is set to 5.6.
This parameter is required if the CreationOption parameter is not set to Normal.
'''
result = self._values.get("source_resource_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def tags(self) -> typing.Optional[typing.List["RosDBCluster.TagsProperty"]]:
'''Property tags: Tags to attach to the instance.
A maximum of 20 tags can be added when the instance is created. Each tag has two properties, Key and Value; Key is required.
'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.List["RosDBCluster.TagsProperty"]], result)
@builtins.property
def tde_status(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property tdeStatus: Specifies whether to enable Transparent Data Encryption (TDE).
Valid values:
true: enable TDE
false: disable TDE (default)
Note: The parameter takes effect only when DBType is PostgreSQL or Oracle. You cannot disable TDE after it is enabled.
'''
result = self._values.get("tde_status")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def vpc_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property vpcId: The ID of the VPC to connect to.'''
result = self._values.get("vpc_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def v_switch_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property vSwitchId: The ID of the VSwitch to connect to.'''
result = self._values.get("v_switch_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zone_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property zoneId: The zone ID of the cluster.
You can call the DescribeRegions operation to query available zones.
'''
result = self._values.get("zone_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DBClusterProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
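# Usage sketch (illustrative): a subscription (Prepaid) cluster exercising a
# few of the optional properties documented above. All IDs and values are
# placeholders; period and autoRenewPeriod must come from the documented
# valid-value lists.
def _example_prepaid_cluster_props() -> "DBClusterProps":
    return DBClusterProps(
        db_node_class="polar.mysql.x4.medium",  # assumed node specification
        db_type="MySQL",
        db_version="8.0",
        pay_type="Prepaid",            # subscription billing
        period=12,                     # 12-month subscription
        renewal_status="AutoRenewal",
        auto_renew_period=12,
        security_ip_list="10.23.12.0/24",  # initial IP whitelist
        vpc_id="vpc-xxxxxxxx",         # placeholder VPC ID
        v_switch_id="vsw-xxxxxxxx",    # placeholder VSwitch ID
    )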
class DBInstance(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.DBInstance",
):
'''A ROS resource type: ``ALIYUN::POLARDB::DBInstance``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "DBInstanceProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBInstance``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.DBInstanceProps",
jsii_struct_bases=[],
name_mapping={
"character_set_name": "characterSetName",
"db_cluster_id": "dbClusterId",
"db_name": "dbName",
"account_name": "accountName",
"account_privilege": "accountPrivilege",
"db_description": "dbDescription",
},
)
class DBInstanceProps:
def __init__(
self,
*,
character_set_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
account_privilege: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining an ``ALIYUN::POLARDB::DBInstance``.
:param character_set_name: Property characterSetName: The character set of the database. For more information, see Character sets.
:param db_cluster_id: Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database is to be created.
:param db_name: Property dbName: The name of the database to be created. The name must comply with the following rules: It must start with a lowercase letter and consist of lowercase letters, digits, hyphens (-), and underscores (_). It must end with a letter or a digit. It can be up to 64 characters in length.
:param account_name: Property accountName: The name of the database account to be used.
:param account_privilege: Property accountPrivilege: The permissions of the database account on the database. Valid values: ReadWrite: has read and write permissions on the database. ReadOnly: has the read-only permission on the database. DMLOnly: runs only data manipulation language (DML) statements. DDLOnly: runs only data definition language (DDL) statements. Default value: ReadWrite.
:param db_description: Property dbDescription: The description of the database. The description must comply with the following rules: It cannot start with http:// or https://. It must be 2 to 256 characters in length.
'''
self._values: typing.Dict[str, typing.Any] = {
"character_set_name": character_set_name,
"db_cluster_id": db_cluster_id,
"db_name": db_name,
}
if account_name is not None:
self._values["account_name"] = account_name
if account_privilege is not None:
self._values["account_privilege"] = account_privilege
if db_description is not None:
self._values["db_description"] = db_description
@builtins.property
def character_set_name(
self,
) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property characterSetName: The character set of the database.
For more information, see Character sets.
'''
result = self._values.get("character_set_name")
assert result is not None, "Required property 'character_set_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database is to be created.'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbName: The name of the database to be created.
The name must comply with the following rules:
It must start with a lowercase letter and consist of lowercase letters, digits, hyphens
(-), and underscores (_).
It must end with a letter or a digit. It can be up to 64 characters in length.
'''
result = self._values.get("db_name")
assert result is not None, "Required property 'db_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property accountName: The name of the database account to be used.'''
result = self._values.get("account_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def account_privilege(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property accountPrivilege: The permissions of the database account on the database.
Valid values:
ReadWrite: has read and write permissions on the database.
ReadOnly: has the read-only permission on the database.
DMLOnly: runs only data manipulation language (DML) statements.
DDLOnly: runs only data definition language (DDL) statements.
Default value: ReadWrite.
'''
result = self._values.get("account_privilege")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property dbDescription: The description of the database.
The description must comply with the following rules:
It cannot start with http:// or https://.
It must be 2 to 256 characters in length.
'''
result = self._values.get("db_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DBInstanceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
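# Usage sketch (illustrative): creating a utf8mb4 database on an existing
# cluster and granting an existing account read-only access. The cluster ID
# and account name are placeholders.
def _example_create_database(scope: ros_cdk_core.Construct) -> "DBInstance":
    props = DBInstanceProps(
        character_set_name="utf8mb4",
        db_cluster_id="pc-xxxxxxxx",  # placeholder cluster ID
        db_name="app_db",             # lowercase letters, digits, -, _
        account_name="app_reader",    # assumed pre-existing account
        account_privilege="ReadOnly",
    )
    return DBInstance(scope, "ExampleDatabase", props)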
class DBNodes(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.DBNodes",
):
'''A ROS resource type: ``ALIYUN::POLARDB::DBNodes``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "DBNodesProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBNodes``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbNodeIds")
def attr_db_node_ids(self) -> ros_cdk_core.IResolvable:
'''Attribute DBNodeIds: The ID list of added cluster nodes.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbNodeIds"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrOrderIds")
def attr_order_ids(self) -> ros_cdk_core.IResolvable:
'''Attribute OrderIds: The order ID list of added cluster nodes.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrOrderIds"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.DBNodesProps",
jsii_struct_bases=[],
name_mapping={"amount": "amount", "db_cluster_id": "dbClusterId"},
)
class DBNodesProps:
def __init__(
self,
*,
amount: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining an ``ALIYUN::POLARDB::DBNodes``.
:param amount: Property amount: Number of nodes to be added to cluster.
:param db_cluster_id: Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which nodes are to be added.
'''
self._values: typing.Dict[str, typing.Any] = {
"amount": amount,
"db_cluster_id": db_cluster_id,
}
@builtins.property
def amount(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''Property amount: Number of nodes to be added to cluster.'''
result = self._values.get("amount")
assert result is not None, "Required property 'amount' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which nodes are to be added.'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DBNodesProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
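# Usage sketch (illustrative): scaling out an existing cluster by two read
# nodes; the IDs of the added nodes are then exposed through the attrDbNodeIds
# and attrOrderIds attributes documented above. The cluster ID is a placeholder.
def _example_add_nodes(scope: ros_cdk_core.Construct) -> "DBNodes":
    props = DBNodesProps(
        amount=2,                     # number of nodes to add
        db_cluster_id="pc-xxxxxxxx",  # placeholder cluster ID
    )
    return DBNodes(scope, "ExampleDBNodes", props)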
class RosAccount(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosAccount",
):
'''A ROS template type: ``ALIYUN::POLARDB::Account``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosAccountProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::Account``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountName")
def account_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
accountName: The name of the database account. The name must comply with the following rules:
- It must start with a lowercase letter and consist of lowercase letters, digits, and underscores (_).
- It can be up to 16 characters in length.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "accountName"))
@account_name.setter
def account_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "accountName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountPassword")
def account_password(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
accountPassword: The password of the database account. The password must comply with the following rules:
- It must consist of uppercase letters, lowercase letters, digits, and special characters.
- Special characters include exclamation points (!), number signs (#), dollar signs ($), percent signs (%), carets (^), ampersands (&), asterisks (*), parentheses (()), underscores (_), plus signs (+), hyphens (-), and equal signs (=).
- It must be 8 to 32 characters in length.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "accountPassword"))
@account_password.setter
def account_password(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "accountPassword", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterId")
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database account is to be created.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbClusterId"))
@db_cluster_id.setter
def db_cluster_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbClusterId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
'''Whether the resource property constraint is enabled for this resource.'''
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountDescription")
def account_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountDescription: The description of the database account. The description must comply with the following rules:
- It cannot start with http:// or https://.
- It must be 2 to 256 characters in length.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "accountDescription"))
@account_description.setter
def account_description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "accountDescription", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountPrivilege")
def account_privilege(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountPrivilege: The permissions of the database account on the database. Valid values:
- ReadWrite: has read and write permissions on the database.
- ReadOnly: has the read-only permission on the database.
- DMLOnly: runs only data manipulation language (DML) statements.
- DDLOnly: runs only data definition language (DDL) statements.
Default value: ReadWrite.
Separate multiple permissions with a comma (,).
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "accountPrivilege"))
@account_privilege.setter
def account_privilege(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "accountPrivilege", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountType")
def account_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountType: The type of the database account. Valid values:
- Normal: standard account
- Super: privileged account
Default value: Super.
Currently, POLARDB for PostgreSQL and POLARDB compatible with Oracle do not support standard accounts.
You can create only one privileged account for an ApsaraDB for POLARDB cluster.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "accountType"))
@account_type.setter
def account_type(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "accountType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbName")
def db_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: dbName: The name of the database whose access permissions are to be granted to the database account. Separate multiple databases with a comma (,).
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "dbName"))
@db_name.setter
def db_name(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "dbName", value)
class RosAccountPrivilege(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosAccountPrivilege",
):
'''A ROS template type: ``ALIYUN::POLARDB::AccountPrivilege``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosAccountPrivilegeProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::AccountPrivilege``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountName")
def account_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: accountName: The name of the database account to be granted access permissions.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "accountName"))
@account_name.setter
def account_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "accountName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountPrivilege")
def account_privilege(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
accountPrivilege: The permissions of the database account on the database. Valid values:
- ReadWrite: has read and write permissions on the database.
- ReadOnly: has the read-only permission on the database.
- DMLOnly: runs only data manipulation language (DML) statements.
- DDLOnly: runs only data definition language (DDL) statements.
The number of account permissions specified by the AccountPrivilege parameter must be the same as that of database names specified by the DBName parameter. Each account permission must correspond to a database name in sequence.
Separate multiple permissions with a comma (,).
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "accountPrivilege"))
@account_privilege.setter
def account_privilege(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "accountPrivilege", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterId")
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which a database account belongs.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbClusterId"))
@db_cluster_id.setter
def db_cluster_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbClusterId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbName")
def db_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbName: The name of the database whose access permissions are to be granted to the database account.
You can grant access permissions on one or more databases to the database account.
Separate multiple databases with a comma (,).
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbName"))
@db_name.setter
def db_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
'''Whether the resource property constraint check is enabled for this resource.'''
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosAccountPrivilegeProps",
jsii_struct_bases=[],
name_mapping={
"account_name": "accountName",
"account_privilege": "accountPrivilege",
"db_cluster_id": "dbClusterId",
"db_name": "dbName",
},
)
class RosAccountPrivilegeProps:
def __init__(
self,
*,
account_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_privilege: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::AccountPrivilege``.
:param account_name:
:param account_privilege:
:param db_cluster_id:
:param db_name:
'''
self._values: typing.Dict[str, typing.Any] = {
"account_name": account_name,
"account_privilege": account_privilege,
"db_cluster_id": db_cluster_id,
"db_name": db_name,
}
@builtins.property
def account_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: accountName: The name of the database account to be granted access permissions.
'''
result = self._values.get("account_name")
assert result is not None, "Required property 'account_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_privilege(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
accountPrivilege: The permissions of the database account on the database. Valid values:
- ReadWrite: has read and write permissions on the database.
- ReadOnly: has the read-only permission on the database.
- DMLOnly: runs only data manipulation language (DML) statements.
- DDLOnly: runs only data definition language (DDL) statements.
The number of account permissions specified by the AccountPrivilege parameter must be the same as that of database names specified by the DBName parameter. Each account permission must correspond to a database name in sequence.
Separate multiple permissions with a comma (,).
'''
result = self._values.get("account_privilege")
assert result is not None, "Required property 'account_privilege' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which a database account belongs.
'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbName: The name of the database whose access permissions are to be granted to the database account.
You can grant access permissions on one or more databases to the database account.
Separate multiple databases with a comma (,).
'''
result = self._values.get("db_name")
assert result is not None, "Required property 'db_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosAccountPrivilegeProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
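# --- Illustrative helper (not part of the generated bindings) ---
# The docstrings above require AccountPrivilege and DBName to contain the same
# number of comma-separated entries. A small sketch of that validation:
def _check_privilege_db_alignment(account_privilege: str, db_name: str) -> None:
    privileges = account_privilege.split(",")
    databases = db_name.split(",")
    if len(privileges) != len(databases):
        raise ValueError(
            f"AccountPrivilege lists {len(privileges)} permission(s) "
            f"but DBName lists {len(databases)} database(s); the counts must match"
        )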
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosAccountProps",
jsii_struct_bases=[],
name_mapping={
"account_name": "accountName",
"account_password": "accountPassword",
"db_cluster_id": "dbClusterId",
"account_description": "accountDescription",
"account_privilege": "accountPrivilege",
"account_type": "accountType",
"db_name": "dbName",
},
)
class RosAccountProps:
def __init__(
self,
*,
account_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_password: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
account_privilege: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
account_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::Account``.
:param account_name:
:param account_password:
:param db_cluster_id:
:param account_description:
:param account_privilege:
:param account_type:
:param db_name:
'''
self._values: typing.Dict[str, typing.Any] = {
"account_name": account_name,
"account_password": account_password,
"db_cluster_id": db_cluster_id,
}
if account_description is not None:
self._values["account_description"] = account_description
if account_privilege is not None:
self._values["account_privilege"] = account_privilege
if account_type is not None:
self._values["account_type"] = account_type
if db_name is not None:
self._values["db_name"] = db_name
@builtins.property
def account_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
accountName: The name of the database account. The name must comply with the following rules:
- It must start with a lowercase letter and consist of lowercase letters, digits, and underscores (_).
- It can be up to 16 characters in length.
'''
result = self._values.get("account_name")
assert result is not None, "Required property 'account_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_password(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
accountPassword: The password of the database account. The password must comply with the following rules:
- It must consist of uppercase letters, lowercase letters, digits, and special characters.
- Special characters include exclamation points (!), number signs (#), dollar signs ($), percent signs (%), carets (^), ampersands (&), asterisks (*), parentheses (()), underscores (_), plus signs (+), hyphens (-), and equal signs (=).
- It must be 8 to 32 characters in length.
'''
result = self._values.get("account_password")
assert result is not None, "Required property 'account_password' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database account is to be created.
'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountDescription: The description of the database account. The description must comply with the following rules:
- It cannot start with http:// or https://.
- It must be 2 to 256 characters in length.
'''
result = self._values.get("account_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def account_privilege(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountPrivilege: The permissions of the database account on the database. Valid values:
- ReadWrite: has read and write permissions on the database.
- ReadOnly: has the read-only permission on the database.
- DMLOnly: runs only data manipulation language (DML) statements.
- DDLOnly: runs only data definition language (DDL) statements.
Default value: ReadWrite.
Separate multiple permissions with a comma (,).
'''
result = self._values.get("account_privilege")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def account_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountType: The type of the database account. Valid values:
- Normal: standard account
- Super: privileged account
Default value: Super.
Currently, POLARDB for PostgreSQL and POLARDB compatible with Oracle do not support standard accounts.
You can create only one privileged account for an ApsaraDB for POLARDB cluster.
'''
result = self._values.get("account_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: dbName: The name of the database whose access permissions are to be granted to the database account. Separate multiple databases with a comma (,).
'''
result = self._values.get("db_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosAccountProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
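# --- Illustrative usage sketch (not part of the generated bindings) ---
# Building RosAccountProps with the optional fields. Only one privileged (Super)
# account is allowed per cluster, so a standard (Normal) account is shown here.
# All concrete values are hypothetical placeholders.
def _example_account_props() -> RosAccountProps:
    return RosAccountProps(
        account_name="demo_reader",                    # starts lowercase, <= 16 characters
        account_password="Example#Passw0rd",           # 8-32 characters, mixed character classes
        db_cluster_id="pc-xxxxxxxx",
        account_description="read-only demo account",  # 2-256 characters, no http(s):// prefix
        account_privilege="ReadOnly",
        account_type="Normal",
        db_name="db1",
    )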
class RosDBCluster(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosDBCluster",
):
'''A ROS template type: ``ALIYUN::POLARDB::DBCluster``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosDBClusterProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBCluster``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrClusterConnectionString")
def attr_cluster_connection_string(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ClusterConnectionString: The cluster connection string of the db cluster.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrClusterConnectionString"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrClusterEndpointId")
def attr_cluster_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ClusterEndpointId: The cluster endpoint ID of the db cluster.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrClusterEndpointId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrCustomConnectionStrings")
def attr_custom_connection_strings(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: CustomConnectionStrings: The custom connection strings of the db cluster.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrCustomConnectionStrings"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrCustomEndpointIds")
def attr_custom_endpoint_ids(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: CustomEndpointIds: The custom endpoint IDs of the db cluster.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrCustomEndpointIds"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbClusterId")
def attr_db_cluster_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: DBClusterId: The ID of the ApsaraDB for POLARDB cluster.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbClusterId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbNodeIds")
def attr_db_node_ids(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: DBNodeIds: The ID list of cluster nodes.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbNodeIds"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrOrderId")
def attr_order_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: OrderId: The Order ID.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrOrderId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrPrimaryConnectionString")
def attr_primary_connection_string(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: PrimaryConnectionString: The primary connection string of the db cluster.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrPrimaryConnectionString"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrPrimaryEndpointId")
def attr_primary_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: PrimaryEndpointId: The primary endpoint ID of the db cluster.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrPrimaryEndpointId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbNodeClass")
def db_node_class(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbNodeClass: The node specifications of the cluster. For more information, see Specifications and pricing.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbNodeClass"))
@db_node_class.setter
def db_node_class(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbNodeClass", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbType")
def db_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbType: The database type. Valid values:
- MySQL
- PostgreSQL
- Oracle
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbType"))
@db_type.setter
def db_type(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbVersion")
def db_version(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbVersion: The version of the database. Valid values:
- MySQL: 5.6, 5.7, or 8.0
- PostgreSQL: 11
- Oracle: 11
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbVersion"))
@db_version.setter
def db_version(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbVersion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="payType")
def pay_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
payType: The billing method of the cluster. Valid values:
- Postpaid: pay-as-you-go
- Prepaid: subscription
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "payType"))
@pay_type.setter
def pay_type(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "payType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="autoRenewPeriod")
def auto_renew_period(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: autoRenewPeriod: Set the cluster auto renewal time. Valid values: 1, 2, 3, 6, 12, 24, 36. Default to 1.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "autoRenewPeriod"))
@auto_renew_period.setter
def auto_renew_period(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "autoRenewPeriod", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="backupRetentionPolicyOnClusterDeletion")
def backup_retention_policy_on_cluster_deletion(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
backupRetentionPolicyOnClusterDeletion: The backup set retention policy that applies when the cluster is deleted. Valid values:
- ALL: permanently keeps all backups.
- LATEST: permanently keeps the last backup (the automatic backup taken before deletion).
- NONE: does not retain backup sets when the cluster is deleted.
When a cluster is created, the default value is NONE, that is, backup sets are not retained on cluster deletion.
Note: This parameter takes effect only when the value of DBType is MySQL.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "backupRetentionPolicyOnClusterDeletion"))
@backup_retention_policy_on_cluster_deletion.setter
def backup_retention_policy_on_cluster_deletion(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "backupRetentionPolicyOnClusterDeletion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="cloneDataPoint")
def clone_data_point(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
cloneDataPoint: The time point of data to be cloned. Valid values:
- LATEST: clones data of the latest time point.
- (a backup set ID): clones historical backup data. Specify the ID of the specific backup set.
- (a time point): clones data of a historical time point. Specify the specific time in the yyyy-MM-ddTHH:mm:ssZ format. The time must be in UTC.
Default value: LATEST.
Note:
This parameter takes effect only when the DBType parameter is set to MySQL, the DBVersion parameter is set to 5.6, and the CreationOption parameter is set to CloneFromRDS or CloneFromPolarDB.
If the CreationOption parameter is set to CloneFromRDS, the value of this parameter must be LATEST.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "cloneDataPoint"))
@clone_data_point.setter
def clone_data_point(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "cloneDataPoint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="clusterNetworkType")
def cluster_network_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: clusterNetworkType: The network type of the cluster. Currently, only VPC is supported. Default value: VPC.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "clusterNetworkType"))
@cluster_network_type.setter
def cluster_network_type(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "clusterNetworkType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="creationCategory")
def creation_category(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: creationCategory: Cluster series. The value could be Normal (standard version).
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "creationCategory"))
@creation_category.setter
def creation_category(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "creationCategory", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="creationOption")
def creation_option(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
creationOption: The method for creating an ApsaraDB for POLARDB cluster. Valid values:
- Normal: creates an ApsaraDB for POLARDB cluster.
- CloneFromPolarDB: clones data from an existing ApsaraDB for POLARDB cluster to a new ApsaraDB for POLARDB cluster.
- CloneFromRDS: clones data from an existing ApsaraDB for RDS instance to a new ApsaraDB for POLARDB cluster.
- MigrationFromRDS: migrates data from an existing ApsaraDB for RDS instance to a new ApsaraDB for POLARDB cluster. The created ApsaraDB for POLARDB cluster is in read-only mode and has binary logs enabled by default.
- CreateGdnStandby: creates a secondary cluster.
Default value: Normal.
Note:
When DBType is MySQL and DBVersion is 5.6, this parameter can be set to CloneFromRDS or MigrationFromRDS.
When DBType is MySQL and DBVersion is 8.0, this parameter can be set to CreateGdnStandby.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "creationOption"))
@creation_option.setter
def creation_option(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "creationOption", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterDescription")
def db_cluster_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
dbClusterDescription: The description of the cluster. The description must comply with the following rules:
- It must start with a Chinese character or an English letter.
- It can contain Chinese and English characters, digits, underscores (_), and hyphens (-).
- It cannot start with http:// or https://.
- It must be 2 to 256 characters in length.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "dbClusterDescription"))
@db_cluster_description.setter
def db_cluster_description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "dbClusterDescription", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterParameters")
def db_cluster_parameters(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBCluster.DBClusterParametersProperty"]]:
'''
:Property: dbClusterParameters: Modifies the parameters of the PolarDB cluster.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBCluster.DBClusterParametersProperty"]], jsii.get(self, "dbClusterParameters"))
@db_cluster_parameters.setter
def db_cluster_parameters(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBCluster.DBClusterParametersProperty"]],
) -> None:
jsii.set(self, "dbClusterParameters", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="defaultTimeZone")
def default_time_zone(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
defaultTimeZone: The time zone of the cluster (UTC). Valid values:
- System: the same time zone as the region in which the cluster is located. This is the default value.
- Any value in the range from -12:00 to +13:00, for example, 00:00.
Note: This parameter takes effect only when DBType is MySQL.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "defaultTimeZone"))
@default_time_zone.setter
def default_time_zone(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "defaultTimeZone", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="gdnId")
def gdn_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
gdnId: The ID of the Global Database Network (GDN).
Note: This parameter is required when the CreationOption is CreateGdnStandby.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "gdnId"))
@gdn_id.setter
def gdn_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "gdnId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="lowerCaseTableNames")
def lower_case_table_names(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property:
lowerCaseTableNames: Specifies whether table names are case-sensitive. Valid values:
- 1: not case-sensitive
- 0: case-sensitive
Default value: 1.
Note: This parameter takes effect only when the value of DBType is MySQL.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "lowerCaseTableNames"))
@lower_case_table_names.setter
def lower_case_table_names(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "lowerCaseTableNames", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="maintainTime")
def maintain_time(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
maintainTime: The maintenance window of the cluster.
Format: HH:mmZ-HH:mmZ.
Example: 16:00Z-17:00Z, which means that routine maintenance is performed from 00:00 to 01:00 (UTC+08:00).
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "maintainTime"))
@maintain_time.setter
def maintain_time(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "maintainTime", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="period")
def period(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: period: The subscription period of the cluster in month. Valid values: 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "period"))
@period.setter
def period(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "period", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="renewalStatus")
def renewal_status(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
renewalStatus: The auto-renewal status of the cluster. Valid values:
- AutoRenewal: automatically renews the cluster.
- Normal: manually renews the cluster.
- NotRenewal: does not renew the cluster.
Default value: Normal.
Note: If this parameter is set to NotRenewal, the system does not send an expiration reminder; it only sends an SMS message three days before the cluster expires to remind you that the cluster will not be renewed.
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "renewalStatus"))
@renewal_status.setter
def renewal_status(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "renewalStatus", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="resourceGroupId")
def resource_group_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: resourceGroupId: The ID of the resource group.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "resourceGroupId"))
@resource_group_id.setter
def resource_group_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "resourceGroupId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="securityGroupIds")
def security_group_ids(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]]:
'''
:Property:
securityGroupIds: The ID of the security group.
You can add up to three security groups to a cluster.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]], jsii.get(self, "securityGroupIds"))
@security_group_ids.setter
def security_group_ids(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]],
) -> None:
jsii.set(self, "securityGroupIds", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="securityIpList")
def security_ip_list(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: securityIpList: The IP address whitelist of the ApsaraDB for POLARDB cluster.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "securityIpList"))
@security_ip_list.setter
def security_ip_list(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "securityIpList", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="sourceResourceId")
def source_resource_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceResourceId: The ID of the source RDS instance or source POLARDB cluster.
Note
This parameter takes effect only when the DBType parameter is set to MySQL and the DBVersion parameter is set to 5.6.
This parameter is required if the CreationOption parameter is not set to Normal.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "sourceResourceId"))
@source_resource_id.setter
def source_resource_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "sourceResourceId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="tags")
def tags(self) -> typing.Optional[typing.List["RosDBCluster.TagsProperty"]]:
'''
:Property: tags: The tags to attach to the cluster. A maximum of 20 tags can be added when the cluster is created. Each tag has two properties, Key and Value; Key is required.
'''
return typing.cast(typing.Optional[typing.List["RosDBCluster.TagsProperty"]], jsii.get(self, "tags"))
@tags.setter
def tags(
self,
value: typing.Optional[typing.List["RosDBCluster.TagsProperty"]],
) -> None:
jsii.set(self, "tags", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="tdeStatus")
def tde_status(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
tdeStatus: Specifies whether to enable Transparent Data Encryption (TDE). Valid values:
- true: enables TDE.
- false: disables TDE (default).
Note: This parameter takes effect only when DBType is PostgreSQL or Oracle. You cannot disable TDE after it is enabled.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "tdeStatus"))
@tde_status.setter
def tde_status(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "tdeStatus", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="vpcId")
def vpc_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: vpcId: The ID of the VPC to connect to.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "vpcId"))
@vpc_id.setter
def vpc_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "vpcId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="vSwitchId")
def v_switch_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: vSwitchId: The ID of the VSwitch to connect to.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "vSwitchId"))
@v_switch_id.setter
def v_switch_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "vSwitchId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="zoneId")
def zone_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: zoneId: The zone ID of the cluster. You can call the DescribeRegions operation to query available zones.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "zoneId"))
@zone_id.setter
def zone_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "zoneId", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBCluster.DBClusterParametersProperty",
jsii_struct_bases=[],
name_mapping={"effective_time": "effectiveTime", "parameters": "parameters"},
)
class DBClusterParametersProperty:
def __init__(
self,
*,
effective_time: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
parameters: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param effective_time:
:param parameters:
'''
self._values: typing.Dict[str, typing.Any] = {}
if effective_time is not None:
self._values["effective_time"] = effective_time
if parameters is not None:
self._values["parameters"] = parameters
@builtins.property
def effective_time(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
effectiveTime: The time when the modified values of parameters take effect. Valid values:
- Auto: The system automatically determines how the modified values take effect. If all the modified values can take effect without a cluster restart, they take effect immediately. If a cluster restart is required for some of them, all of them take effect after a cluster restart is performed within the maintenance window.
- Immediately: If all the modified values can take effect without a cluster restart, the modifications take effect immediately. If a cluster restart is required for some of them, the cluster is immediately restarted for the modifications to take effect.
- MaintainTime: All the modified values of parameters take effect within the maintenance window.
Default value: Auto.
'''
result = self._values.get("effective_time")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def parameters(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
parameters: A JSON string that consists of parameter names and values. The parameter
values are strings, for example,
{"auto_increment_increment":"1","character_set_filesystem":"utf8"}.
You can call the DescribeDBClusterParameters operation to view the parameters
of the PolarDB cluster.
'''
result = self._values.get("parameters")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DBClusterParametersProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBCluster.TagsProperty",
jsii_struct_bases=[],
name_mapping={"key": "key", "value": "value"},
)
class TagsProperty:
def __init__(
self,
*,
key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param key:
:param value:
'''
self._values: typing.Dict[str, typing.Any] = {
"key": key,
}
if value is not None:
self._values["value"] = value
@builtins.property
def key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: key: undefined
'''
result = self._values.get("key")
assert result is not None, "Required property 'key' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def value(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: value: undefined
'''
result = self._values.get("value")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TagsProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
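# --- Illustrative usage sketch (not part of the generated bindings) ---
# Declaring a pay-as-you-go MySQL 8.0 cluster with RosDBCluster, including the
# cluster parameters JSON and a tag. RosDBClusterProps is assumed to mirror the
# properties documented above, following the Props pattern used throughout this
# module; all concrete values are hypothetical placeholders.
def _example_create_db_cluster(scope: ros_cdk_core.Construct) -> RosDBCluster:
    return RosDBCluster(
        scope,
        "DBCluster",
        props=RosDBClusterProps(
            db_node_class="polar.mysql.x4.medium",  # hypothetical node specification
            db_type="MySQL",
            db_version="8.0",
            pay_type="Postpaid",                    # pay-as-you-go billing
            db_cluster_parameters=RosDBCluster.DBClusterParametersProperty(
                effective_time="Auto",
                parameters='{"auto_increment_increment":"1"}',
            ),
            tags=[RosDBCluster.TagsProperty(key="env", value="dev")],
        ),
        enable_resource_property_constraint=True,
    )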
class RosDBClusterAccessWhiteList(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterAccessWhiteList",
):
'''A ROS template type: ``ALIYUN::POLARDB::DBClusterAccessWhiteList``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosDBClusterAccessWhiteListProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBClusterAccessWhiteList``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbClusterId")
def attr_db_cluster_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: DBClusterId: The ID of the ApsaraDB for POLARDB cluster whose IP address whitelist is to be modified.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbClusterId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterId")
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster whose IP address whitelist is to be modified.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbClusterId"))
@db_cluster_id.setter
def db_cluster_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbClusterId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="securityIps")
def security_ips(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
securityIps: The IP addresses to be added to the IP address whitelist group to be modified.
Each whitelist group can contain a maximum of 1,000 IP addresses. Separate multiple
IP addresses with a comma (,). The following two formats are supported:
- IP address: for example, 10.23.12.24.
- Classless Inter-Domain Routing (CIDR) block: for example, 10.23.12.24/24, where the suffix /24 indicates the number of bits for the prefix of the IP address. The suffix ranges from 1 to 32.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "securityIps"))
@security_ips.setter
def security_ips(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "securityIps", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterIpArrayName")
def db_cluster_ip_array_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
dbClusterIpArrayName: The name of the IP address whitelist group. If you do not specify this parameter,
the Default whitelist group is modified by default.
Note: You can create up to 50 whitelist groups for an ApsaraDB for POLARDB cluster.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "dbClusterIpArrayName"))
@db_cluster_ip_array_name.setter
def db_cluster_ip_array_name(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "dbClusterIpArrayName", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterAccessWhiteListProps",
jsii_struct_bases=[],
name_mapping={
"db_cluster_id": "dbClusterId",
"security_ips": "securityIps",
"db_cluster_ip_array_name": "dbClusterIpArrayName",
},
)
class RosDBClusterAccessWhiteListProps:
def __init__(
self,
*,
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
security_ips: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_ip_array_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::DBClusterAccessWhiteList``.
:param db_cluster_id:
:param security_ips:
:param db_cluster_ip_array_name:
'''
self._values: typing.Dict[str, typing.Any] = {
"db_cluster_id": db_cluster_id,
"security_ips": security_ips,
}
if db_cluster_ip_array_name is not None:
self._values["db_cluster_ip_array_name"] = db_cluster_ip_array_name
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster whose IP address whitelist is to be modified.
'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def security_ips(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
securityIps: The IP addresses to be added to the IP address whitelist group to be modified.
Each whitelist group can contain a maximum of 1,000 IP addresses. Separate multiple
IP addresses with a comma (,). The following two formats are supported:
- IP address: for example, 10.23.12.24.
- Classless Inter-Domain Routing (CIDR) block: for example, 10.23.12.24/24, where the suffix /24 indicates the number of bits for the prefix of the IP address. The suffix ranges from 1 to 32.
'''
result = self._values.get("security_ips")
assert result is not None, "Required property 'security_ips' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_ip_array_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
dbClusterIpArrayName: The name of the IP address whitelist group. If you do not specify this parameter,
the Default whitelist group is modified by default.
Note: You can create up to 50 whitelist groups for an ApsaraDB for POLARDB cluster.
'''
result = self._values.get("db_cluster_ip_array_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosDBClusterAccessWhiteListProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
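# --- Illustrative usage sketch (not part of the generated bindings) ---
# Modifying a cluster IP whitelist with RosDBClusterAccessWhiteList, mixing a plain
# IP address with a CIDR block as described above. All values are hypothetical.
def _example_set_access_white_list(scope: ros_cdk_core.Construct) -> None:
    RosDBClusterAccessWhiteList(
        scope,
        "AccessWhiteList",
        props=RosDBClusterAccessWhiteListProps(
            db_cluster_id="pc-xxxxxxxx",
            security_ips="10.23.12.24,10.23.12.0/24",  # plain IP plus CIDR entry
            db_cluster_ip_array_name="demo_group",     # optional whitelist group name
        ),
        enable_resource_property_constraint=True,
    )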
class RosDBClusterEndpoint(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterEndpoint",
):
'''A ROS template type: ``ALIYUN::POLARDB::DBClusterEndpoint``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosDBClusterEndpointProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBClusterEndpoint``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAddresses")
def attr_addresses(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Addresses: The address items of the db cluster endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAddresses"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrConnectionString")
def attr_connection_string(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ConnectionString: The first connection string of the db cluster endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrConnectionString"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbEndpointId")
def attr_db_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: DBEndpointId: DB cluster endpoint ID. E.g. pe-xxxxxxxx.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbEndpointId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterId")
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a custom connection point is to be created.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbClusterId"))
@db_cluster_id.setter
def db_cluster_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbClusterId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="autoAddNewNodes")
def auto_add_new_nodes(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
autoAddNewNodes: Specifies whether a newly added node is automatically added to this connection point.
Valid values: Enable, Disable.
Default value: Disable.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "autoAddNewNodes"))
@auto_add_new_nodes.setter
def auto_add_new_nodes(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "autoAddNewNodes", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="endpointConfig")
def endpoint_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBClusterEndpoint.EndpointConfigProperty"]]:
'''
:Property: endpointConfig:
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBClusterEndpoint.EndpointConfigProperty"]], jsii.get(self, "endpointConfig"))
@endpoint_config.setter
def endpoint_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosDBClusterEndpoint.EndpointConfigProperty"]],
) -> None:
jsii.set(self, "endpointConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="endpointType")
def endpoint_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: endpointType: The type of the cluster connection point. Set this parameter to Custom.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "endpointType"))
@endpoint_type.setter
def endpoint_type(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "endpointType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="nodes")
def nodes(
self,
) -> typing.Optional[typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]]:
'''
:Property:
nodes: The nodes to be added to this connection point to process read requests from this connection point. Add at least two nodes.
If you do not specify this parameter, all nodes of the cluster are added to this connection point by default.
'''
return typing.cast(typing.Optional[typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]], jsii.get(self, "nodes"))
@nodes.setter
def nodes(
self,
value: typing.Optional[typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "nodes", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="readWriteMode")
def read_write_mode(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
readWriteMode: The read/write mode of the cluster connection point. Valid values:
ReadWrite: receives and forwards read and write requests (automatic read-write splitting).
ReadOnly: receives and forwards only read requests.
Default value: ReadOnly.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "readWriteMode"))
@read_write_mode.setter
def read_write_mode(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "readWriteMode", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterEndpoint.EndpointConfigProperty",
jsii_struct_bases=[],
name_mapping={"consist_level": "consistLevel"},
)
class EndpointConfigProperty:
def __init__(
self,
*,
consist_level: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param consist_level:
'''
self._values: typing.Dict[str, typing.Any] = {}
if consist_level is not None:
self._values["consist_level"] = consist_level
@builtins.property
def consist_level(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
consistLevel: The consistency level of the cluster connection point. Valid values:
0: eventual consistency
1: session consistency
For example, {"ConsistLevel": "0"}.
Note: If the ReadWriteMode parameter is set to ReadOnly, the value of this parameter must be 0.
'''
result = self._values.get("consist_level")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "EndpointConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
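# Illustrative sketch (not part of the generated bindings): constructing an
# EndpointConfigProperty for a custom endpoint. Per the consist_level docstring
# above, "0" selects eventual consistency, which is also what a ReadOnly
# endpoint requires. The helper name below is hypothetical.
def _example_endpoint_config() -> "RosDBClusterEndpoint.EndpointConfigProperty":
    return RosDBClusterEndpoint.EndpointConfigProperty(consist_level="0")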
class RosDBClusterEndpointAddress(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterEndpointAddress",
):
'''A ROS template type: ``ALIYUN::POLARDB::DBClusterEndpointAddress``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosDBClusterEndpointAddressProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBClusterEndpointAddress``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAddress")
def attr_address(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Address: The details of the endpoint address.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAddress"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrConnectionString")
def attr_connection_string(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ConnectionString: The connection string of the endpoint address.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrConnectionString"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterId")
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a public connection point is to be created.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbClusterId"))
@db_cluster_id.setter
def db_cluster_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbClusterId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbEndpointId")
def db_endpoint_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbEndpointId: The ID of the cluster connection point.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbEndpointId"))
@db_endpoint_id.setter
def db_endpoint_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbEndpointId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="connectionStringPrefix")
def connection_string_prefix(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
connectionStringPrefix: The prefix of the connection string. The prefix must comply with the following rules:
It must start with a letter and can contain lowercase letters, digits, and hyphens (-). It cannot end with a hyphen.
It must be 6 to 30 characters in length.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "connectionStringPrefix"))
@connection_string_prefix.setter
def connection_string_prefix(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "connectionStringPrefix", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="netType")
def net_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
netType: The network type of the connection string.
If set to Public, ROS will create, modify, and delete the public address for you.
If set to Private, ROS will only modify the private address for you.
Default value: Public.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "netType"))
@net_type.setter
def net_type(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "netType", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterEndpointAddressProps",
jsii_struct_bases=[],
name_mapping={
"db_cluster_id": "dbClusterId",
"db_endpoint_id": "dbEndpointId",
"connection_string_prefix": "connectionStringPrefix",
"net_type": "netType",
},
)
class RosDBClusterEndpointAddressProps:
def __init__(
self,
*,
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_endpoint_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
connection_string_prefix: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
net_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::DBClusterEndpointAddress``.
:param db_cluster_id:
:param db_endpoint_id:
:param connection_string_prefix:
:param net_type:
'''
self._values: typing.Dict[str, typing.Any] = {
"db_cluster_id": db_cluster_id,
"db_endpoint_id": db_endpoint_id,
}
if connection_string_prefix is not None:
self._values["connection_string_prefix"] = connection_string_prefix
if net_type is not None:
self._values["net_type"] = net_type
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a public connection point is to be created.
'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_endpoint_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbEndpointId: The ID of the cluster connection point.
'''
result = self._values.get("db_endpoint_id")
assert result is not None, "Required property 'db_endpoint_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def connection_string_prefix(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
connectionStringPrefix: The prefix of the connection string. The prefix must comply with the following rules:
It must start with a letter and can contain lowercase letters, digits, and hyphens (-). It cannot end with a hyphen.
It must be 6 to 30 characters in length.
'''
result = self._values.get("connection_string_prefix")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def net_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
netType: The network type of the connection string.
If set to Public, ROS will create, modify, and delete the public address for you.
If set to Private, ROS will only modify the private address for you.
Default value: Public.
'''
result = self._values.get("net_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosDBClusterEndpointAddressProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
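# Illustrative sketch (not part of the generated bindings): wiring
# RosDBClusterEndpointAddressProps into the RosDBClusterEndpointAddress
# resource defined above. The cluster/endpoint IDs and the prefix are
# placeholder values, and `scope` is assumed to be an existing
# ros_cdk_core.Construct (for example, a stack).
def _example_endpoint_address(scope: ros_cdk_core.Construct) -> None:
    props = RosDBClusterEndpointAddressProps(
        db_cluster_id="pc-xxxxxxxx",          # required
        db_endpoint_id="pe-xxxxxxxx",         # required
        connection_string_prefix="myapp-ro",  # optional: 6-30 chars, starts with a letter
        net_type="Public",                    # optional: Public (default) or Private
    )
    RosDBClusterEndpointAddress(scope, "EndpointAddress", props, True)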
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterEndpointProps",
jsii_struct_bases=[],
name_mapping={
"db_cluster_id": "dbClusterId",
"auto_add_new_nodes": "autoAddNewNodes",
"endpoint_config": "endpointConfig",
"endpoint_type": "endpointType",
"nodes": "nodes",
"read_write_mode": "readWriteMode",
},
)
class RosDBClusterEndpointProps:
def __init__(
self,
*,
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
auto_add_new_nodes: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
endpoint_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosDBClusterEndpoint.EndpointConfigProperty]] = None,
endpoint_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
nodes: typing.Optional[typing.Union[typing.Sequence[typing.Any], ros_cdk_core.IResolvable]] = None,
read_write_mode: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::DBClusterEndpoint``.
:param db_cluster_id:
:param auto_add_new_nodes:
:param endpoint_config:
:param endpoint_type:
:param nodes:
:param read_write_mode:
'''
self._values: typing.Dict[str, typing.Any] = {
"db_cluster_id": db_cluster_id,
}
if auto_add_new_nodes is not None:
self._values["auto_add_new_nodes"] = auto_add_new_nodes
if endpoint_config is not None:
self._values["endpoint_config"] = endpoint_config
if endpoint_type is not None:
self._values["endpoint_type"] = endpoint_type
if nodes is not None:
self._values["nodes"] = nodes
if read_write_mode is not None:
self._values["read_write_mode"] = read_write_mode
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a custom connection point is to be created.
'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def auto_add_new_nodes(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
autoAddNewNodes: Specifies whether a newly added node is automatically added to this connection point.
Valid values: Enable, Disable.
Default value: Disable.
'''
result = self._values.get("auto_add_new_nodes")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def endpoint_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosDBClusterEndpoint.EndpointConfigProperty]]:
'''
:Property: endpointConfig:
'''
result = self._values.get("endpoint_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosDBClusterEndpoint.EndpointConfigProperty]], result)
@builtins.property
def endpoint_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: endpointType: The type of the cluster connection point. Set this parameter to Custom.
'''
result = self._values.get("endpoint_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def nodes(
self,
) -> typing.Optional[typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]]:
'''
:Property:
nodes: The nodes to be added to this connection point to process read requests from this connection point. Add at least two nodes.
If you do not specify this parameter, all nodes of the cluster are added to this connection point by default.
'''
result = self._values.get("nodes")
return typing.cast(typing.Optional[typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]], result)
@builtins.property
def read_write_mode(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
readWriteMode: The read/write mode of the cluster connection point. Valid values:
ReadWrite: receives and forwards read and write requests (automatic read-write splitting).
ReadOnly: receives and forwards only read requests.
Default value: ReadOnly.
'''
result = self._values.get("read_write_mode")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosDBClusterEndpointProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
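# Illustrative sketch (not part of the generated bindings): creating a custom
# read-only endpoint from RosDBClusterEndpointProps. The cluster ID is a
# placeholder and `scope` is assumed to be an existing ros_cdk_core.Construct.
def _example_custom_endpoint(scope: ros_cdk_core.Construct) -> None:
    props = RosDBClusterEndpointProps(
        db_cluster_id="pc-xxxxxxxx",
        endpoint_type="Custom",      # per the docstring, set this parameter to Custom
        read_write_mode="ReadOnly",  # ReadOnly (default) or ReadWrite
        auto_add_new_nodes="Enable", # Enable or Disable (default)
        # ReadOnly endpoints must use eventual consistency (consist_level "0")
        endpoint_config=RosDBClusterEndpoint.EndpointConfigProperty(consist_level="0"),
    )
    RosDBClusterEndpoint(scope, "CustomEndpoint", props, True)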
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBClusterProps",
jsii_struct_bases=[],
name_mapping={
"db_node_class": "dbNodeClass",
"db_type": "dbType",
"db_version": "dbVersion",
"pay_type": "payType",
"auto_renew_period": "autoRenewPeriod",
"backup_retention_policy_on_cluster_deletion": "backupRetentionPolicyOnClusterDeletion",
"clone_data_point": "cloneDataPoint",
"cluster_network_type": "clusterNetworkType",
"creation_category": "creationCategory",
"creation_option": "creationOption",
"db_cluster_description": "dbClusterDescription",
"db_cluster_parameters": "dbClusterParameters",
"default_time_zone": "defaultTimeZone",
"gdn_id": "gdnId",
"lower_case_table_names": "lowerCaseTableNames",
"maintain_time": "maintainTime",
"period": "period",
"renewal_status": "renewalStatus",
"resource_group_id": "resourceGroupId",
"security_group_ids": "securityGroupIds",
"security_ip_list": "securityIpList",
"source_resource_id": "sourceResourceId",
"tags": "tags",
"tde_status": "tdeStatus",
"vpc_id": "vpcId",
"v_switch_id": "vSwitchId",
"zone_id": "zoneId",
},
)
class RosDBClusterProps:
def __init__(
self,
*,
db_node_class: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_version: typing.Union[builtins.str, ros_cdk_core.IResolvable],
pay_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
auto_renew_period: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
backup_retention_policy_on_cluster_deletion: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
clone_data_point: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
cluster_network_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
creation_category: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
creation_option: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_cluster_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_cluster_parameters: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosDBCluster.DBClusterParametersProperty]] = None,
default_time_zone: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
gdn_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
lower_case_table_names: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
maintain_time: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
period: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
renewal_status: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
resource_group_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
security_group_ids: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]] = None,
security_ip_list: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_resource_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
tags: typing.Optional[typing.Sequence[RosDBCluster.TagsProperty]] = None,
tde_status: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
vpc_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
v_switch_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zone_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::DBCluster``.
:param db_node_class:
:param db_type:
:param db_version:
:param pay_type:
:param auto_renew_period:
:param backup_retention_policy_on_cluster_deletion:
:param clone_data_point:
:param cluster_network_type:
:param creation_category:
:param creation_option:
:param db_cluster_description:
:param db_cluster_parameters:
:param default_time_zone:
:param gdn_id:
:param lower_case_table_names:
:param maintain_time:
:param period:
:param renewal_status:
:param resource_group_id:
:param security_group_ids:
:param security_ip_list:
:param source_resource_id:
:param tags:
:param tde_status:
:param vpc_id:
:param v_switch_id:
:param zone_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"db_node_class": db_node_class,
"db_type": db_type,
"db_version": db_version,
"pay_type": pay_type,
}
if auto_renew_period is not None:
self._values["auto_renew_period"] = auto_renew_period
if backup_retention_policy_on_cluster_deletion is not None:
self._values["backup_retention_policy_on_cluster_deletion"] = backup_retention_policy_on_cluster_deletion
if clone_data_point is not None:
self._values["clone_data_point"] = clone_data_point
if cluster_network_type is not None:
self._values["cluster_network_type"] = cluster_network_type
if creation_category is not None:
self._values["creation_category"] = creation_category
if creation_option is not None:
self._values["creation_option"] = creation_option
if db_cluster_description is not None:
self._values["db_cluster_description"] = db_cluster_description
if db_cluster_parameters is not None:
self._values["db_cluster_parameters"] = db_cluster_parameters
if default_time_zone is not None:
self._values["default_time_zone"] = default_time_zone
if gdn_id is not None:
self._values["gdn_id"] = gdn_id
if lower_case_table_names is not None:
self._values["lower_case_table_names"] = lower_case_table_names
if maintain_time is not None:
self._values["maintain_time"] = maintain_time
if period is not None:
self._values["period"] = period
if renewal_status is not None:
self._values["renewal_status"] = renewal_status
if resource_group_id is not None:
self._values["resource_group_id"] = resource_group_id
if security_group_ids is not None:
self._values["security_group_ids"] = security_group_ids
if security_ip_list is not None:
self._values["security_ip_list"] = security_ip_list
if source_resource_id is not None:
self._values["source_resource_id"] = source_resource_id
if tags is not None:
self._values["tags"] = tags
if tde_status is not None:
self._values["tde_status"] = tde_status
if vpc_id is not None:
self._values["vpc_id"] = vpc_id
if v_switch_id is not None:
self._values["v_switch_id"] = v_switch_id
if zone_id is not None:
self._values["zone_id"] = zone_id
@builtins.property
def db_node_class(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbNodeClass: The node specifications of the cluster. For more information, see Specifications and pricing.
'''
result = self._values.get("db_node_class")
assert result is not None, "Required property 'db_node_class' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbType: Database type, value:
MySQL
PostgreSQL
Oracle
'''
result = self._values.get("db_type")
assert result is not None, "Required property 'db_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_version(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbVersion: The version of the database. Valid values:
MySQL: 5.6, 5.7 or 8.0
PostgreSQL: 11
Oracle: 11
'''
result = self._values.get("db_version")
assert result is not None, "Required property 'db_version' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def pay_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
payType: The billing method of the cluster. Valid values:
Postpaid: pay-as-you-go
Prepaid: subscription
'''
result = self._values.get("pay_type")
assert result is not None, "Required property 'pay_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def auto_renew_period(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: autoRenewPeriod: Set the cluster auto renewal time. Valid values: 1, 2, 3, 6, 12, 24, 36. Default to 1.
'''
result = self._values.get("auto_renew_period")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def backup_retention_policy_on_cluster_deletion(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
backupRetentionPolicyOnClusterDeletion: The backup set retention policy when deleting a cluster, the value range is as follows:
ALL: Keep all backups permanently.
LATEST: Permanently keep the last backup (automatic backup before deletion).
NONE: The backup set is not retained when the cluster is deleted.
When creating a cluster, the default value is NONE, that is, the backup set is not retained when the cluster is deleted.
Note: This parameter takes effect only when the value of DBType is MySQL.
'''
result = self._values.get("backup_retention_policy_on_cluster_deletion")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def clone_data_point(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
cloneDataPoint: The time point of data to be cloned. Valid values:
LATEST: clones data of the latest time point.
&lt;BackupSetID&gt;: clones historical backup data. Specify the ID of the specific backup set.
&lt;Timestamp&gt;: clones data of a historical time point. Specify the specific time in
the yyyy-MM-ddTHH:mm:ssZ format. The time must be in UTC.
Default value: LATEST.
Note
This parameter takes effect only when the DBType parameter is set to MySQL, the DBVersion parameter is set to 5.6, and the CreationOption parameter is set to CloneFromRDS or CloneFromPolarDB.
If the CreationOption parameter is set to CloneFromRDS, the value of this parameter must be LATEST.
'''
result = self._values.get("clone_data_point")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def cluster_network_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: clusterNetworkType: The network type of the cluster. Currently, only VPC is supported. Default value: VPC.
'''
result = self._values.get("cluster_network_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def creation_category(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: creationCategory: Cluster series. The value could be Normal (standard version).
'''
result = self._values.get("creation_category")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def creation_option(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
creationOption: The method for creating an ApsaraDB for POLARDB cluster. Valid values:
Normal: creates an ApsaraDB for POLARDB cluster.
CloneFromPolarDB: clones data from an existing ApsaraDB for POLARDB cluster to a new ApsaraDB for POLARDB cluster.
CloneFromRDS: clones data from an existing ApsaraDB for RDS instance to a new ApsaraDB
for POLARDB cluster.
MigrationFromRDS: migrates data from an existing ApsaraDB for RDS instance to a new ApsaraDB for POLARDB cluster. The created ApsaraDB for POLARDB cluster is in read-only mode and has binary logs enabled by default.
CreateGdnStandby: Create a secondary cluster.
Default value: Normal.
Note:
When DBType is MySQL and DBVersion is 5.6, this parameter can be specified as CloneFromRDS or MigrationFromRDS.
When DBType is MySQL and DBVersion is 8.0, this parameter can be specified as CreateGdnStandby.
'''
result = self._values.get("creation_option")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_cluster_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
dbClusterDescription: The description of the cluster. The description must comply with the following rules:
It must start with a Chinese character or an English letter.
It can contain Chinese and English characters, digits, underscores (_), and hyphens (-).
It cannot start with http:// or https://.
It must be 2 to 256 characters in length.
'''
result = self._values.get("db_cluster_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_cluster_parameters(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosDBCluster.DBClusterParametersProperty]]:
'''
:Property: dbClusterParameters: Modifies the parameters of the PolarDB cluster.
'''
result = self._values.get("db_cluster_parameters")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosDBCluster.DBClusterParametersProperty]], result)
@builtins.property
def default_time_zone(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
defaultTimeZone: Set up a time zone (UTC), the value range is as follows:
System: The default time zone is the same as the time zone of the region where the cluster is located. This is the default value.
Other valid values range from -12:00 to +13:00, for example, 00:00.
Note: This parameter takes effect only when DBType is MySQL.
'''
result = self._values.get("default_time_zone")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def gdn_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
gdnId: The ID of the Global Database Network (GDN).
Note: This parameter is required when the CreationOption is CreateGdnStandby.
'''
result = self._values.get("gdn_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def lower_case_table_names(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property:
lowerCaseTableNames: Specifies whether table names are case-sensitive. Valid values:
1: not case-sensitive
0: case-sensitive
Default value: 1.
Note: This parameter takes effect only when the value of DBType is MySQL.
'''
result = self._values.get("lower_case_table_names")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def maintain_time(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
maintainTime: The maintenance window of the cluster.
Format: HH:mmZ-HH:mmZ.
Example: 16:00Z-17:00Z, which means routine maintenance is performed from 00:00 to 01:00 (UTC+08:00).
'''
result = self._values.get("maintain_time")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def period(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: period: The subscription period of the cluster in month. Valid values: 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 24, 36.
'''
result = self._values.get("period")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def renewal_status(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
renewalStatus: The auto-renewal status of the cluster. Valid values:
AutoRenewal: automatically renews the cluster.
Normal: manually renews the cluster.
NotRenewal: does not renew the cluster.
Default value: Normal.
Note: If this parameter is set to NotRenewal, the system does not send an expiration reminder;
it only sends an SMS message three days before the cluster expires to remind you
that the cluster will not be renewed.
'''
result = self._values.get("renewal_status")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def resource_group_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: resourceGroupId: The ID of the resource group.
'''
result = self._values.get("resource_group_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def security_group_ids(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]]:
'''
:Property:
securityGroupIds: The IDs of the security groups.
You can add up to three security groups to a cluster.
'''
result = self._values.get("security_group_ids")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]], result)
@builtins.property
def security_ip_list(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: securityIpList: The whitelist of the Apsara PolarDB cluster.
'''
result = self._values.get("security_ip_list")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_resource_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceResourceId: The ID of the source RDS instance or source POLARDB cluster.
Note
This parameter takes effect only when the DBType parameter is set to MySQL and the DBVersion parameter is set to 5.6.
This parameter is required if the CreationOption parameter is not set to Normal.
'''
result = self._values.get("source_resource_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def tags(self) -> typing.Optional[typing.List[RosDBCluster.TagsProperty]]:
'''
:Property: tags: Tags to attach to the instance. A maximum of 20 tags can be added when the instance is created. Each tag has two properties, Key and Value; Key is required.
'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.List[RosDBCluster.TagsProperty]], result)
@builtins.property
def tde_status(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
tdeStatus: Specifies whether to enable Transparent Data Encryption (TDE). Valid values:
true: enable TDE
false: disable TDE (default)
Note: The parameter takes effect only when DBType is PostgreSQL or Oracle. You cannot disable TDE after it is enabled.
'''
result = self._values.get("tde_status")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def vpc_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: vpcId: The ID of the VPC to connect to.
'''
result = self._values.get("vpc_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def v_switch_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: vSwitchId: The ID of the VSwitch to connect to.
'''
result = self._values.get("v_switch_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zone_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: zoneId: The zone ID of the cluster. You can call the DescribeRegions operation to query available zones.
'''
result = self._values.get("zone_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosDBClusterProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
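# Illustrative sketch (not part of the generated bindings): the four required
# cluster properties plus two common optional ones. The node class string and
# the IP range are placeholder values; consult the pricing documentation for
# real node classes.
def _example_cluster_props() -> RosDBClusterProps:
    return RosDBClusterProps(
        db_node_class="polar.mysql.x4.medium",  # placeholder node class
        db_type="MySQL",                        # MySQL, PostgreSQL, or Oracle
        db_version="8.0",                       # must match db_type (MySQL: 5.6/5.7/8.0)
        pay_type="Postpaid",                    # Postpaid (pay-as-you-go) or Prepaid
        default_time_zone="System",             # MySQL only
        security_ip_list="10.0.0.0/8",          # placeholder whitelist
    )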
class RosDBInstance(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosDBInstance",
):
'''A ROS template type: ``ALIYUN::POLARDB::DBInstance``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosDBInstanceProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBInstance``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="characterSetName")
def character_set_name(
self,
) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: characterSetName: The character set of the database. For more information, see Character sets.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "characterSetName"))
@character_set_name.setter
def character_set_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "characterSetName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterId")
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database is to be created.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbClusterId"))
@db_cluster_id.setter
def db_cluster_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbClusterId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbName")
def db_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbName: The name of the database to be created. The name must comply with the following rules:
It must start with a lowercase letter and consist of lowercase letters, digits, hyphens
(-), and underscores (_).
It must end with a letter or a digit. It can be up to 64 characters in length.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbName"))
@db_name.setter
def db_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountName")
def account_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: accountName: The name of the database account to be used.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "accountName"))
@account_name.setter
def account_name(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "accountName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="accountPrivilege")
def account_privilege(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountPrivilege: The permissions of the database account on the database. Valid values:
ReadWrite: has read and write permissions on the database.
ReadOnly: has the read-only permission on the database.
DMLOnly: runs only data manipulation language (DML) statements.
DDLOnly: runs only data definition language (DDL) statements.
Default value: ReadWrite.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "accountPrivilege"))
@account_privilege.setter
def account_privilege(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "accountPrivilege", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbDescription")
def db_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
dbDescription: The description of the database. The description must comply with the following rules:
It cannot start with http:// or https://.
It must be 2 to 256 characters in length.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "dbDescription"))
@db_description.setter
def db_description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "dbDescription", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBInstanceProps",
jsii_struct_bases=[],
name_mapping={
"character_set_name": "characterSetName",
"db_cluster_id": "dbClusterId",
"db_name": "dbName",
"account_name": "accountName",
"account_privilege": "accountPrivilege",
"db_description": "dbDescription",
},
)
class RosDBInstanceProps:
def __init__(
self,
*,
character_set_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
db_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
account_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
account_privilege: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
db_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::DBInstance``.
:param character_set_name:
:param db_cluster_id:
:param db_name:
:param account_name:
:param account_privilege:
:param db_description:
'''
self._values: typing.Dict[str, typing.Any] = {
"character_set_name": character_set_name,
"db_cluster_id": db_cluster_id,
"db_name": db_name,
}
if account_name is not None:
self._values["account_name"] = account_name
if account_privilege is not None:
self._values["account_privilege"] = account_privilege
if db_description is not None:
self._values["db_description"] = db_description
@builtins.property
def character_set_name(
self,
) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: characterSetName: The character set of the database. For more information, see Character sets.
'''
result = self._values.get("character_set_name")
assert result is not None, "Required property 'character_set_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster for which a database is to be created.
'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def db_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
dbName: The name of the database to be created. The name must comply with the following rules:
It must start with a lowercase letter and consist of lowercase letters, digits, hyphens
(-), and underscores (_).
It must end with a letter or a digit. It can be up to 64 characters in length.
'''
result = self._values.get("db_name")
assert result is not None, "Required property 'db_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def account_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: accountName: The name of the database account to be used.
'''
result = self._values.get("account_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def account_privilege(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accountPrivilege: The permissions of the database account on the database. Valid values:
ReadWrite: has read and write permissions on the database.
ReadOnly: has the read-only permission on the database.
DMLOnly: runs only data manipulation language (DML) statements.
DDLOnly: runs only data definition language (DDL) statements.
Default value: ReadWrite.
'''
result = self._values.get("account_privilege")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def db_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
dbDescription: The description of the database. The description must comply with the following rules:
It cannot start with http:// or https://.
It must be 2 to 256 characters in length.
'''
result = self._values.get("db_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosDBInstanceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
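# Illustrative sketch (not part of the generated bindings): creating a database
# inside an existing cluster via RosDBInstanceProps. All IDs and names are
# placeholder values.
def _example_database_props() -> RosDBInstanceProps:
    return RosDBInstanceProps(
        character_set_name="utf8mb4",
        db_cluster_id="pc-xxxxxxxx",
        db_name="orders",               # must start with a lowercase letter, <= 64 chars
        account_name="app_user",        # optional
        account_privilege="ReadWrite",  # optional; default is ReadWrite
    )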
class RosDBNodes(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-polardb.RosDBNodes",
):
'''A ROS template type: ``ALIYUN::POLARDB::DBNodes``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosDBNodesProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::POLARDB::DBNodes``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDbNodeIds")
def attr_db_node_ids(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: DBNodeIds: The ID list of added cluster nodes.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDbNodeIds"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrOrderIds")
def attr_order_ids(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: OrderIds: The order ID list of added cluster nodes.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrOrderIds"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="amount")
def amount(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: amount: The number of nodes to be added to the cluster.
'''
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], jsii.get(self, "amount"))
@amount.setter
def amount(
self,
value: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "amount", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="dbClusterId")
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which nodes are to be added.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "dbClusterId"))
@db_cluster_id.setter
def db_cluster_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "dbClusterId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-polardb.RosDBNodesProps",
jsii_struct_bases=[],
name_mapping={"amount": "amount", "db_cluster_id": "dbClusterId"},
)
class RosDBNodesProps:
def __init__(
self,
*,
amount: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
db_cluster_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining a ``ALIYUN::POLARDB::DBNodes``.
:param amount:
:param db_cluster_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"amount": amount,
"db_cluster_id": db_cluster_id,
}
@builtins.property
def amount(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: amount: The number of nodes to be added to the cluster.
'''
result = self._values.get("amount")
assert result is not None, "Required property 'amount' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
@builtins.property
def db_cluster_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: dbClusterId: The ID of the ApsaraDB for POLARDB cluster to which nodes are to be added.
'''
result = self._values.get("db_cluster_id")
assert result is not None, "Required property 'db_cluster_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosDBNodesProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
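# Illustrative sketch (not part of the generated bindings): scaling out a
# cluster by two nodes. The cluster ID is a placeholder and `scope` is assumed
# to be an existing ros_cdk_core.Construct.
def _example_add_nodes(scope: ros_cdk_core.Construct) -> None:
    props = RosDBNodesProps(amount=2, db_cluster_id="pc-xxxxxxxx")
    RosDBNodes(scope, "ExtraNodes", props, True)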
__all__ = [
"Account",
"AccountPrivilege",
"AccountPrivilegeProps",
"AccountProps",
"DBCluster",
"DBClusterAccessWhiteList",
"DBClusterAccessWhiteListProps",
"DBClusterEndpoint",
"DBClusterEndpointAddress",
"DBClusterEndpointAddressProps",
"DBClusterEndpointProps",
"DBClusterProps",
"DBInstance",
"DBInstanceProps",
"DBNodes",
"DBNodesProps",
"RosAccount",
"RosAccountPrivilege",
"RosAccountPrivilegeProps",
"RosAccountProps",
"RosDBCluster",
"RosDBClusterAccessWhiteList",
"RosDBClusterAccessWhiteListProps",
"RosDBClusterEndpoint",
"RosDBClusterEndpointAddress",
"RosDBClusterEndpointAddressProps",
"RosDBClusterEndpointProps",
"RosDBClusterProps",
"RosDBInstance",
"RosDBInstanceProps",
"RosDBNodes",
"RosDBNodesProps",
]
publication.publish()
[dataset record metadata omitted. Next record: ppq/scheduler/dispatchers.py from repo wdian/ppq, hexsha 40b8ecf012c898ba498f69af872ce326b4c6bd53, 20,447 bytes, Python, license Apache-2.0.]
from typing import Dict
from ppq.core import TargetPlatform
from ppq.IR import BaseGraph
from ppq.IR.search import SearchableGraph
from .base import (GraphDispatcher, SOI_generators, SOI_receivers,
reverse_tracing_pattern, value_tracing_pattern)
class AggresiveDispatcher(GraphDispatcher):
"""
A graph dispatcher cuts a graph into parts; each part is dispatched to a specific platform
for further execution and quantization.
In most cases, the operations within a graph can be partitioned into quantable operations,
shape-or-index related operations, and remaining operations. All subclasses of GraphDispatcher
implement the function "dispatch" to send every operation to its proper platform.
ATTENTION: the platform attribute greatly affects the quantizer's quantization logic and the execution result.
If an operation is sent to a quantable platform, its inputs and outputs will be quantized if necessary.
If an operation is classified as shape-or-index related, it will be executed on the CPU.
If an operation is sent to an fp32 platform, its inputs and outputs shall never be quantized.
ATTENTION: this dispatcher will insert the necessary DeviceSwitch operations
between shape-or-index operations and others.
"""
@staticmethod
def dispatch(
graph: BaseGraph,
quant_platform: TargetPlatform,
fp32_platform: TargetPlatform,
SOI_platform: TargetPlatform,
**kwargs
) -> Dict[str, TargetPlatform]:
"""
Split a graph into parts; each part will be sent to a specific platform
for further execution and quantization.
There are 3 default platforms during dispatching:
quant_platform - all quantable parts of the graph will be dispatched to this platform.
SOI_platform - shape-or-index related operations will be dispatched to this platform.
fp32_platform - some operations receive results from both quant_platform and SOI_platform;
they will be dispatched to fp32_platform.
ATTENTION: quantization follows this dispatching,
and only the operations on the quantable platform will be quantized later on.
ATTENTION: this dispatcher will insert the necessary DeviceSwitch operations between
shape-or-index operations and others.
Args:
graph (BaseGraph): the graph object to be dispatched by this dispatcher.
quant_platform (TargetPlatform):
platform object where quantable parts go.
fp32_platform (TargetPlatform):
platform object where the remaining (fp32) parts go.
SOI_platform (TargetPlatform):
platform object where shape-or-index related parts go.
Returns:
Dict[str, TargetPlatform]: a dispatching table mapping each operation name to its target platform.
"""
recivers, generators = SOI_receivers(graph), SOI_generators(graph)
search_engine, SOI_opeartions, FP32_operations = SearchableGraph(graph), set(recivers), set()
quant_operations = search_engine.opset_matching(
sp_expr = lambda x: x.is_computing_op,
rp_expr = value_tracing_pattern,
ep_expr = lambda x: x.type in {'Shape', 'TopK', 'NonMaxSuppression'} or x.is_boundary,
direction = 'down')
# remove shape operations from computing ops.
quant_operations.filter(lambda x: x.type == 'Shape')
# we assume all 'Shape', 'NonMaxSuppression', 'ConstantOfShape', 'TopK' operations are SOI generators.
shape_forward_matching = search_engine.opset_matching(
sp_expr = lambda x: x in generators and x.type not in {'Constant'},
rp_expr = value_tracing_pattern,
ep_expr = lambda x: x in recivers or x in quant_operations or x.is_boundary,
direction = 'down')
# update matchings, ready for further searching.
SOI_opeartions.update(shape_forward_matching)
generators.update(SOI_opeartions)
while True:
# there are some particular cases that a single matching cannot handle.
# to cover all shape-related operations, a reverse matching is required.
shape_backward_matching = search_engine.opset_matching(
sp_expr = lambda x: x in SOI_opeartions and x.type != 'Shape' and not x in quant_operations,
rp_expr = reverse_tracing_pattern,
ep_expr = lambda x: x in generators or x in quant_operations or x.is_boundary,
direction = 'up')
if all([(op in SOI_opeartions) for op in shape_backward_matching]): break
# update matchings
SOI_opeartions.update(shape_backward_matching)
# generate dispatching table.
dispatching_table = {}
for operation in graph.operations.values():
if operation in SOI_opeartions:
dispatching_table[operation.name] = SOI_platform
elif operation in quant_operations:
dispatching_table[operation.name] = quant_platform
else:
dispatching_table[operation.name] = fp32_platform
# move TopK, Shape, and NonMaxSuppression to the same platform as their input.
if operation.type in {'Shape', 'TopK', 'NonMaxSuppression'}:
if operation.inputs[0].source_op is not None:
dispatching_table[operation.name] = operation.inputs[0].source_op.platform
else: dispatching_table[operation.name] = quant_platform
# move activations to the same platform as their input.
if operation.is_linear_activation:
source_op = operation.inputs[0].source_op
if source_op is not None:
dispatching_table[operation.name] = dispatching_table[source_op.name]
return dispatching_table
class ConservativeDispatcher(GraphDispatcher):
"""
A graph dispatcher cuts a graph into parts; each part of the graph will be dispatched to a specific platform
for further execution and quantization.
For the most part, all operations within a graph can be partitioned into quantable operations,
Shape-Or-Index (SOI) related operations, and remaining operations. All subclasses of GraphDispatcher
give an implementation of the function "dispatch" to send all operations to their proper platform.
The Conservative Dispatcher cuts the graph in a conservative way, which means it assigns as many
operations as possible to the fp32 platform.
ATTENTION: the platform attribute greatly affects the quantizer's quantization logic and the execution result.
If an operation is sent to a quantable platform, its inputs and outputs will be quantized if necessary.
If an operation is classified as a shape-or-index related operation, it will be executed on CPU.
If an operation is sent to a fp32 platform, its inputs and outputs will never be quantized.
ATTENTION: this dispatcher will insert necessary DeviceSwitch operations
between shape-or-index operations and others.
"""
@staticmethod
def dispatch(
graph: BaseGraph,
quant_platform: TargetPlatform,
fp32_platform: TargetPlatform,
SOI_platform: TargetPlatform, **kwargs
) -> Dict[str, TargetPlatform]:
"""
The dispatch function splits a graph into parts; each part will be sent to a specific platform
for further execution and quantization.
There are 3 default platforms during dispatching:
quant_platform - all quantable parts of the graph will be dispatched to this platform.
SOI_platform - all Shape-Or-Index related operations will be dispatched to this platform.
fp32_platform - some operations receive results from both quant_platform and SOI_platform;
they will be dispatched to fp32_platform.
ATTENTION: quantization follows this dispatching,
and only the operations within the quantable platform will be quantized in the future.
ATTENTION: this dispatcher will insert necessary DeviceSwitch operations between
shape-or-index operations and others.
Args:
graph (BaseGraph): graph object which is going to be dispatched by this dispatcher.
quant_platform (TargetPlatform):
platform where the quantable parts go.
fp32_platform (TargetPlatform):
platform where the remaining parts go.
SOI_platform (TargetPlatform):
platform where the Shape-Or-Index parts go.
Returns:
Dict[str, TargetPlatform]: a dispatching table mapping each operation name to its target platform.
"""
quantable_types = {
'Conv', 'ConvTranspose', 'Gemm', 'Relu', 'PRelu', 'Clip', 'Pad',
'Resize', 'MaxPool', 'AveragePool', 'GlobalMaxPool', 'GlobalAveragePool',
'Mul', 'Add', 'Max', 'Sub', 'Div', 'LeakyRelu', 'Split', 'Concat',
'Reshape', 'Transpose', 'Slice', 'Flatten', 'MatMul'}
recivers, generators = SOI_receivers(graph), SOI_generators(graph)
search_engine, SOI_opeartions = SearchableGraph(graph), set(recivers)
quant_operations = search_engine.opset_matching(
sp_expr = lambda x: x.is_computing_op,
rp_expr = value_tracing_pattern,
ep_expr = lambda x: (x.type not in quantable_types) or x.is_boundary,
direction = 'down')
quant_operations.filter(lambda x: x.type not in quantable_types)
computing_extensions = search_engine.opset_matching(
sp_expr = lambda x: x.is_computing_op,
rp_expr = value_tracing_pattern,
ep_expr = lambda x: x.type in {'Shape', 'TopK', 'NonMaxSuppression'} or x.is_boundary,
direction = 'down')
# we assume all 'Shape', 'NonMaxSuppression', 'ConstantOfShape', 'TopK' operations are SOI generators.
shape_forward_matching = search_engine.opset_matching(
sp_expr = lambda x: x in generators and x.type not in {'Constant'},
rp_expr = value_tracing_pattern,
ep_expr = lambda x: (x in recivers or
x in quant_operations or
x.is_boundary or
x.is_computing_op),
direction = 'down')
# remove computing operations and quant operations from matching
shape_forward_matching.filter(lambda x: x.is_computing_op or x in quant_operations)
# update matchings, ready for further searching.
SOI_opeartions.update(shape_forward_matching)
while True:
# there are some particular cases that a single matching cannot handle.
# to cover all shape-related operations, a reverse matching is required.
shape_backward_matching = search_engine.opset_matching(
sp_expr = lambda x: x in SOI_opeartions and x.type != 'Shape',
rp_expr = reverse_tracing_pattern,
ep_expr = lambda x: (x in SOI_opeartions or
x in quant_operations or
x.is_boundary or
x.is_computing_op),
direction = 'up')
# remove computing operations and quant operations from matching
shape_backward_matching.filter(lambda x: x.is_computing_op or x in quant_operations)
if all([(op in SOI_opeartions) for op in shape_backward_matching]): break
# update matchings
SOI_opeartions.update(shape_backward_matching)
# generate dispatching table.
dispatching_table = {}
for operation in graph.operations.values():
if operation in SOI_opeartions and operation not in computing_extensions:
dispatching_table[operation.name] = SOI_platform
elif operation in quant_operations:
dispatching_table[operation.name] = quant_platform
else:
dispatching_table[operation.name] = fp32_platform
for operation in graph.operations.values():
# move TopK, Shape, and NonMaxSuppression to the same platform as their input.
if operation.type in {'Shape', 'TopK', 'NonMaxSuppression'}:
source_op = operation.inputs[0].source_op
if source_op is not None:
dispatching_table[operation.name] = dispatching_table[source_op.name]
else: dispatching_table[operation.name] = fp32_platform
# move activations to the same platform as their input.
if operation.is_linear_activation:
source_op = operation.inputs[0].source_op
if source_op is not None:
dispatching_table[operation.name] = dispatching_table[source_op.name]
return dispatching_table
class PPLNNDispatcher(GraphDispatcher):
"""
A graph dispatcher cuts a graph into parts; each part of the graph will be dispatched to a specific platform
for further execution and quantization.
For the most part, all operations within a graph can be partitioned into quantable operations,
Shape-Or-Index (SOI) related operations, and remaining operations. All subclasses of GraphDispatcher
give an implementation of the function "dispatch" to send all operations to their proper platform.
The PPLNN dispatcher cuts the graph in a conservative, Conv-centric way, which means it assigns as many
operations as possible to the fp32 platform.
ATTENTION: the platform attribute greatly affects the quantizer's quantization logic and the execution result.
If an operation is sent to a quantable platform, its inputs and outputs will be quantized if necessary.
If an operation is classified as a shape-or-index related operation, it will be executed on CPU.
If an operation is sent to a fp32 platform, its inputs and outputs will never be quantized.
ATTENTION: this dispatcher will insert necessary DeviceSwitch operations
between shape-or-index operations and others.
"""
@staticmethod
def dispatch(
graph: BaseGraph,
quant_platform: TargetPlatform,
fp32_platform: TargetPlatform,
SOI_platform: TargetPlatform, **kwargs
) -> Dict[str, TargetPlatform]:
"""
The dispatch function splits a graph into parts; each part will be sent to a specific platform
for further execution and quantization.
There are 3 default platforms during dispatching:
quant_platform - all quantable parts of the graph will be dispatched to this platform.
SOI_platform - all Shape-Or-Index related operations will be dispatched to this platform.
fp32_platform - some operations receive results from both quant_platform and SOI_platform;
they will be dispatched to fp32_platform.
ATTENTION: quantization follows this dispatching,
and only the operations within the quantable platform will be quantized in the future.
ATTENTION: this dispatcher will insert necessary DeviceSwitch operations between
shape-or-index operations and others.
Args:
graph (BaseGraph): graph object which is going to be dispatched by this dispatcher.
quant_platform (TargetPlatform):
platform where the quantable parts go.
fp32_platform (TargetPlatform):
platform where the remaining parts go.
SOI_platform (TargetPlatform):
platform where the Shape-Or-Index parts go.
Returns:
Dict[str, TargetPlatform]: a dispatching table mapping each operation name to its target platform.
"""
quant_types = {
'Conv', 'Relu', 'PRelu', 'Clip',
'Resize', 'MaxPool', 'AveragePool', 'GlobalMaxPool', 'GlobalAveragePool',
'Mul', 'Add', 'LeakyRelu', 'Split', 'Concat',
'Transpose', 'Slice', 'Reshape', 'Flatten'}
recivers, generators = SOI_receivers(graph), SOI_generators(graph)
search_engine, SOI_opeartions = SearchableGraph(graph), set(recivers)
quant_operations = search_engine.opset_matching(
sp_expr = lambda x: x.type == 'Conv',
rp_expr = lambda x, y: value_tracing_pattern(x, y) and y.type in quant_types,
ep_expr = lambda x: x.type == 'Conv',
direction = 'down')
computing_extensions = search_engine.opset_matching(
sp_expr = lambda x: x.is_computing_op,
rp_expr = value_tracing_pattern,
ep_expr = lambda x: x.type in {'Shape', 'TopK', 'NonMaxSuppression'} or x.is_boundary,
direction = 'down')
# we assume all 'Shape', 'NonMaxSuppression', 'ConstantOfShape', 'TopK' operations are SOI generators.
shape_forward_matching = search_engine.opset_matching(
sp_expr = lambda x: x in generators and x.type not in {'Constant'},
rp_expr = value_tracing_pattern,
ep_expr = lambda x: (x in recivers or
x in quant_operations or
x.is_boundary or
x.is_computing_op),
direction = 'down')
# remove computing operations and quant operations from matching
shape_forward_matching.filter(lambda x: x.is_computing_op or x in quant_operations)
# update matchings, ready for further searching.
SOI_opeartions.update(shape_forward_matching)
while True:
# there are some particular cases that a single matching cannot handle.
# to cover all shape-related operations, a reverse matching is required.
shape_backward_matching = search_engine.opset_matching(
sp_expr = lambda x: x in SOI_opeartions and x.type != 'Shape',
rp_expr = reverse_tracing_pattern,
ep_expr = lambda x: (x in SOI_opeartions or
x in quant_operations or
x.is_boundary or
x.is_computing_op),
direction = 'up')
# remove computing operations and quant operations from matching
shape_backward_matching.filter(lambda x: x.is_computing_op or x in quant_operations)
if all([(op in SOI_opeartions) for op in shape_backward_matching]): break
# update matchings
SOI_opeartions.update(shape_backward_matching)
# generate dispatching table.
dispatching_table = {}
for operation in graph.operations.values():
if operation in SOI_opeartions and operation not in computing_extensions:
dispatching_table[operation.name] = SOI_platform
elif operation in quant_operations:
dispatching_table[operation.name] = quant_platform
else:
dispatching_table[operation.name] = fp32_platform
for operation in graph.operations.values():
# move TopK, Shape, and NonMaxSuppression to the same platform as their input.
if operation.type in {'Shape', 'TopK', 'NonMaxSuppression'}:
source_op = operation.inputs[0].source_op
if source_op is not None:
dispatching_table[operation.name] = dispatching_table[source_op.name]
else: dispatching_table[operation.name] = fp32_platform
# move activations to the same platform as their input.
if operation.is_linear_activation:
source_op = operation.inputs[0].source_op
if source_op is not None:
dispatching_table[operation.name] = dispatching_table[source_op.name]
return dispatching_table
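# A minimal usage sketch (an illustrative assumption, not part of this module):
# every dispatcher above exposes the same static dispatch() entry point, so
# wiring one up looks roughly like this; the TargetPlatform members named here
# are placeholders.
#
# from ppq.core import TargetPlatform
# table = ConservativeDispatcher.dispatch(
#     graph=graph,                                  # a parsed BaseGraph instance
#     quant_platform=TargetPlatform.PPL_CUDA_INT8,  # quantable operations
#     fp32_platform=TargetPlatform.FP32,            # remaining operations
#     SOI_platform=TargetPlatform.SHAPE_OR_INDEX,   # shape-or-index operations
# )
# for name, platform in table.items():
#     graph.operations[name].platform = platform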
| 49.870732
| 116
| 0.647723
| 2,351
| 20,447
| 5.497661
| 0.095279
| 0.039613
| 0.017331
| 0.020426
| 0.945841
| 0.94058
| 0.931605
| 0.918143
| 0.913965
| 0.910406
| 0
| 0.003887
| 0.295398
| 20,447
| 410
| 117
| 49.870732
| 0.893246
| 0.423583
| 0
| 0.819149
| 0
| 0
| 0.048609
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015957
| false
| 0
| 0.026596
| 0
| 0.074468
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
909abf01736a5d008c632daefffb869a867823f3
| 3,297
|
py
|
Python
|
tests/test_repl.py
|
rethinkdb-incubator/rethinkdb-python
|
537c47e77794d76f1832b1037f194f163c9b546e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_repl.py
|
rethinkdb-incubator/rethinkdb-python
|
537c47e77794d76f1832b1037f194f163c9b546e
|
[
"Apache-2.0"
] | 30
|
2020-11-09T10:52:23.000Z
|
2022-03-25T01:14:31.000Z
|
tests/test_repl.py
|
rethinkdb-incubator/rethinkdb-python
|
537c47e77794d76f1832b1037f194f163c9b546e
|
[
"Apache-2.0"
] | null | null | null |
from unittest.mock import Mock, patch
from rethinkdb.repl import REPL_CONNECTION_ATTRIBUTE, Repl
@patch("rethinkdb.repl.threading")
def test_init(mock_threading):
"""
Test initialization of REPL object.
"""
repl = Repl()
assert repl.is_repl_active is False
assert repl.thread_data == mock_threading.local.return_value
@patch("rethinkdb.repl.threading")
def test_get_thread_data(mock_threading):
"""
Test getting thread data from the Repl object.
"""
local_thread_data = Mock()
delattr(local_thread_data, REPL_CONNECTION_ATTRIBUTE)
mock_threading.local.return_value = local_thread_data
repl = Repl()
connection = repl.get_connection()
assert connection is None
assert repl.is_repl_active is False
@patch("rethinkdb.repl.threading")
def test_get_existing_thread_data(mock_threading):
"""
Test getting existing thread data from the Repl object.
"""
expected_connection = Mock()
local_thread_data = Mock()
setattr(local_thread_data, REPL_CONNECTION_ATTRIBUTE, expected_connection)
mock_threading.local.return_value = local_thread_data
repl = Repl()
connection = repl.get_connection()
assert connection == expected_connection
@patch("rethinkdb.repl.threading")
def test_set_connection_on_thread(mock_threading):
"""
Test setting connection on thread when no previous connection was set.
"""
expected_connection = Mock()
local_thread_data = Mock()
delattr(local_thread_data, REPL_CONNECTION_ATTRIBUTE)
mock_threading.local.return_value = local_thread_data
repl = Repl()
repl.set_connection(expected_connection)
assert repl.get_connection() == expected_connection
assert repl.is_repl_active is True
@patch("rethinkdb.repl.threading")
def test_override_connection_on_thread(mock_threading):
"""
Test setting connection on thread when a previous connection was already set.
"""
original_connection = Mock()
expected_connection = Mock()
local_thread_data = Mock()
setattr(local_thread_data, REPL_CONNECTION_ATTRIBUTE, original_connection)
mock_threading.local.return_value = local_thread_data
repl = Repl()
repl.set_connection(expected_connection)
assert repl.get_connection() == expected_connection
assert repl.is_repl_active is True
@patch("rethinkdb.repl.threading")
def test_clear_thread_data(mock_threading):
"""
Test clearing the thread data.
"""
original_connection = Mock()
local_thread_data = Mock()
setattr(local_thread_data, REPL_CONNECTION_ATTRIBUTE, original_connection)
mock_threading.local.return_value = local_thread_data
repl = Repl()
repl.clear_connection()
connection = repl.get_connection()
assert connection is None
assert repl.is_repl_active is False
@patch("rethinkdb.repl.threading")
def test_clear_not_existing_thread_data(mock_threading):
"""
Test clearing the thread data.
"""
local_thread_data = Mock()
delattr(local_thread_data, REPL_CONNECTION_ATTRIBUTE)
mock_threading.local.return_value = local_thread_data
repl = Repl()
repl.clear_connection()
connection = repl.get_connection()
assert connection is None
assert repl.is_repl_active is False
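# A hedged usage sketch (added for illustration, not from the original suite)
# of the Repl API exercised above; the Mock stands in for a real connection:
#
# repl = Repl()
# repl.set_connection(Mock(name="connection"))
# assert repl.is_repl_active is True
# repl.clear_connection()
# assert repl.get_connection() is None
# assert repl.is_repl_active is False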
| 24.604478
| 81
| 0.740673
| 409
| 3,297
| 5.650367
| 0.117359
| 0.116833
| 0.116833
| 0.098659
| 0.873648
| 0.861099
| 0.77196
| 0.728256
| 0.728256
| 0.686716
| 0
| 0
| 0.178344
| 3,297
| 133
| 82
| 24.789474
| 0.853082
| 0.105854
| 0
| 0.820896
| 0
| 0
| 0.059113
| 0.059113
| 0
| 0
| 0
| 0
| 0.19403
| 1
| 0.104478
| false
| 0
| 0.029851
| 0
| 0.134328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90a915895001116fec389d50ea45fd5826089526
| 1,202
|
py
|
Python
|
presidio-analyzer/tests/test_assertions.py
|
ammills01/presidio
|
8a73614a26b860bb6520413f5d36c3f602643fa7
|
[
"MIT"
] | null | null | null |
presidio-analyzer/tests/test_assertions.py
|
ammills01/presidio
|
8a73614a26b860bb6520413f5d36c3f602643fa7
|
[
"MIT"
] | 1
|
2018-07-31T08:26:23.000Z
|
2018-07-31T08:26:23.000Z
|
presidio-analyzer/tests/test_assertions.py
|
ammills01/presidio
|
8a73614a26b860bb6520413f5d36c3f602643fa7
|
[
"MIT"
] | null | null | null |
from presidio_analyzer import RecognizerResult
from tests import assert_result_within_score_range
import pytest
ENTITY_TYPE = 'ANY_ENTITY'
def test_assert_result_within_score_range_uses_given_range():
result = RecognizerResult(ENTITY_TYPE, 0, 10, 0.3)
assert_result_within_score_range(result, ENTITY_TYPE, 0, 10, 0.2, 0.4)
result = RecognizerResult(ENTITY_TYPE, 0, 10, 0.1)
assert_result_within_score_range(result, ENTITY_TYPE, 0, 10, 0.05, 0.15)
result = RecognizerResult(ENTITY_TYPE, 0, 10, 0.9)
assert_result_within_score_range(result, ENTITY_TYPE, 0, 10, 0.89, 0.91)
def test_assert_result_within_score_range_uses_given_range_fails():
with pytest.raises(AssertionError):
result = RecognizerResult(ENTITY_TYPE, 0, 10, 0.3)
assert_result_within_score_range(result, ENTITY_TYPE, 0, 10, 0.4, 0.6)
with pytest.raises(AssertionError):
result = RecognizerResult(ENTITY_TYPE, 0, 10, 0)
assert_result_within_score_range(result, ENTITY_TYPE, 0, 10, 0.4, 0.6)
with pytest.raises(AssertionError):
result = RecognizerResult(ENTITY_TYPE, 0, 10, 1)
assert_result_within_score_range(result, ENTITY_TYPE, 0, 10, 0, 0.5)
| 36.424242
| 78
| 0.747088
| 181
| 1,202
| 4.629834
| 0.198895
| 0.155131
| 0.157518
| 0.186158
| 0.869928
| 0.836516
| 0.836516
| 0.750597
| 0.750597
| 0.750597
| 0
| 0.072134
| 0.15807
| 1,202
| 32
| 79
| 37.5625
| 0.755929
| 0
| 0
| 0.333333
| 0
| 0
| 0.008333
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 1
| 0.095238
| false
| 0
| 0.142857
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
90dc5ed559654aa9e6d74f4737b48c9036257558
| 212
|
py
|
Python
|
inheritance/lab/project_multiple_inheritance/teacher.py
|
ivan-yosifov88/python_oop
|
82b210e427cb80dbab3b9a5c3fceab431ee60164
|
[
"MIT"
] | 1
|
2021-05-21T20:28:55.000Z
|
2021-05-21T20:28:55.000Z
|
inheritance/lab/project_multiple_inheritance/teacher.py
|
ivan-yosifov88/python_oop
|
82b210e427cb80dbab3b9a5c3fceab431ee60164
|
[
"MIT"
] | null | null | null |
inheritance/lab/project_multiple_inheritance/teacher.py
|
ivan-yosifov88/python_oop
|
82b210e427cb80dbab3b9a5c3fceab431ee60164
|
[
"MIT"
] | null | null | null |
from project_multiple_inheritance.employee import Employee
from project_multiple_inheritance.person import Person
class Teacher(Person, Employee):
@staticmethod
def teach():
return "teaching..."
| 26.5
| 58
| 0.773585
| 23
| 212
| 6.956522
| 0.608696
| 0.1375
| 0.2375
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15566
| 212
| 8
| 59
| 26.5
| 0.893855
| 0
| 0
| 0
| 0
| 0
| 0.051643
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
293151cf6aa353367e44f9ab3dc3038983bd12d6
| 12
|
py
|
Python
|
nboost/__version__.py
|
ArpitKotecha/nboost
|
2edbc0f0ca00185633ed0b584db156014ddf0ea3
|
[
"Apache-2.0"
] | null | null | null |
nboost/__version__.py
|
ArpitKotecha/nboost
|
2edbc0f0ca00185633ed0b584db156014ddf0ea3
|
[
"Apache-2.0"
] | null | null | null |
nboost/__version__.py
|
ArpitKotecha/nboost
|
2edbc0f0ca00185633ed0b584db156014ddf0ea3
|
[
"Apache-2.0"
] | null | null | null |
"""0.0.7"""
| 6
| 11
| 0.25
| 3
| 12
| 1
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.083333
| 12
| 1
| 12
| 12
| 0
| 0.416667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
296c03b0a615f2203536288d1312d1a715414001
| 107
|
py
|
Python
|
Beginner_Day_1_14/Day1/Day_1.3_Input_Functions/main.py
|
fredtheninja/100-Days-of-Code
|
d66b156d307c14d3b9d93559030d58294cc736bb
|
[
"MIT"
] | null | null | null |
Beginner_Day_1_14/Day1/Day_1.3_Input_Functions/main.py
|
fredtheninja/100-Days-of-Code
|
d66b156d307c14d3b9d93559030d58294cc736bb
|
[
"MIT"
] | null | null | null |
Beginner_Day_1_14/Day1/Day_1.3_Input_Functions/main.py
|
fredtheninja/100-Days-of-Code
|
d66b156d307c14d3b9d93559030d58294cc736bb
|
[
"MIT"
] | null | null | null |
#print("What is your name?")
#input("What is your name?")
print("Hello " + input("What is your name?"))
| 15.285714
| 45
| 0.626168
| 17
| 107
| 3.941176
| 0.411765
| 0.268657
| 0.447761
| 0.626866
| 0.567164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168224
| 107
| 6
| 46
| 17.833333
| 0.752809
| 0.504673
| 0
| 0
| 0
| 0
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
29814acce956c61d9db4b17bae95880687b6430f
| 11,291
|
py
|
Python
|
tests/date/test_diff.py
|
seandstewart/pendulum
|
daa4b936daf3f4dfa7d211aa0ac1e9d82d5401d4
|
[
"MIT"
] | 5,049
|
2016-07-04T07:16:34.000Z
|
2022-03-31T07:41:48.000Z
|
tests/date/test_diff.py
|
seandstewart/pendulum
|
daa4b936daf3f4dfa7d211aa0ac1e9d82d5401d4
|
[
"MIT"
] | 536
|
2016-07-05T22:46:29.000Z
|
2022-03-22T12:41:54.000Z
|
tests/date/test_diff.py
|
seandstewart/pendulum
|
daa4b936daf3f4dfa7d211aa0ac1e9d82d5401d4
|
[
"MIT"
] | 373
|
2016-07-05T19:51:51.000Z
|
2022-03-23T16:57:46.000Z
|
from datetime import date
import pendulum
import pytest
@pytest.fixture
def today():
return pendulum.today().date()
def test_diff_in_years_positive():
dt = pendulum.date(2000, 1, 1)
assert 1 == dt.diff(dt.add(years=1)).in_years()
def test_diff_in_years_negative_with_sign():
dt = pendulum.date(2000, 1, 1)
assert -1 == dt.diff(dt.subtract(years=1), False).in_years()
def test_diff_in_years_negative_no_sign():
dt = pendulum.date(2000, 1, 1)
assert 1 == dt.diff(dt.subtract(years=1)).in_years()
def test_diff_in_years_vs_default_now(today):
assert 1 == today.subtract(years=1).diff().in_years()
def test_diff_in_years_ensure_is_truncated():
dt = pendulum.date(2000, 1, 1)
assert 1 == dt.diff(dt.add(years=1).add(months=7)).in_years()
def test_diff_in_months_positive():
dt = pendulum.date(2000, 1, 1)
assert 13 == dt.diff(dt.add(years=1).add(months=1)).in_months()
def test_diff_in_months_negative_with_sign():
dt = pendulum.date(2000, 1, 1)
assert -11 == dt.diff(dt.subtract(years=1).add(months=1), False).in_months()
def test_diff_in_months_negative_no_sign():
dt = pendulum.date(2000, 1, 1)
assert 11 == dt.diff(dt.subtract(years=1).add(months=1)).in_months()
def test_diff_in_months_vs_default_now(today):
assert 12 == today.subtract(years=1).diff().in_months()
def test_diff_in_months_ensure_is_truncated():
dt = pendulum.date(2000, 1, 1)
assert 1 == dt.diff(dt.add(months=1).add(days=16)).in_months()
def test_diff_in_days_positive():
dt = pendulum.date(2000, 1, 1)
assert 366 == dt.diff(dt.add(years=1)).in_days()
def test_diff_in_days_negative_with_sign():
dt = pendulum.date(2000, 1, 1)
assert -365 == dt.diff(dt.subtract(years=1), False).in_days()
def test_diff_in_days_negative_no_sign():
dt = pendulum.date(2000, 1, 1)
assert 365 == dt.diff(dt.subtract(years=1)).in_days()
def test_diff_in_days_vs_default_now(today):
assert 7 == today.subtract(weeks=1).diff().in_days()
def test_diff_in_weeks_positive():
dt = pendulum.date(2000, 1, 1)
assert 52 == dt.diff(dt.add(years=1)).in_weeks()
def test_diff_in_weeks_negative_with_sign():
dt = pendulum.date(2000, 1, 1)
assert -52 == dt.diff(dt.subtract(years=1), False).in_weeks()
def test_diff_in_weeks_negative_no_sign():
dt = pendulum.date(2000, 1, 1)
assert 52 == dt.diff(dt.subtract(years=1)).in_weeks()
def test_diff_in_weeks_vs_default_now(today):
assert 1 == today.subtract(weeks=1).diff().in_weeks()
def test_diff_in_weeks_ensure_is_truncated():
dt = pendulum.date(2000, 1, 1)
assert 0 == dt.diff(dt.add(weeks=1).subtract(days=1)).in_weeks()
def test_diff_for_humans_now_and_day(today):
assert "1 day ago" == today.subtract(days=1).diff_for_humans()
def test_diff_for_humans_now_and_days(today):
assert "2 days ago" == today.subtract(days=2).diff_for_humans()
def test_diff_for_humans_now_and_nearly_week(today):
assert "6 days ago" == today.subtract(days=6).diff_for_humans()
def test_diff_for_humans_now_and_week(today):
assert "1 week ago" == today.subtract(weeks=1).diff_for_humans()
def test_diff_for_humans_now_and_weeks(today):
assert "2 weeks ago" == today.subtract(weeks=2).diff_for_humans()
def test_diff_for_humans_now_and_nearly_month(today):
assert "3 weeks ago" == today.subtract(weeks=3).diff_for_humans()
def test_diff_for_humans_now_and_month():
with pendulum.test(pendulum.datetime(2016, 3, 1)):
today = pendulum.today().date()
assert "4 weeks ago" == today.subtract(weeks=4).diff_for_humans()
assert "1 month ago" == today.subtract(months=1).diff_for_humans()
with pendulum.test(pendulum.datetime(2017, 2, 28)):
today = pendulum.today().date()
assert "1 month ago" == today.subtract(weeks=4).diff_for_humans()
def test_diff_for_humans_now_and_months(today):
assert "2 months ago" == today.subtract(months=2).diff_for_humans()
def test_diff_for_humans_now_and_nearly_year(today):
assert "11 months ago" == today.subtract(months=11).diff_for_humans()
def test_diff_for_humans_now_and_year(today):
assert "1 year ago" == today.subtract(years=1).diff_for_humans()
def test_diff_for_humans_now_and_years(today):
assert "2 years ago" == today.subtract(years=2).diff_for_humans()
def test_diff_for_humans_now_and_future_day(today):
assert "in 1 day" == today.add(days=1).diff_for_humans()
def test_diff_for_humans_now_and_future_days(today):
assert "in 2 days" == today.add(days=2).diff_for_humans()
def test_diff_for_humans_now_and_nearly_future_week(today):
assert "in 6 days" == today.add(days=6).diff_for_humans()
def test_diff_for_humans_now_and_future_week(today):
assert "in 1 week" == today.add(weeks=1).diff_for_humans()
def test_diff_for_humans_now_and_future_weeks(today):
assert "in 2 weeks" == today.add(weeks=2).diff_for_humans()
def test_diff_for_humans_now_and_nearly_future_month(today):
assert "in 3 weeks" == today.add(weeks=3).diff_for_humans()
def test_diff_for_humans_now_and_future_month():
with pendulum.test(pendulum.datetime(2016, 3, 1)):
today = pendulum.today().date()
assert "in 4 weeks" == today.add(weeks=4).diff_for_humans()
assert "in 1 month" == today.add(months=1).diff_for_humans()
with pendulum.test(pendulum.datetime(2017, 3, 31)):
today = pendulum.today().date()
assert "in 1 month" == today.add(months=1).diff_for_humans()
with pendulum.test(pendulum.datetime(2017, 4, 30)):
today = pendulum.today().date()
assert "in 1 month" == today.add(months=1).diff_for_humans()
with pendulum.test(pendulum.datetime(2017, 1, 31)):
today = pendulum.today().date()
assert "in 1 month" == today.add(weeks=4).diff_for_humans()
def test_diff_for_humans_now_and_future_months(today):
assert "in 2 months" == today.add(months=2).diff_for_humans()
def test_diff_for_humans_now_and_nearly_future_year(today):
assert "in 11 months" == today.add(months=11).diff_for_humans()
def test_diff_for_humans_now_and_future_year(today):
assert "in 1 year" == today.add(years=1).diff_for_humans()
def test_diff_for_humans_now_and_future_years(today):
assert "in 2 years" == today.add(years=2).diff_for_humans()
def test_diff_for_humans_other_and_day(today):
assert "1 day before" == today.diff_for_humans(today.add(days=1))
def test_diff_for_humans_other_and_days(today):
assert "2 days before" == today.diff_for_humans(today.add(days=2))
def test_diff_for_humans_other_and_nearly_week(today):
assert "6 days before" == today.diff_for_humans(today.add(days=6))
def test_diff_for_humans_other_and_week(today):
assert "1 week before" == today.diff_for_humans(today.add(weeks=1))
def test_diff_for_humans_other_and_weeks(today):
assert "2 weeks before" == today.diff_for_humans(today.add(weeks=2))
def test_diff_for_humans_other_and_nearly_month(today):
assert "3 weeks before" == today.diff_for_humans(today.add(weeks=3))
def test_diff_for_humans_other_and_month():
with pendulum.test(pendulum.datetime(2016, 3, 1)):
today = pendulum.today().date()
assert "4 weeks before" == today.diff_for_humans(today.add(weeks=4))
assert "1 month before" == today.diff_for_humans(today.add(months=1))
with pendulum.test(pendulum.datetime(2017, 3, 31)):
today = pendulum.today().date()
assert "1 month before" == today.diff_for_humans(today.add(months=1))
with pendulum.test(pendulum.datetime(2017, 4, 30)):
today = pendulum.today().date()
assert "1 month before" == today.diff_for_humans(today.add(months=1))
with pendulum.test(pendulum.datetime(2017, 1, 31)):
today = pendulum.today().date()
assert "1 month before" == today.diff_for_humans(today.add(weeks=4))
def test_diff_for_humans_other_and_months(today):
assert "2 months before" == today.diff_for_humans(today.add(months=2))
def test_diff_for_humans_other_and_nearly_year(today):
assert "11 months before" == today.diff_for_humans(today.add(months=11))
def test_diff_for_humans_other_and_year(today):
assert "1 year before" == today.diff_for_humans(today.add(years=1))
def test_diff_for_humans_other_and_years(today):
assert "2 years before" == today.diff_for_humans(today.add(years=2))
def test_diff_for_humans_other_and_future_day(today):
assert "1 day after" == today.diff_for_humans(today.subtract(days=1))
def test_diff_for_humans_other_and_future_days(today):
assert "2 days after" == today.diff_for_humans(today.subtract(days=2))
def test_diff_for_humans_other_and_nearly_future_week(today):
assert "6 days after" == today.diff_for_humans(today.subtract(days=6))
def test_diff_for_humans_other_and_future_week(today):
assert "1 week after" == today.diff_for_humans(today.subtract(weeks=1))
def test_diff_for_humans_other_and_future_weeks(today):
assert "2 weeks after" == today.diff_for_humans(today.subtract(weeks=2))
def test_diff_for_humans_other_and_nearly_future_month(today):
assert "3 weeks after" == today.diff_for_humans(today.subtract(weeks=3))
def test_diff_for_humans_other_and_future_month():
with pendulum.test(pendulum.datetime(2016, 3, 1)):
today = pendulum.today().date()
assert "4 weeks after" == today.diff_for_humans(today.subtract(weeks=4))
assert "1 month after" == today.diff_for_humans(today.subtract(months=1))
with pendulum.test(pendulum.datetime(2017, 2, 28)):
today = pendulum.today().date()
assert "1 month after" == today.diff_for_humans(today.subtract(weeks=4))
def test_diff_for_humans_other_and_future_months(today):
assert "2 months after" == today.diff_for_humans(today.subtract(months=2))
def test_diff_for_humans_other_and_nearly_future_year(today):
assert "11 months after" == today.diff_for_humans(today.subtract(months=11))
def test_diff_for_humans_other_and_future_year(today):
assert "1 year after" == today.diff_for_humans(today.subtract(years=1))
def test_diff_for_humans_other_and_future_years(today):
assert "2 years after" == today.diff_for_humans(today.subtract(years=2))
def test_diff_for_humans_absolute_days(today):
assert "2 days" == today.diff_for_humans(today.subtract(days=2), True)
assert "2 days" == today.diff_for_humans(today.add(days=2), True)
def test_diff_for_humans_absolute_weeks(today):
assert "2 weeks" == today.diff_for_humans(today.subtract(weeks=2), True)
assert "2 weeks" == today.diff_for_humans(today.add(weeks=2), True)
def test_diff_for_humans_absolute_months(today):
assert "2 months" == today.diff_for_humans(today.subtract(months=2), True)
assert "2 months" == today.diff_for_humans(today.add(months=2), True)
def test_diff_for_humans_absolute_years(today):
assert "1 year" == today.diff_for_humans(today.subtract(years=1), True)
assert "1 year" == today.diff_for_humans(today.add(years=1), True)
def test_subtraction():
d = pendulum.date(2016, 7, 5)
future_dt = date(2016, 7, 6)
future = d.add(days=1)
assert 86400 == (future - d).total_seconds()
assert 86400 == (future_dt - d).total_seconds()
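# A worked aside (added for illustration): the 86400 above is simply one day
# expressed in seconds, which is what total_seconds() yields for a one-day gap.
#
# assert 24 * 60 * 60 == 86400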
| 31.104683
| 81
| 0.726951
| 1,837
| 11,291
| 4.1546
| 0.040283
| 0.102725
| 0.190776
| 0.08805
| 0.933569
| 0.870414
| 0.799528
| 0.761792
| 0.610849
| 0.454403
| 0
| 0.03986
| 0.140112
| 11,291
| 362
| 82
| 31.190608
| 0.746215
| 0
| 0
| 0.223881
| 0
| 0
| 0.063236
| 0
| 0
| 0
| 0
| 0
| 0.422886
| 1
| 0.343284
| false
| 0
| 0.014925
| 0.004975
| 0.363184
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2986400385edc8701f9de50a0aad2d81f550332d
| 137
|
wsgi
|
Python
|
alerta/app.wsgi
|
sepich/alerta
|
64d4e9f7bca6e41bda2d0553c846fbe68b494ab2
|
[
"Apache-2.0"
] | 1
|
2019-08-15T02:37:21.000Z
|
2019-08-15T02:37:21.000Z
|
alerta/app.wsgi
|
sepich/alerta
|
64d4e9f7bca6e41bda2d0553c846fbe68b494ab2
|
[
"Apache-2.0"
] | null | null | null |
alerta/app.wsgi
|
sepich/alerta
|
64d4e9f7bca6e41bda2d0553c846fbe68b494ab2
|
[
"Apache-2.0"
] | 1
|
2021-03-11T18:19:22.000Z
|
2021-03-11T18:19:22.000Z
|
#!/usr/bin/env python
try:
from alerta import app # alerta >= 5.0
except Exception:
from alerta.app import app # alerta < 5.0
| 19.571429
| 46
| 0.656934
| 22
| 137
| 4.090909
| 0.590909
| 0.222222
| 0.333333
| 0.355556
| 0.377778
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038095
| 0.233577
| 137
| 6
| 47
| 22.833333
| 0.819048
| 0.343066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
d3d99e62d931c2c9a1d65851276326642389e256
| 1,721
|
py
|
Python
|
classification/models/shake_shake.py
|
marketler/GFW_Vessel_Classification
|
fb2ada9aeebe2582b42e940db86674fd4da6fb07
|
[
"Apache-2.0"
] | null | null | null |
classification/models/shake_shake.py
|
marketler/GFW_Vessel_Classification
|
fb2ada9aeebe2582b42e940db86674fd4da6fb07
|
[
"Apache-2.0"
] | null | null | null |
classification/models/shake_shake.py
|
marketler/GFW_Vessel_Classification
|
fb2ada9aeebe2582b42e940db86674fd4da6fb07
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
def shake_shake(x1, x2, is_training):
is_training = tf.constant(is_training, dtype=tf.bool)
# create alpha and beta
batch_size = tf.shape(x1)[0]
# TODO: modified for 1d, make more general or rename
alpha = tf.random_uniform((batch_size, 1, 1))
beta = tf.random_uniform((batch_size, 1, 1))
# shake-shake during training phase
def x_shake():
return beta * x1 + (1 - beta) * x2 + tf.stop_gradient((alpha - beta) * x1 + (beta - alpha) * x2)
# even-even during testing phase
def x_even():
return 0.5 * x1 + 0.5 * x2
return tf.cond(is_training, x_shake, x_even)
def shake_out(x, is_training):
is_training = tf.constant(is_training, dtype=tf.bool)
# create alpha and beta
batch_size = tf.shape(x)[0]
feature_depth = tf.shape(x)[2] # TODO: bulletproof
alpha = tf.random_uniform((batch_size, 1, feature_depth))
# shake-shake during training phase
def x_shake():
return alpha * x, (1 - alpha) * x
# even-even during testing phase
def x_even():
return 0.5 * x, 0.5 * x
return tf.cond(is_training, x_shake, x_even)
def shake_out2(x1, x2, is_training):
is_training = tf.constant(is_training, dtype=tf.bool)
# create alpha and beta
batch_size = tf.shape(x1)[0]
feature_depth = tf.shape(x1)[2] # TODO: bulletproof
# TODO: modified for 1d, make more general or rename
alpha = tf.random_uniform((batch_size, 1, feature_depth))
# shake-shake during training phase
def x_shake():
return alpha * x1 + (1 - alpha) * x2
# even-even during testing phase
def x_even():
return 0.5 * x1 + 0.5 * x2
return tf.cond(is_training, x_shake, x_even)
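# A hedged usage sketch (assumes TF1.x graph mode, consistent with the
# tf.random_uniform calls above); x1 and x2 are two residual-branch outputs
# of shape (batch, time, features):
#
# x1 = tf.placeholder(tf.float32, (None, 16, 8))
# x2 = tf.placeholder(tf.float32, (None, 16, 8))
# y_train = shake_shake(x1, x2, is_training=True)   # random convex mix, shaken gradient
# y_test = shake_shake(x1, x2, is_training=False)   # deterministic 0.5/0.5 average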
| 35.854167
| 104
| 0.649041
| 272
| 1,721
| 3.948529
| 0.180147
| 0.111732
| 0.050279
| 0.074488
| 0.871508
| 0.835196
| 0.835196
| 0.810056
| 0.810056
| 0.769088
| 0
| 0.034901
| 0.234166
| 1,721
| 48
| 105
| 35.854167
| 0.77997
| 0.230099
| 0
| 0.580645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0
| 1
| 0.290323
| false
| 0
| 0.032258
| 0.193548
| 0.612903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
31059162bc4ab354ba80658d6d4d99ea3df53a6e
| 99,624
|
py
|
Python
|
tests/EVM/test_EVMXOR.py
|
mroll/manticore
|
d731562f7761ed9437cce406b24c815303de370c
|
[
"Apache-2.0"
] | null | null | null |
tests/EVM/test_EVMXOR.py
|
mroll/manticore
|
d731562f7761ed9437cce406b24c815303de370c
|
[
"Apache-2.0"
] | null | null | null |
tests/EVM/test_EVMXOR.py
|
mroll/manticore
|
d731562f7761ed9437cce406b24c815303de370c
|
[
"Apache-2.0"
] | null | null | null |
import struct
import unittest
import json
from manticore.platforms import evm
from manticore.core import state
from manticore.core.smtlib import Operators, ConstraintSet
import os
class EVMTest_XOR(unittest.TestCase):
_multiprocess_can_split_ = True
maxDiff=None
def _execute(self, new_vm):
last_returned = None
last_exception = None
try:
new_vm.execute()
except evm.Stop:
last_exception = "STOP"
except evm.NotEnoughGas:
last_exception = "OOG"
except evm.StackUnderflow:
last_exception = "INSUFICIENT STACK"
except evm.InvalidOpcode:
last_exception = "INVALID"
except evm.SelfDestruct:
last_exception = "SUICIDED"
except evm.Return as e:
last_exception = "RETURN"
last_returned = e.data
except evm.Revert:
last_exception = "REVERT"
return last_exception, last_returned
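# A worked aside (added for illustration): the long operand pushed in the
# tests below, 115792089237316195423570985008687907853269984665640564039457584007913129639935,
# equals 2**256 - 1, so the expected stack values are plain 256-bit XOR
# arithmetic:
#
# MAX_UINT256 = 2**256 - 1
# assert MAX_UINT256 ^ MAX_UINT256 == 0           # test_XOR_1
# assert MAX_UINT256 ^ 0 == MAX_UINT256           # test_XOR_2
# assert MAX_UINT256 ^ 1 == MAX_UINT256 - 1       # test_XOR_3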
def test_XOR_1(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0L])
def test_XOR_2(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639935L])
def test_XOR_3(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639934L])
def test_XOR_4(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819983L])
def test_XOR_5(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [112173586448650064316584391727166410732855297644839296413224534507665844338672L])
def test_XOR_6(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639919L])
def test_XOR_7(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639903L])
def test_XOR_8(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639887L])
def test_XOR_9(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008681818263114439236814715352654599495331229921023L])
def test_XOR_10(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639935L])
def test_XOR_11(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_XOR_12(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_XOR_13(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819952L])
def test_XOR_14(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301263L])
def test_XOR_15(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [16])
def test_XOR_16(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [32])
def test_XOR_17(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [48])
def test_XOR_18(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(0)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718912L])
def test_XOR_19(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639934L])
def test_XOR_20(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_XOR_21(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_XOR_22(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819953L])
def test_XOR_23(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301262L])
def test_XOR_24(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [17])
def test_XOR_25(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [33])
def test_XOR_26(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [49])
def test_XOR_27(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(1)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718913L])
def test_XOR_28(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819983L])
def test_XOR_29(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819952L])
def test_XOR_30(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819953L])
def test_XOR_31(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0L])
def test_XOR_32(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [54277541829991966604798899222822456806220305312019014393495742503709279518719L])
def test_XOR_33(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819936L])
def test_XOR_34(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819920L])
def test_XOR_35(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819904L])
def test_XOR_36(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504337864336479446903994433332925807491374665101040L])
def test_XOR_37(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [112173586448650064316584391727166410732855297644839296413224534507665844338672L])
def test_XOR_38(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301263L])
def test_XOR_39(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301262L])
def test_XOR_40(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [54277541829991966604798899222822456806220305312019014393495742503709279518719L])
def test_XOR_41(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0L])
def test_XOR_42(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301279L])
def test_XOR_43(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301295L])
def test_XOR_44(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301311L])
def test_XOR_45(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281527586710570232449627116313036034012829185020175L])
def test_XOR_46(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639919L])
def test_XOR_47(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [16])
def test_XOR_48(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [17])
def test_XOR_49(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819936L])
def test_XOR_50(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301279L])
def test_XOR_51(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_XOR_52(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [48])
def test_XOR_53(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [32])
def test_XOR_54(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(16)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718928L])
def test_XOR_55(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639903L])
def test_XOR_56(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [32])
def test_XOR_57(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [33])
def test_XOR_58(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819920L])
def test_XOR_59(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301295L])
def test_XOR_60(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [48])
def test_XOR_61(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_XOR_62(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [16])
def test_XOR_63(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(32)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718944L])
def test_XOR_64(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008687907853269984665640564039457584007913129639887L])
def test_XOR_65(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [48])
def test_XOR_66(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [49])
def test_XOR_67(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504343953926634992332820282019728792003956564819904L])
def test_XOR_68(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281521497120414687020801267626233049500247285301311L])
def test_XOR_69(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [32])
def test_XOR_70(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [16])
def test_XOR_71(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_XOR_72(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(48)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718960L])
def test_XOR_73(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [115792089237316195423570985008681818263114439236814715352654599495331229921023L])
def test_XOR_74(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718912L])
def test_XOR_75(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718913L])
def test_XOR_76(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [57896044618658097711785492504337864336479446903994433332925807491374665101040L])
def test_XOR_77(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [3618502788666131106986593281527586710570232449627116313036034012829185020175L])
def test_XOR_78(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718928L])
def test_XOR_79(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718944L])
def test_XOR_80(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718960L])
def test_XOR_81(self):
#Make the constraint store
constraints = ConstraintSet()
#make the ethereum world state
world = evm.EVMWorld(constraints)
address=0x222222222222222222222222222222222222200
caller=origin=0x111111111111111111111111111111111111100
price=0
value=10000
bytecode='\x18'
data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
header = { 'coinbase': 0,
'timestamp': 0,
'number': 0,
'difficulty': 0,
'gaslimit': 0,
}
gas = 1000000
new_vm = evm.EVM(constraints, address, origin, price, data, caller, value, bytecode, header, gas=gas, global_storage=world.storage)
new_vm._push(6089590155545428825848686802984512581899718912L)
new_vm._push(6089590155545428825848686802984512581899718912L)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0L])
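    # All of the XOR cases above follow one pattern: push two 256-bit operands,
    # run the XOR opcode ('\x18'), and expect the stack to hold their bitwise
    # xor with pc advanced past the one-byte instruction. A minimal
    # consolidated sketch of the same checks, assuming the evm.EVM/_execute
    # helper API used throughout this class (test_XOR_data_driven is a
    # hypothetical name, not part of the generated suite):
    def test_XOR_data_driven(self):
        constraints = ConstraintSet()
        world = evm.EVMWorld(constraints)
        header = {'coinbase': 0, 'timestamp': 0, 'number': 0,
                  'difficulty': 0, 'gaslimit': 0}
        operands = [0, 1, 16, 32, 48,
                    115792089237316195423570985008687907853269984665640564039457584007913129639935L]
        for a in operands:
            for b in operands:
                new_vm = evm.EVM(constraints,
                                 0x222222222222222222222222222222222222200,
                                 0x111111111111111111111111111111111111100,
                                 0, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA',
                                 0x111111111111111111111111111111111111100,
                                 10000, '\x18', header, gas=1000000,
                                 global_storage=world.storage)
                new_vm._push(b)
                new_vm._push(a)
                last_exception, last_returned = self._execute(new_vm)
                self.assertEqual(last_exception, None)
                self.assertEqual(new_vm.pc, 1)
                # XOR of two in-range 256-bit values never overflows
                self.assertEqual(new_vm.stack, [a ^ b])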
if __name__ == '__main__':
unittest.main()
311f844b691e3585ff08c23e06a7ecee54ff22c5 | 42,044 | py | Python | test_eval_set_selection.py | c60evaporator/seaborn-analyzer | af1088dffa7d4afb1061a9b3ed220c9fc0ed6a71 | ["BSD-3-Clause"] | 38 | 2021-07-31T23:50:53.000Z | 2022-03-26T01:50:32.000Z | test_eval_set_selection.py | c60evaporator/seaborn_analyzer | af1088dffa7d4afb1061a9b3ed220c9fc0ed6a71 | ["BSD-3-Clause"] | 5 | 2021-02-06T10:31:40.000Z | 2021-07-23T14:59:27.000Z | test_eval_set_selection.py | c60evaporator/seaborn-analyzer | af1088dffa7d4afb1061a9b3ed220c9fc0ed6a71 | ["BSD-3-Clause"] | 3 | 2021-08-05T00:43:25.000Z | 2021-11-19T08:47:20.000Z
# # %% Checking the behavior of the eval_set_selection argument (regression) 1
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # target variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# fit_params={'eval_set': None}
# not_opt_params={'objective': 'regression',  # loss function to be minimized
#                 'random_state': 42,  # random seed
#                 'boosting_type': 'gbdt',  # boosting_type
#                 'n_estimators': 100  # maximum number of training cycles (training is cut off early if the metric fails to improve for early_stopping_rounds consecutive rounds)
#                 }
# tuning.optuna_tuning(not_opt_params=not_opt_params, fit_params=fit_params)
# estimator = LGBMRegressor()
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
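# (Note: with fit_params={'eval_set': None} the tuner presumably builds the
#  eval_set per CV fold itself, and tuning.eval_set_selection is forwarded so
#  the regplot helpers can rebuild the same per-fold eval_set when re-fitting.)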
# # %% Checking the behavior of the eval_set_selection argument (regression) 2
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# fit_params={'eval_set': None}
# not_opt_params={'objective': 'regression',  # loss function to minimize
# 'random_state': 42,  # random seed
# 'boosting_type': 'gbdt',  # boosting_type
# 'n_estimators': 100  # maximum number of boosting rounds (stop early if the eval metric fails to improve for early_stopping_rounds consecutive rounds)
# }
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMRegressor())])
# tuning.optuna_tuning(estimator=estimator, not_opt_params=not_opt_params, fit_params=fit_params)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (regression) 3
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# fit_params={'verbose': 0,  # command-line output during training
# 'early_stopping_rounds': 10,  # stop training once the eval metric fails to improve for this many consecutive rounds
# 'eval_metric': 'rmse',  # eval metric used by early_stopping_rounds
# 'eval_set': [(X, y)]
# }
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMRegressor())])
# tuning.optuna_tuning(estimator=estimator, fit_params=fit_params)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection='original_transformed'
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection='original_transformed',
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (regression) 4
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE, eval_set_selection='original')
# fit_params={'verbose': 0,  # command-line output during training
# 'early_stopping_rounds': 10,  # stop training once the eval metric fails to improve for this many consecutive rounds
# 'eval_metric': 'rmse',  # eval metric used by early_stopping_rounds
# 'eval_set': [(X, y)]
# }
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMRegressor())])
# tuning.optuna_tuning(estimator=estimator, fit_params=fit_params)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (regression) 5
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMRegressor())])
# tuning.optuna_tuning(estimator=estimator)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (regression) 6
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE, eval_set_selection='all')
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMRegressor())])
# tuning.optuna_tuning(estimator=estimator)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (regression) 7
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# fit_params={'verbose': 0,  # command-line output during training
# 'early_stopping_rounds': 10,  # stop training once the eval metric fails to improve for this many consecutive rounds
# 'eval_metric': 'rmse',  # eval metric used by early_stopping_rounds
# 'eval_set': [(X, y)]
# }
# tuning.optuna_tuning(fit_params=fit_params)
# estimator = LGBMRegressor()
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (regression) 8
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# tuning.optuna_tuning()
# estimator = LGBMRegressor()
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (regression) 9
# from lightgbm import LGBMRegressor
# from muscle_tuning import LGBMRegressorTuning
# import pandas as pd
# from seaborn_analyzer import regplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# df_reg = pd.read_csv(f'./sample_data/osaka_metropolis_english.csv')
# OBJECTIVE_VARIABLE = 'approval_rate'  # objective variable
# USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # explanatory variables
# y = df_reg[OBJECTIVE_VARIABLE].values
# X = df_reg[USE_EXPLANATORY].values
# tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE, eval_set_selection='train')
# tuning.optuna_tuning()
# estimator = LGBMRegressor()
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# regplot.regression_pred_true(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# scores='mse',
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.average_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# regplot.regression_heat_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=df_reg,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection,
# rounddigit_x1=3,
# rounddigit_x2=3,
# rounddigit_x3=3
# )
# # %% Verify behavior of the eval_set_selection argument (classification) 1
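# # NOTE: classification counterpart of the regression cells above, swapping in
# # LGBMClassifierTuning and the classplot helpers (class_separator_plot, class_proba_plot, roc_plot).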
# import matplotlib.pyplot as plt
# import seaborn as sns
# from lightgbm import LGBMClassifier
# from muscle_tuning import LGBMClassifierTuning
# from seaborn_analyzer import classplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# iris = sns.load_dataset("iris")
# OBJECTIVE_VARIABLE = 'species'  # objective variable
# USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
# y = iris[OBJECTIVE_VARIABLE].values
# X = iris[USE_EXPLANATORY].values
# tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# fit_params={'eval_set': None}
# not_opt_params={'objective': None,  # loss function to minimize
# 'random_state': 42,  # random seed
# 'boosting_type': 'gbdt',  # boosting_type
# 'n_estimators': 100  # maximum number of boosting rounds (stop early if the eval metric fails to improve for early_stopping_rounds consecutive rounds)
# }
# tuning.optuna_tuning(not_opt_params=not_opt_params, fit_params=fit_params)
# estimator = LGBMClassifier()
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# proba_type='imshow',
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# # %% Verify behavior of the eval_set_selection argument (classification) 2
# import matplotlib.pyplot as plt
# import seaborn as sns
# from lightgbm import LGBMClassifier
# from muscle_tuning import LGBMClassifierTuning
# from seaborn_analyzer import classplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# iris = sns.load_dataset("iris")
# OBJECTIVE_VARIABLE = 'species'  # objective variable
# USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
# y = iris[OBJECTIVE_VARIABLE].values
# X = iris[USE_EXPLANATORY].values
# tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# fit_params={'eval_set': None}
# not_opt_params={'objective': None,  # loss function to minimize
# 'random_state': 42,  # random seed
# 'boosting_type': 'gbdt',  # boosting_type
# 'n_estimators': 100  # maximum number of boosting rounds (stop early if the eval metric fails to improve for early_stopping_rounds consecutive rounds)
# }
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMClassifier())])
# tuning.optuna_tuning(estimator=estimator, not_opt_params=not_opt_params, fit_params=fit_params)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# proba_type='imshow',
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# # %% Verify behavior of the eval_set_selection argument (classification) 3
# import matplotlib.pyplot as plt
# import seaborn as sns
# from lightgbm import LGBMClassifier
# from muscle_tuning import LGBMClassifierTuning
# from seaborn_analyzer import classplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# iris = sns.load_dataset("iris")
# OBJECTIVE_VARIABLE = 'species'  # objective variable
# USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
# y = iris[OBJECTIVE_VARIABLE].values
# X = iris[USE_EXPLANATORY].values
# tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# fit_params={'verbose': 0,  # command-line output during training
# 'early_stopping_rounds': 10,  # stop training once the eval metric fails to improve for this many consecutive rounds
# 'eval_metric': 'multi_logloss',  # eval metric used by early_stopping_rounds
# 'eval_set': [(X, y)]
# }
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMClassifier())])
# tuning.optuna_tuning(estimator=estimator, fit_params=fit_params)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# proba_type='imshow',
# fit_params=tuning.fit_params,
# eval_set_selection='original_transformed'
# )
# plt.show()
# classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection='original_transformed'
# )
# # %% Verify behavior of the eval_set_selection argument (classification) 4
# import matplotlib.pyplot as plt
# import seaborn as sns
# from lightgbm import LGBMClassifier
# from muscle_tuning import LGBMClassifierTuning
# from seaborn_analyzer import classplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# iris = sns.load_dataset("iris")
# OBJECTIVE_VARIABLE = 'species'  # objective variable
# USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
# y = iris[OBJECTIVE_VARIABLE].values
# X = iris[USE_EXPLANATORY].values
# tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE, eval_set_selection='original')
# fit_params={'verbose': 0,  # command-line output during training
# 'early_stopping_rounds': 10,  # stop training once the eval metric fails to improve for this many consecutive rounds
# 'eval_metric': 'multi_logloss',  # eval metric used by early_stopping_rounds
# 'eval_set': [(X, y)]
# }
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMClassifier())])
# tuning.optuna_tuning(estimator=estimator, fit_params=fit_params)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# proba_type='imshow',
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# # %% Verify behavior of the eval_set_selection argument (classification) 5
# import matplotlib.pyplot as plt
# import seaborn as sns
# from lightgbm import LGBMClassifier
# from muscle_tuning import LGBMClassifierTuning
# from seaborn_analyzer import classplot
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import StandardScaler
# iris = sns.load_dataset("iris")
# OBJECTIVE_VARIABLE = 'species'  # objective variable
# USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
# y = iris[OBJECTIVE_VARIABLE].values
# X = iris[USE_EXPLANATORY].values
# tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
# estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMClassifier())])
# tuning.optuna_tuning(estimator=estimator)
# params_after = {}
# params_after.update(tuning.best_params)
# params_after.update(tuning.not_opt_params)
# best_estimator = estimator.set_params(**params_after)
# classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# pair_sigmarange=0.5,
# proba_type='imshow',
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# plt.show()
# classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
# y=OBJECTIVE_VARIABLE, data=iris,
# cv=tuning.cv,
# fit_params=tuning.fit_params,
# eval_set_selection=tuning.eval_set_selection
# )
# %% Verify behavior of the eval_set_selection argument (classification) 6
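# NOTE: from this cell on the code is active (not commented out); here eval_set_selection='all'
# is set at construction and reused via tuning.eval_set_selection in each plot call below.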
import matplotlib.pyplot as plt
import seaborn as sns
from lightgbm import LGBMClassifier
from muscle_tuning import LGBMClassifierTuning
from seaborn_analyzer import classplot
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
iris = sns.load_dataset("iris")
OBJECTIVE_VARIABLE = 'species'  # objective variable
USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
y = iris[OBJECTIVE_VARIABLE].values
X = iris[USE_EXPLANATORY].values
tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE, eval_set_selection='all')
estimator = Pipeline([("scaler", StandardScaler()), ("lgbm", LGBMClassifier())])
tuning.optuna_tuning(estimator=estimator)
params_after = {}
params_after.update(tuning.best_params)
params_after.update(tuning.not_opt_params)
best_estimator = estimator.set_params(**params_after)
classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
proba_type='imshow',
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
# %% Verify behavior of the eval_set_selection argument (classification) 7
import matplotlib.pyplot as plt
import seaborn as sns
from lightgbm import LGBMClassifier
from muscle_tuning import LGBMClassifierTuning
from seaborn_analyzer import classplot
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
iris = sns.load_dataset("iris")
OBJECTIVE_VARIABLE = 'species'  # objective variable
USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
y = iris[OBJECTIVE_VARIABLE].values
X = iris[USE_EXPLANATORY].values
tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
fit_params={'verbose': 0,  # command-line output during training
'early_stopping_rounds': 10,  # stop training once the eval metric fails to improve for this many consecutive rounds
'eval_metric': 'multi_logloss',  # eval metric used by early_stopping_rounds
'eval_set': [(X, y)]
}
tuning.optuna_tuning(fit_params=fit_params)
estimator = LGBMClassifier()
params_after = {}
params_after.update(tuning.best_params)
params_after.update(tuning.not_opt_params)
best_estimator = estimator.set_params(**params_after)
classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
proba_type='imshow',
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
# %% Verify behavior of the eval_set_selection argument (classification) 8
import matplotlib.pyplot as plt
import seaborn as sns
from lightgbm import LGBMClassifier
from muscle_tuning import LGBMClassifierTuning
from seaborn_analyzer import classplot
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
iris = sns.load_dataset("iris")
OBJECTIVE_VARIABLE = 'species'  # objective variable
USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
y = iris[OBJECTIVE_VARIABLE].values
X = iris[USE_EXPLANATORY].values
tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
tuning.optuna_tuning()
estimator = LGBMClassifier()
params_after = {}
params_after.update(tuning.best_params)
params_after.update(tuning.not_opt_params)
best_estimator = estimator.set_params(**params_after)
classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
proba_type='imshow',
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
# %% Verify behavior of the eval_set_selection argument (classification) 9
import matplotlib.pyplot as plt
import seaborn as sns
from lightgbm import LGBMClassifier
from muscle_tuning import LGBMClassifierTuning
from seaborn_analyzer import classplot
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
iris = sns.load_dataset("iris")
OBJECTIVE_VARIABLE = 'species'  # objective variable
USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
y = iris[OBJECTIVE_VARIABLE].values
X = iris[USE_EXPLANATORY].values
tuning = LGBMClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE, eval_set_selection='train')
tuning.optuna_tuning()
estimator = LGBMClassifier()
params_after = {}
params_after.update(tuning.best_params)
params_after.update(tuning.not_opt_params)
best_estimator = estimator.set_params(**params_after)
classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
proba_type='imshow',
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
y=OBJECTIVE_VARIABLE, data=iris,
cv=tuning.cv,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
# %% Verify behavior of the eval_set_selection argument (classification, XGBoost)
import matplotlib.pyplot as plt
import seaborn as sns
from lightgbm import LGBMClassifier
from xgboost import XGBClassifier
from muscle_tuning import LGBMClassifierTuning, XGBClassifierTuning
from seaborn_analyzer import classplot
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler, LabelEncoder
iris = sns.load_dataset("iris")
OBJECTIVE_VARIABLE = 'species'  # objective variable
USE_EXPLANATORY = ['petal_width', 'petal_length', 'sepal_width', 'sepal_length']  # explanatory variables
y = iris[OBJECTIVE_VARIABLE].values
X = iris[USE_EXPLANATORY].values
tuning = XGBClassifierTuning(X, y, USE_EXPLANATORY, y_colname=OBJECTIVE_VARIABLE)
fit_params={'eval_set': None}
not_opt_params={'objective': None,  # loss function to minimize
'random_state': 42,  # random seed
'booster': 'gbtree',  # booster
'n_estimators': 10000,  # maximum number of boosting rounds (stop early if the eval metric fails to improve for early_stopping_rounds consecutive rounds)
'use_label_encoder': False  # suppress the UserWarning "The use of label encoder in XGBClassifier is deprecated"
}
tuning.optuna_tuning(not_opt_params=not_opt_params, fit_params=fit_params, n_trials=5)
estimator = XGBClassifier()
le = LabelEncoder()
le.fit(y)
iris['transformed'] = le.transform(y)
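# XGBClassifier requires numeric class labels, so the string species labels are
# label-encoded into a 'transformed' column that is used as the target below.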
params_after = {}
params_after.update(tuning.best_params)
params_after.update(tuning.not_opt_params)
best_estimator = estimator.set_params(**params_after)
classplot.class_separator_plot(best_estimator, x=USE_EXPLANATORY,
y='transformed', data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.class_proba_plot(best_estimator, x=USE_EXPLANATORY,
y='transformed', data=iris,
cv=tuning.cv,
pair_sigmarange=0.5,
proba_type='imshow',
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
plt.show()
classplot.roc_plot(best_estimator, x=USE_EXPLANATORY,
y='transformed', data=iris,
cv=tuning.cv,
fit_params=tuning.fit_params,
eval_set_selection=tuning.eval_set_selection
)
| 45.403888
| 122
| 0.619137
| 4,412
| 42,044
| 5.580462
| 0.036945
| 0.053735
| 0.075383
| 0.040291
| 0.984119
| 0.982413
| 0.982413
| 0.982413
| 0.982413
| 0.982413
| 0
| 0.007374
| 0.287175
| 42,044
| 926
| 123
| 45.403888
| 0.814147
| 0.73121
| 0
| 0.818584
| 0
| 0
| 0.048056
| 0.001987
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.159292
| 0
| 0.159292
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | 319138af56056e7bff714dee3190bfca6676f0bf | 102 | py | Python | zbexample/utils.py | rular099/testbind | 00672033f846d5b73356375787ddbc69c5567a4b | ["MIT"] | null | null | null | zbexample/utils.py | rular099/testbind | 00672033f846d5b73356375787ddbc69c5567a4b | ["MIT"] | null | null | null | zbexample/utils.py | rular099/testbind | 00672033f846d5b73356375787ddbc69c5567a4b | ["MIT"] | null | null | null |
from scipy import signal
def detrend(x, *args, **kwargs):
    """Thin pass-through wrapper around scipy.signal.detrend."""
    return signal.detrend(x, *args, **kwargs)
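# A minimal usage sketch of the wrapper above (hypothetical data; assumes numpy is installed):
import numpy as np
t = np.arange(100.0)
x = 0.5 * t + np.random.randn(100)  # linear trend plus noise
x_flat = detrend(x)  # scipy.signal.detrend removes a linear trend by default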
| 20.4
| 44
| 0.705882
| 15
| 102
| 4.8
| 0.666667
| 0.222222
| 0.333333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 102
| 4
| 45
| 25.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0 | 8 | 31da00d36c5d99c33f82dcf1ca8af9113b0fc66a | 307,954 | py | Python | mailchimp_marketing/api/ecommerce_api.py | michaelschem/mailchimp-marketing-python | c4685d7de567f853214e0244488b39f6bf3b654f | ["Apache-2.0"] | null | null | null | mailchimp_marketing/api/ecommerce_api.py | michaelschem/mailchimp-marketing-python | c4685d7de567f853214e0244488b39f6bf3b654f | ["Apache-2.0"] | null | null | null | mailchimp_marketing/api/ecommerce_api.py | michaelschem/mailchimp-marketing-python | c4685d7de567f853214e0244488b39f6bf3b654f | ["Apache-2.0"] | null | null | null |
# coding: utf-8
"""
Mailchimp Marketing API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 3.0.70
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from mailchimp_marketing.api_client import ApiClient
class EcommerceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client):
self.api_client = api_client
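# Hypothetical usage sketch (client configuration details assumed, not part of this module):
#   api = EcommerceApi(ApiClient())
#   api.delete_store('store_1')  # synchronous by default
#   thread = api.delete_store('store_1', async_req=True)  # returns the request thread; call thread.get()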
def delete_store(self, store_id, **kwargs): # noqa: E501
"""Delete store # noqa: E501
Delete a store. Deleting a store will also delete any associated subresources, including Customers, Orders, Products, and Carts. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_store_with_http_info(store_id, **kwargs) # noqa: E501
else:
(data) = self.delete_store_with_http_info(store_id, **kwargs) # noqa: E501
return data
def delete_store_with_http_info(self, store_id, **kwargs): # noqa: E501
"""Delete store # noqa: E501
Delete a store. Deleting a store will also delete any associated subresources, including Customers, Orders, Products, and Carts. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store_with_http_info(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_store" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_store_cart(self, store_id, cart_id, **kwargs): # noqa: E501
"""Delete cart # noqa: E501
Delete a cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store_cart(store_id, cart_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_store_cart_with_http_info(store_id, cart_id, **kwargs) # noqa: E501
else:
(data) = self.delete_store_cart_with_http_info(store_id, cart_id, **kwargs) # noqa: E501
return data
def delete_store_cart_with_http_info(self, store_id, cart_id, **kwargs): # noqa: E501
"""Delete cart # noqa: E501
Delete a cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store_cart_with_http_info(store_id, cart_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_store_cart" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'cart_id' is set
if ('cart_id' not in params or
params['cart_id'] is None):
raise ValueError("Missing the required parameter `cart_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_cart_line_item(self, store_id, cart_id, line_id, **kwargs): # noqa: E501
"""Delete cart line item # noqa: E501
Delete a specific cart line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_cart_line_item(store_id, cart_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param str line_id: The id for the line item of a cart. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_cart_line_item_with_http_info(store_id, cart_id, line_id, **kwargs) # noqa: E501
else:
(data) = self.delete_cart_line_item_with_http_info(store_id, cart_id, line_id, **kwargs) # noqa: E501
return data
def delete_cart_line_item_with_http_info(self, store_id, cart_id, line_id, **kwargs): # noqa: E501
"""Delete cart line item # noqa: E501
Delete a specific cart line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_cart_line_item_with_http_info(store_id, cart_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param str line_id: The id for the line item of a cart. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id', 'line_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_cart_line_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'cart_id' is set
if ('cart_id' not in params or
params['cart_id'] is None):
raise ValueError("Missing the required parameter `cart_id` when calling ``") # noqa: E501
# verify the required parameter 'line_id' is set
if ('line_id' not in params or
params['line_id'] is None):
raise ValueError("Missing the required parameter `line_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
if 'line_id' in params:
path_params['line_id'] = params['line_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}/lines/{line_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_store_customer(self, store_id, customer_id, **kwargs): # noqa: E501
"""Delete customer # noqa: E501
Delete a customer from a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store_customer(store_id, customer_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_store_customer_with_http_info(store_id, customer_id, **kwargs) # noqa: E501
else:
(data) = self.delete_store_customer_with_http_info(store_id, customer_id, **kwargs) # noqa: E501
return data
def delete_store_customer_with_http_info(self, store_id, customer_id, **kwargs): # noqa: E501
"""Delete customer # noqa: E501
Delete a customer from a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store_customer_with_http_info(store_id, customer_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'customer_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_store_customer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params or
params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/customers/{customer_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_order(self, store_id, order_id, **kwargs): # noqa: E501
"""Delete order # noqa: E501
Delete an order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_order(store_id, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_order_with_http_info(store_id, order_id, **kwargs) # noqa: E501
else:
(data) = self.delete_order_with_http_info(store_id, order_id, **kwargs) # noqa: E501
return data
def delete_order_with_http_info(self, store_id, order_id, **kwargs): # noqa: E501
"""Delete order # noqa: E501
Delete an order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_order_with_http_info(store_id, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_order" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_order_line_item(self, store_id, order_id, line_id, **kwargs): # noqa: E501
"""Delete order line item # noqa: E501
Delete a specific order line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_order_line_item(store_id, order_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param str line_id: The id for the line item of an order. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_order_line_item_with_http_info(store_id, order_id, line_id, **kwargs) # noqa: E501
else:
(data) = self.delete_order_line_item_with_http_info(store_id, order_id, line_id, **kwargs) # noqa: E501
return data
def delete_order_line_item_with_http_info(self, store_id, order_id, line_id, **kwargs): # noqa: E501
"""Delete order line item # noqa: E501
Delete a specific order line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_order_line_item_with_http_info(store_id, order_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param str line_id: The id for the line item of an order. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id', 'line_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_order_line_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling ``") # noqa: E501
# verify the required parameter 'line_id' is set
if ('line_id' not in params or
params['line_id'] is None):
raise ValueError("Missing the required parameter `line_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
if 'line_id' in params:
path_params['line_id'] = params['line_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}/lines/{line_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_store_product(self, store_id, product_id, **kwargs): # noqa: E501
"""Delete product # noqa: E501
Delete a product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store_product(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_store_product_with_http_info(store_id, product_id, **kwargs) # noqa: E501
else:
(data) = self.delete_store_product_with_http_info(store_id, product_id, **kwargs) # noqa: E501
return data
def delete_store_product_with_http_info(self, store_id, product_id, **kwargs): # noqa: E501
"""Delete product # noqa: E501
Delete a product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_store_product_with_http_info(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_store_product" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
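# The pair above shows the dispatch pattern used throughout this class: the
# plain wrapper forces `_return_http_data_only=True` and delegates to the
# `_with_http_info` variant. Calling the variant directly, without that flag,
# yields the full (data, status, headers) tuple from `call_api`. A minimal
# sketch, assuming a configured `api` instance and a hypothetical product id:
#
#     >>> data, status, headers = api.delete_store_product_with_http_info(
#     ...     'store_1', 'prod_2')
#     >>> status  # data is None here because response_type is None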
def delete_product_image(self, store_id, product_id, image_id, **kwargs): # noqa: E501
"""Delete product image # noqa: E501
Delete a product image. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_product_image(store_id, product_id, image_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str image_id: The id for the product image. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_product_image_with_http_info(store_id, product_id, image_id, **kwargs) # noqa: E501
else:
data = self.delete_product_image_with_http_info(store_id, product_id, image_id, **kwargs) # noqa: E501
return data
def delete_product_image_with_http_info(self, store_id, product_id, image_id, **kwargs): # noqa: E501
"""Delete product image # noqa: E501
Delete a product image. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_product_image_with_http_info(store_id, product_id, image_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str image_id: The id for the product image. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'image_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_product_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
# verify the required parameter 'image_id' is set
if ('image_id' not in params or
params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
if 'image_id' in params:
path_params['image_id'] = params['image_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/images/{image_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_product_variant(self, store_id, product_id, variant_id, **kwargs): # noqa: E501
"""Delete product variant # noqa: E501
Delete a product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_product_variant(store_id, product_id, variant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_product_variant_with_http_info(store_id, product_id, variant_id, **kwargs) # noqa: E501
else:
data = self.delete_product_variant_with_http_info(store_id, product_id, variant_id, **kwargs) # noqa: E501
return data
def delete_product_variant_with_http_info(self, store_id, product_id, variant_id, **kwargs): # noqa: E501
"""Delete product variant # noqa: E501
Delete a product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_product_variant_with_http_info(store_id, product_id, variant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'variant_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_product_variant" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
# verify the required parameter 'variant_id' is set
if ('variant_id' not in params or
params['variant_id'] is None):
raise ValueError("Missing the required parameter `variant_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
if 'variant_id' in params:
path_params['variant_id'] = params['variant_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/variants/{variant_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_promo_code(self, store_id, promo_rule_id, promo_code_id, **kwargs): # noqa: E501
"""Delete promo code # noqa: E501
Delete a promo code from a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_promo_code(store_id, promo_rule_id, promo_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str promo_code_id: The id for the promo code of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, **kwargs) # noqa: E501
else:
data = self.delete_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, **kwargs) # noqa: E501
return data
def delete_promo_code_with_http_info(self, store_id, promo_rule_id, promo_code_id, **kwargs): # noqa: E501
"""Delete promo code # noqa: E501
Delete a promo code from a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str promo_code_id: The id for the promo code of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'promo_rule_id', 'promo_code_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_promo_code" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'promo_rule_id' is set
if ('promo_rule_id' not in params or
params['promo_rule_id'] is None):
raise ValueError("Missing the required parameter `promo_rule_id` when calling ``") # noqa: E501
# verify the required parameter 'promo_code_id' is set
if ('promo_code_id' not in params or
params['promo_code_id'] is None):
raise ValueError("Missing the required parameter `promo_code_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
if 'promo_code_id' in params:
path_params['promo_code_id'] = params['promo_code_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}/promo-codes/{promo_code_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
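# A hedged sketch of the nested promo-code delete above. The ids are
# hypothetical and must match the path hierarchy
# /promo-rules/{promo_rule_id}/promo-codes/{promo_code_id}, i.e. the code
# belongs to the given rule within the given store:
#
#     >>> api.delete_promo_code('store_1', 'rule_2', 'code_3')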
def delete_promo_rule(self, store_id, promo_rule_id, **kwargs): # noqa: E501
"""Delete promo rule # noqa: E501
Delete a promo rule from a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_promo_rule(store_id, promo_rule_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_promo_rule_with_http_info(store_id, promo_rule_id, **kwargs) # noqa: E501
else:
data = self.delete_promo_rule_with_http_info(store_id, promo_rule_id, **kwargs) # noqa: E501
return data
def delete_promo_rule_with_http_info(self, store_id, promo_rule_id, **kwargs): # noqa: E501
"""Delete promo rule # noqa: E501
Delete a promo rule from a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_promo_rule_with_http_info(store_id, promo_rule_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'promo_rule_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_promo_rule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'promo_rule_id' is set
if ('promo_rule_id' not in params or
params['promo_rule_id'] is None):
raise ValueError("Missing the required parameter `promo_rule_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def orders(self, **kwargs): # noqa: E501
"""List account orders # noqa: E501
Get information about an account's orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.orders(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str campaign_id: Restrict results to orders with a specific `campaign_id` value.
:param str outreach_id: Restrict results to orders with a specific `outreach_id` value.
:param str customer_id: Restrict results to orders made by a specific customer.
:param bool has_outreach: Restrict results to orders that have an outreach attached. For example, an email campaign or Facebook ad.
:return: Orders
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.orders_with_http_info(**kwargs) # noqa: E501
else:
data = self.orders_with_http_info(**kwargs) # noqa: E501
return data
def orders_with_http_info(self, **kwargs): # noqa: E501
"""List account orders # noqa: E501
Get information about an account's orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.orders_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str campaign_id: Restrict results to orders with a specific `campaign_id` value.
:param str outreach_id: Restrict results to orders with a specific `outreach_id` value.
:param str customer_id: Restrict results to orders made by a specific customer.
:param bool has_outreach: Restrict results to orders that have an outreach attached. For example, an email campaign or Facebook ad.
:return: Orders
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'exclude_fields', 'count', 'offset', 'campaign_id', 'outreach_id', 'customer_id', 'has_outreach'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method orders" % key
)
params[key] = val
del params['kwargs']
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'campaign_id' in params:
query_params.append(('campaign_id', params['campaign_id'])) # noqa: E501
if 'outreach_id' in params:
query_params.append(('outreach_id', params['outreach_id'])) # noqa: E501
if 'customer_id' in params:
query_params.append(('customer_id', params['customer_id'])) # noqa: E501
if 'has_outreach' in params:
query_params.append(('has_outreach', params['has_outreach'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/orders', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Orders', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
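# A hedged pagination sketch for the account-wide order listing above, walking
# the collection with the documented `count`/`offset` parameters. The `orders`
# and `total_items` attributes are assumptions based on the documented
# `Orders` response type, and `process` is a hypothetical handler:
#
#     >>> page_size, offset = 100, 0
#     >>> while True:
#     ...     page = api.orders(count=page_size, offset=offset)
#     ...     for order in page.orders:
#     ...         process(order)
#     ...     offset += page_size
#     ...     if offset >= page.total_items:
#     ...         break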
def stores(self, **kwargs): # noqa: E501
"""List stores # noqa: E501
Get information about all stores in the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.stores(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: EcommerceStores
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.stores_with_http_info(**kwargs) # noqa: E501
else:
data = self.stores_with_http_info(**kwargs) # noqa: E501
return data
def stores_with_http_info(self, **kwargs): # noqa: E501
"""List stores # noqa: E501
Get information about all stores in the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.stores_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: EcommerceStores
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stores" % key
)
params[key] = val
del params['kwargs']
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceStores', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
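# The guard above rejects `count` values over 1000 locally, before any HTTP
# request is made. A minimal sketch against a configured `api` instance:
#
#     >>> api.stores(count=2000)  # raises ValueError without calling the API
#     >>> api.stores(count=1000)  # largest accepted page size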
def get_store(self, store_id, **kwargs): # noqa: E501
"""Get store info # noqa: E501
Get information about a specific store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceStore
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_store_with_http_info(store_id, **kwargs) # noqa: E501
else:
data = self.get_store_with_http_info(store_id, **kwargs) # noqa: E501
return data
def get_store_with_http_info(self, store_id, **kwargs): # noqa: E501
"""Get store info # noqa: E501
Get information about a specific store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_with_http_info(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceStore
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_store" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceStore', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
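# A hedged sketch of the `fields`/`exclude_fields` projection accepted above.
# Both take lists that are serialized as comma-separated values (the 'csv'
# collection format set for them), and dot notation reaches into sub-objects;
# the field names here are illustrative assumptions:
#
#     >>> store = api.get_store('store_1', fields=['id', 'name', 'address.city'])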
def get_store_carts(self, store_id, **kwargs): # noqa: E501
"""List carts # noqa: E501
Get information about a store's carts. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_carts(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: Carts
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_store_carts_with_http_info(store_id, **kwargs) # noqa: E501
else:
data = self.get_store_carts_with_http_info(store_id, **kwargs) # noqa: E501
return data
def get_store_carts_with_http_info(self, store_id, **kwargs): # noqa: E501
"""List carts # noqa: E501
Get information about a store's carts. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_carts_with_http_info(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: Carts
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_store_carts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Carts', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_store_cart(self, store_id, cart_id, **kwargs): # noqa: E501
"""Get cart info # noqa: E501
Get information about a specific cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_cart(store_id, cart_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceCart
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_store_cart_with_http_info(store_id, cart_id, **kwargs) # noqa: E501
else:
data = self.get_store_cart_with_http_info(store_id, cart_id, **kwargs) # noqa: E501
return data
def get_store_cart_with_http_info(self, store_id, cart_id, **kwargs): # noqa: E501
"""Get cart info # noqa: E501
Get information about a specific cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_cart_with_http_info(store_id, cart_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceCart
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_store_cart" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'cart_id' is set
if ('cart_id' not in params or
params['cart_id'] is None):
raise ValueError("Missing the required parameter `cart_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCart', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_cart_line_items(self, store_id, cart_id, **kwargs): # noqa: E501
"""List cart line items # noqa: E501
Get information about a cart's line items. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_cart_line_items(store_id, cart_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: CartLines
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_cart_line_items_with_http_info(store_id, cart_id, **kwargs) # noqa: E501
else:
data = self.get_all_cart_line_items_with_http_info(store_id, cart_id, **kwargs) # noqa: E501
return data
def get_all_cart_line_items_with_http_info(self, store_id, cart_id, **kwargs): # noqa: E501
"""List cart line items # noqa: E501
Get information about a cart's line items. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_cart_line_items_with_http_info(store_id, cart_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: CartLines
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_cart_line_items" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'cart_id' is set
if ('cart_id' not in params or
params['cart_id'] is None):
raise ValueError("Missing the required parameter `cart_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}/lines', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CartLines', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_cart_line_item(self, store_id, cart_id, line_id, **kwargs): # noqa: E501
"""Get cart line item # noqa: E501
Get information about a specific cart line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cart_line_item(store_id, cart_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param str line_id: The id for the line item of a cart. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceCartLineItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_cart_line_item_with_http_info(store_id, cart_id, line_id, **kwargs) # noqa: E501
else:
data = self.get_cart_line_item_with_http_info(store_id, cart_id, line_id, **kwargs) # noqa: E501
return data
def get_cart_line_item_with_http_info(self, store_id, cart_id, line_id, **kwargs): # noqa: E501
"""Get cart line item # noqa: E501
Get information about a specific cart line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cart_line_item_with_http_info(store_id, cart_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param str line_id: The id for the line item of a cart. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceCartLineItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id', 'line_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_cart_line_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'cart_id' is set
if ('cart_id' not in params or
params['cart_id'] is None):
raise ValueError("Missing the required parameter `cart_id` when calling ``") # noqa: E501
# verify the required parameter 'line_id' is set
if ('line_id' not in params or
params['line_id'] is None):
raise ValueError("Missing the required parameter `line_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
if 'line_id' in params:
path_params['line_id'] = params['line_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}/lines/{line_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCartLineItem', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_store_customers(self, store_id, **kwargs): # noqa: E501
"""List customers # noqa: E501
Get information about a store's customers. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_store_customers(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str email_address: Restrict the response to customers with this email address.
:return: Customers
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_store_customers_with_http_info(store_id, **kwargs) # noqa: E501
else:
data = self.get_all_store_customers_with_http_info(store_id, **kwargs) # noqa: E501
return data
def get_all_store_customers_with_http_info(self, store_id, **kwargs): # noqa: E501
"""List customers # noqa: E501
Get information about a store's customers. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_store_customers_with_http_info(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str email_address: Restrict the response to customers with this email address.
:return: Customers
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'fields', 'exclude_fields', 'count', 'offset', 'email_address'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_store_customers" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'email_address' in params:
query_params.append(('email_address', params['email_address'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/customers', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Customers', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
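# A hedged sketch of the `email_address` filter above; the address is a
# hypothetical placeholder and the `customers` attribute is an assumption
# based on the documented `Customers` response type:
#
#     >>> result = api.get_all_store_customers('store_1', email_address='jane@example.com')
#     >>> matches = result.customers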
def get_store_customer(self, store_id, customer_id, **kwargs): # noqa: E501
"""Get customer info # noqa: E501
Get information about a specific customer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_customer(store_id, customer_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_store_customer_with_http_info(store_id, customer_id, **kwargs) # noqa: E501
else:
data = self.get_store_customer_with_http_info(store_id, customer_id, **kwargs) # noqa: E501
return data
def get_store_customer_with_http_info(self, store_id, customer_id, **kwargs): # noqa: E501
"""Get customer info # noqa: E501
Get information about a specific customer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_customer_with_http_info(store_id, customer_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'customer_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_store_customer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params or
params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/customers/{customer_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCustomer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_store_orders(self, store_id, **kwargs): # noqa: E501
"""List orders # noqa: E501
Get information about a store's orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_orders(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str customer_id: Restrict results to orders made by a specific customer.
:param bool has_outreach: Restrict results to orders that have an outreach attached. For example, an email campaign or Facebook ad.
:param str campaign_id: Restrict results to orders with a specific `campaign_id` value.
:param str outreach_id: Restrict results to orders with a specific `outreach_id` value.
:return: Orders1
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_store_orders_with_http_info(store_id, **kwargs) # noqa: E501
else:
data = self.get_store_orders_with_http_info(store_id, **kwargs) # noqa: E501
return data
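# Usage sketch (illustrative; ids are placeholders). The second form shows
# the async pattern from the docstring above:
#
#     orders = api.get_store_orders('store_001', campaign_id='cmp_001')
#
#     thread = api.get_store_orders('store_001', async_req=True)
#     orders = thread.get()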
def get_store_orders_with_http_info(self, store_id, **kwargs): # noqa: E501
"""List orders # noqa: E501
Get information about a store's orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_orders_with_http_info(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str customer_id: Restrict results to orders made by a specific customer.
:param bool has_outreach: Restrict results to orders that have an outreach attached. For example, an email campaign or Facebook ad.
:param str campaign_id: Restrict results to orders with a specific `campaign_id` value.
:param str outreach_id: Restrict results to orders with a specific `outreach_id` value.
:return: Orders1
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'fields', 'exclude_fields', 'count', 'offset', 'customer_id', 'has_outreach', 'campaign_id', 'outreach_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_store_orders" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'customer_id' in params:
query_params.append(('customer_id', params['customer_id'])) # noqa: E501
if 'has_outreach' in params:
query_params.append(('has_outreach', params['has_outreach'])) # noqa: E501
if 'campaign_id' in params:
query_params.append(('campaign_id', params['campaign_id'])) # noqa: E501
if 'outreach_id' in params:
query_params.append(('outreach_id', params['outreach_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Orders1', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_order(self, store_id, order_id, **kwargs): # noqa: E501
"""Get order info # noqa: E501
Get information about a specific order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_order(store_id, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceOrder
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_order_with_http_info(store_id, order_id, **kwargs) # noqa: E501
else:
data = self.get_order_with_http_info(store_id, order_id, **kwargs) # noqa: E501
return data
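# Usage sketch (illustrative; the ids are placeholders, and
# `shipping_address` is only an example of an excludable sub-object):
#
#     order = api.get_order('store_001', 'order_001',
#                           exclude_fields=['shipping_address'])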
def get_order_with_http_info(self, store_id, order_id, **kwargs): # noqa: E501
"""Get order info # noqa: E501
Get information about a specific order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_order_with_http_info(store_id, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceOrder
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_order" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceOrder', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_order_line_items(self, store_id, order_id, **kwargs): # noqa: E501
"""List order line items # noqa: E501
Get information about an order's line items. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_order_line_items(store_id, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: OrderLines
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_order_line_items_with_http_info(store_id, order_id, **kwargs) # noqa: E501
else:
data = self.get_all_order_line_items_with_http_info(store_id, order_id, **kwargs) # noqa: E501
return data
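# Usage sketch (illustrative; ids are placeholders): fetch the second page
# of ten line items using the count/offset pagination described above.
#
#     lines = api.get_all_order_line_items('store_001', 'order_001',
#                                          count=10, offset=10)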
def get_all_order_line_items_with_http_info(self, store_id, order_id, **kwargs): # noqa: E501
"""List order line items # noqa: E501
Get information about an order's line items. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_order_line_items_with_http_info(store_id, order_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: OrderLines
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_order_line_items" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}/lines', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OrderLines', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_order_line_item(self, store_id, order_id, line_id, **kwargs): # noqa: E501
"""Get order line item # noqa: E501
Get information about a specific order line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_order_line_item(store_id, order_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param str line_id: The id for the line item of an order. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceOrderLineItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_order_line_item_with_http_info(store_id, order_id, line_id, **kwargs) # noqa: E501
else:
data = self.get_order_line_item_with_http_info(store_id, order_id, line_id, **kwargs) # noqa: E501
return data
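# Usage sketch (illustrative; ids are placeholders):
#
#     line = api.get_order_line_item('store_001', 'order_001', 'line_001')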
def get_order_line_item_with_http_info(self, store_id, order_id, line_id, **kwargs): # noqa: E501
"""Get order line item # noqa: E501
Get information about a specific order line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_order_line_item_with_http_info(store_id, order_id, line_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param str line_id: The id for the line item of an order. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceOrderLineItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id', 'line_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_order_line_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling ``") # noqa: E501
# verify the required parameter 'line_id' is set
if ('line_id' not in params or
params['line_id'] is None):
raise ValueError("Missing the required parameter `line_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
if 'line_id' in params:
path_params['line_id'] = params['line_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}/lines/{line_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceOrderLineItem', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_store_products(self, store_id, **kwargs): # noqa: E501
"""List product # noqa: E501
Get information about a store's products. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_store_products(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: Products
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_store_products_with_http_info(store_id, **kwargs) # noqa: E501
else:
data = self.get_all_store_products_with_http_info(store_id, **kwargs) # noqa: E501
return data
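# Usage sketch (illustrative; the store id is a placeholder):
#
#     products = api.get_all_store_products('store_001', count=1000)
#
# count may be at most 1000; larger values raise ValueError locally, before
# any HTTP request is made (see the check in the *_with_http_info method).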
def get_all_store_products_with_http_info(self, store_id, **kwargs): # noqa: E501
"""List product # noqa: E501
Get information about a store's products. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_store_products_with_http_info(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: Products
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_store_products" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Products', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_store_product(self, store_id, product_id, **kwargs): # noqa: E501
"""Get product info # noqa: E501
Get information about a specific product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_product(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceProduct
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_store_product_with_http_info(store_id, product_id, **kwargs) # noqa: E501
else:
data = self.get_store_product_with_http_info(store_id, product_id, **kwargs) # noqa: E501
return data
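# Usage sketch (illustrative; ids and field names are placeholders):
#
#     product = api.get_store_product('store_001', 'prod_001',
#                                     fields=['id', 'title'])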
def get_store_product_with_http_info(self, store_id, product_id, **kwargs): # noqa: E501
"""Get product info # noqa: E501
Get information about a specific product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_store_product_with_http_info(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceProduct
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_store_product" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProduct', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_product_images(self, store_id, product_id, **kwargs): # noqa: E501
"""List product images # noqa: E501
Get information about a product's images. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_images(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: EcommerceProductImages
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_product_images_with_http_info(store_id, product_id, **kwargs) # noqa: E501
else:
data = self.get_product_images_with_http_info(store_id, product_id, **kwargs) # noqa: E501
return data
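# Usage sketch (illustrative; ids are placeholders):
#
#     images = api.get_product_images('store_001', 'prod_001', count=25)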
def get_product_images_with_http_info(self, store_id, product_id, **kwargs): # noqa: E501
"""List product images # noqa: E501
Get information about a product's images. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_images_with_http_info(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: EcommerceProductImages
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_product_images" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/images', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductImages', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_product_image(self, store_id, product_id, image_id, **kwargs): # noqa: E501
"""Get product image info # noqa: E501
Get information about a specific product image. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_image(store_id, product_id, image_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str image_id: The id for the product image. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceProductImage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_product_image_with_http_info(store_id, product_id, image_id, **kwargs) # noqa: E501
else:
data = self.get_product_image_with_http_info(store_id, product_id, image_id, **kwargs) # noqa: E501
return data
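# Usage sketch (illustrative; ids are placeholders):
#
#     image = api.get_product_image('store_001', 'prod_001', 'img_001')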
def get_product_image_with_http_info(self, store_id, product_id, image_id, **kwargs): # noqa: E501
"""Get product image info # noqa: E501
Get information about a specific product image. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_image_with_http_info(store_id, product_id, image_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str image_id: The id for the product image. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceProductImage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'image_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_product_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
# verify the required parameter 'image_id' is set
if ('image_id' not in params or
params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
if 'image_id' in params:
path_params['image_id'] = params['image_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/images/{image_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductImage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_product_variants(self, store_id, product_id, **kwargs): # noqa: E501
"""List product variants # noqa: E501
Get information about a product's variants. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_variants(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: EcommerceProductVariants
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_product_variants_with_http_info(store_id, product_id, **kwargs) # noqa: E501
else:
data = self.get_product_variants_with_http_info(store_id, product_id, **kwargs) # noqa: E501
return data
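# Usage sketch (illustrative; ids are placeholders):
#
#     variants = api.get_product_variants('store_001', 'prod_001')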
def get_product_variants_with_http_info(self, store_id, product_id, **kwargs): # noqa: E501
"""List product variants # noqa: E501
Get information about a product's variants. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_variants_with_http_info(store_id, product_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: EcommerceProductVariants
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_product_variants" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/variants', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductVariants', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_product_variant(self, store_id, product_id, variant_id, **kwargs): # noqa: E501
"""Get product variant info # noqa: E501
Get information about a specific product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_variant(store_id, product_id, variant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_product_variant_with_http_info(store_id, product_id, variant_id, **kwargs) # noqa: E501
else:
data = self.get_product_variant_with_http_info(store_id, product_id, variant_id, **kwargs) # noqa: E501
return data
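# Usage sketch (illustrative; ids are placeholders):
#
#     variant = api.get_product_variant('store_001', 'prod_001', 'var_001')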
def get_product_variant_with_http_info(self, store_id, product_id, variant_id, **kwargs): # noqa: E501
"""Get product variant info # noqa: E501
Get information about a specific product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_variant_with_http_info(store_id, product_id, variant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'variant_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_product_variant" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling ``") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling ``") # noqa: E501
# verify the required parameter 'variant_id' is set
if ('variant_id' not in params or
params['variant_id'] is None):
raise ValueError("Missing the required parameter `variant_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
if 'variant_id' in params:
path_params['variant_id'] = params['variant_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/variants/{variant_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductVariant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_promo_codes(self, promo_rule_id, store_id, **kwargs): # noqa: E501
"""List promo codes # noqa: E501
Get information about a store's promo codes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_promo_codes(promo_rule_id, store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: PromoCodes
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_promo_codes_with_http_info(promo_rule_id, store_id, **kwargs) # noqa: E501
else:
data = self.get_promo_codes_with_http_info(promo_rule_id, store_id, **kwargs) # noqa: E501
return data
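# Usage sketch (illustrative; ids are placeholders). Note the argument
# order for this method: promo_rule_id comes before store_id.
#
#     codes = api.get_promo_codes('rule_001', 'store_001', count=10)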
def get_promo_codes_with_http_info(self, promo_rule_id, store_id, **kwargs): # noqa: E501
"""List promo codes # noqa: E501
Get information about a store's promo codes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_promo_codes_with_http_info(promo_rule_id, store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000.
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: PromoCodes
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['promo_rule_id', 'store_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_promo_codes" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'promo_rule_id' is set
        if ('promo_rule_id' not in params or
                params['promo_rule_id'] is None):
            raise ValueError("Missing the required parameter `promo_rule_id` when calling `get_promo_codes`") # noqa: E501
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `get_promo_codes`") # noqa: E501
        if 'count' in params and params['count'] > 1000: # noqa: E501
            raise ValueError("Invalid value for parameter `count` when calling `get_promo_codes`, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}/promo-codes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PromoCodes', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
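    # Usage sketch (illustrative, not part of the generated client): paging
    # through a store's promo codes with the count/offset parameters described
    # in the docstring above. `client` is assumed to be a configured instance
    # of this API class; the `promo_codes`/`total_items` attribute names are
    # assumptions based on the PromoCodes response schema.
    #
    #     offset, page_size = 0, 100
    #     while True:
    #         page = client.get_promo_codes('rule_1', 'store_1',
    #                                       count=page_size, offset=offset)
    #         for code in page.promo_codes:
    #             print(code.code)
    #         offset += page_size
    #         if offset >= page.total_items:
    #             break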
def get_promo_code(self, store_id, promo_rule_id, promo_code_id, **kwargs): # noqa: E501
"""Get promo code # noqa: E501
Get information about a specific promo code. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_promo_code(store_id, promo_rule_id, promo_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str promo_code_id: The id for the promo code of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommercePromoCode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, **kwargs) # noqa: E501
else:
            data = self.get_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, **kwargs) # noqa: E501
return data
def get_promo_code_with_http_info(self, store_id, promo_rule_id, promo_code_id, **kwargs): # noqa: E501
"""Get promo code # noqa: E501
Get information about a specific promo code. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str promo_code_id: The id for the promo code of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommercePromoCode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'promo_rule_id', 'promo_code_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_promo_code" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `get_promo_code`") # noqa: E501
        # verify the required parameter 'promo_rule_id' is set
        if ('promo_rule_id' not in params or
                params['promo_rule_id'] is None):
            raise ValueError("Missing the required parameter `promo_rule_id` when calling `get_promo_code`") # noqa: E501
        # verify the required parameter 'promo_code_id' is set
        if ('promo_code_id' not in params or
                params['promo_code_id'] is None):
            raise ValueError("Missing the required parameter `promo_code_id` when calling `get_promo_code`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
if 'promo_code_id' in params:
path_params['promo_code_id'] = params['promo_code_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}/promo-codes/{promo_code_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommercePromoCode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
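    # Usage sketch (illustrative): requesting a partial response via the
    # `fields` parameter, which is sent as a comma-separated list (note the
    # 'csv' collection format registered above). The field names are
    # assumptions based on the EcommercePromoCode schema.
    #
    #     promo = client.get_promo_code('store_1', 'rule_1', 'code_1',
    #                                   fields=['id', 'code', 'usage_count'])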
def list_promo_rules(self, store_id, **kwargs): # noqa: E501
"""List promo rules # noqa: E501
Get information about a store's promo rules. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_promo_rules(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
        :param int count: The number of records to return. Default value is 10. Maximum value is 1000.
        :param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: PromoRules
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_promo_rules_with_http_info(store_id, **kwargs) # noqa: E501
else:
            data = self.list_promo_rules_with_http_info(store_id, **kwargs) # noqa: E501
return data
def list_promo_rules_with_http_info(self, store_id, **kwargs): # noqa: E501
"""List promo rules # noqa: E501
Get information about a store's promo rules. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_promo_rules_with_http_info(store_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
        :param int count: The number of records to return. Default value is 10. Maximum value is 1000.
        :param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: PromoRules
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_promo_rules" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `list_promo_rules`") # noqa: E501
        if 'count' in params and params['count'] > 1000: # noqa: E501
            raise ValueError("Invalid value for parameter `count` when calling `list_promo_rules`, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PromoRules', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
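    # Usage sketch (illustrative): with async_req=True the call returns a
    # thread-like object immediately, and .get() blocks for the deserialized
    # result, as the docstrings above describe.
    #
    #     thread = client.list_promo_rules('store_1', async_req=True)
    #     rules = thread.get()  # blocks until the HTTP request completes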
def get_promo_rule(self, store_id, promo_rule_id, **kwargs): # noqa: E501
"""Get promo rule # noqa: E501
Get information about a specific promo rule. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_promo_rule(store_id, promo_rule_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommercePromoRule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_promo_rule_with_http_info(store_id, promo_rule_id, **kwargs) # noqa: E501
else:
            data = self.get_promo_rule_with_http_info(store_id, promo_rule_id, **kwargs) # noqa: E501
return data
def get_promo_rule_with_http_info(self, store_id, promo_rule_id, **kwargs): # noqa: E501
"""Get promo rule # noqa: E501
Get information about a specific promo rule. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_promo_rule_with_http_info(store_id, promo_rule_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EcommercePromoRule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'promo_rule_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_promo_rule" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `get_promo_rule`") # noqa: E501
        # verify the required parameter 'promo_rule_id' is set
        if ('promo_rule_id' not in params or
                params['promo_rule_id'] is None):
            raise ValueError("Missing the required parameter `promo_rule_id` when calling `get_promo_rule`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommercePromoRule', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_store(self, store_id, body, **kwargs): # noqa: E501
"""Update store # noqa: E501
Update a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceStore2 body: (required)
:return: EcommerceStore
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_store_with_http_info(store_id, body, **kwargs) # noqa: E501
else:
            data = self.update_store_with_http_info(store_id, body, **kwargs) # noqa: E501
return data
def update_store_with_http_info(self, store_id, body, **kwargs): # noqa: E501
"""Update store # noqa: E501
Update a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store_with_http_info(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceStore2 body: (required)
:return: EcommerceStore
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_store" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_store`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_store`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceStore', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
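    # Usage sketch (illustrative): the PATCH endpoints accept either a model
    # instance or a plain dict; only the supplied keys are updated. The key
    # names here are assumptions based on the EcommerceStore2 schema.
    #
    #     store = client.update_store('store_1', {'name': 'My Store',
    #                                             'currency_code': 'USD'})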
def update_store_cart(self, store_id, cart_id, body, **kwargs): # noqa: E501
"""Update cart # noqa: E501
Update a specific cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store_cart(store_id, cart_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param EcommerceCart2 body: (required)
:return: EcommerceCart
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_store_cart_with_http_info(store_id, cart_id, body, **kwargs) # noqa: E501
else:
            data = self.update_store_cart_with_http_info(store_id, cart_id, body, **kwargs) # noqa: E501
return data
def update_store_cart_with_http_info(self, store_id, cart_id, body, **kwargs): # noqa: E501
"""Update cart # noqa: E501
Update a specific cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store_cart_with_http_info(store_id, cart_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param EcommerceCart2 body: (required)
:return: EcommerceCart
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_store_cart" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_store_cart`") # noqa: E501
        # verify the required parameter 'cart_id' is set
        if ('cart_id' not in params or
                params['cart_id'] is None):
            raise ValueError("Missing the required parameter `cart_id` when calling `update_store_cart`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_store_cart`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCart', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
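    # Usage sketch (illustrative): calling a *_with_http_info variant directly
    # usually returns the deserialized body along with the HTTP status code and
    # headers in swagger-codegen clients; this is an assumption to verify
    # against this project's api_client.call_api implementation.
    #
    #     data, status, headers = client.update_store_cart_with_http_info(
    #         'store_1', 'cart_1', {'order_total': 12.50})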
def update_cart_line_item(self, store_id, cart_id, line_id, body, **kwargs): # noqa: E501
"""Update cart line item # noqa: E501
Update a specific cart line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_cart_line_item(store_id, cart_id, line_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param str line_id: The id for the line item of a cart. (required)
:param EcommerceCartLineItem4 body: (required)
:return: EcommerceCartLineItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_cart_line_item_with_http_info(store_id, cart_id, line_id, body, **kwargs) # noqa: E501
else:
            data = self.update_cart_line_item_with_http_info(store_id, cart_id, line_id, body, **kwargs) # noqa: E501
return data
def update_cart_line_item_with_http_info(self, store_id, cart_id, line_id, body, **kwargs): # noqa: E501
"""Update cart line item # noqa: E501
Update a specific cart line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_cart_line_item_with_http_info(store_id, cart_id, line_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param str line_id: The id for the line item of a cart. (required)
:param EcommerceCartLineItem4 body: (required)
:return: EcommerceCartLineItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id', 'line_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_cart_line_item" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_cart_line_item`") # noqa: E501
        # verify the required parameter 'cart_id' is set
        if ('cart_id' not in params or
                params['cart_id'] is None):
            raise ValueError("Missing the required parameter `cart_id` when calling `update_cart_line_item`") # noqa: E501
        # verify the required parameter 'line_id' is set
        if ('line_id' not in params or
                params['line_id'] is None):
            raise ValueError("Missing the required parameter `line_id` when calling `update_cart_line_item`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_cart_line_item`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
if 'line_id' in params:
path_params['line_id'] = params['line_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}/lines/{line_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCartLineItem', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_store_customer(self, store_id, customer_id, body, **kwargs): # noqa: E501
"""Update customer # noqa: E501
Update a customer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store_customer(store_id, customer_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:param EcommerceCustomer5 body: (required)
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_store_customer_with_http_info(store_id, customer_id, body, **kwargs) # noqa: E501
else:
            data = self.update_store_customer_with_http_info(store_id, customer_id, body, **kwargs) # noqa: E501
return data
def update_store_customer_with_http_info(self, store_id, customer_id, body, **kwargs): # noqa: E501
"""Update customer # noqa: E501
Update a customer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store_customer_with_http_info(store_id, customer_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:param EcommerceCustomer5 body: (required)
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'customer_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_store_customer" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_store_customer`") # noqa: E501
        # verify the required parameter 'customer_id' is set
        if ('customer_id' not in params or
                params['customer_id'] is None):
            raise ValueError("Missing the required parameter `customer_id` when calling `update_store_customer`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_store_customer`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/customers/{customer_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCustomer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_order(self, store_id, order_id, body, **kwargs): # noqa: E501
"""Update order # noqa: E501
Update a specific order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_order(store_id, order_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param EcommerceOrder2 body: (required)
:return: EcommerceOrder
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_order_with_http_info(store_id, order_id, body, **kwargs) # noqa: E501
else:
            data = self.update_order_with_http_info(store_id, order_id, body, **kwargs) # noqa: E501
return data
def update_order_with_http_info(self, store_id, order_id, body, **kwargs): # noqa: E501
"""Update order # noqa: E501
Update a specific order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_order_with_http_info(store_id, order_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param EcommerceOrder2 body: (required)
:return: EcommerceOrder
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_order" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_order`") # noqa: E501
        # verify the required parameter 'order_id' is set
        if ('order_id' not in params or
                params['order_id'] is None):
            raise ValueError("Missing the required parameter `order_id` when calling `update_order`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_order`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceOrder', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_order_line_item(self, store_id, order_id, line_id, body, **kwargs): # noqa: E501
"""Update order line item # noqa: E501
Update a specific order line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_order_line_item(store_id, order_id, line_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param str line_id: The id for the line item of an order. (required)
:param EcommerceOrderLineItem4 body: (required)
:return: EcommerceOrderLineItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_order_line_item_with_http_info(store_id, order_id, line_id, body, **kwargs) # noqa: E501
else:
            data = self.update_order_line_item_with_http_info(store_id, order_id, line_id, body, **kwargs) # noqa: E501
return data
def update_order_line_item_with_http_info(self, store_id, order_id, line_id, body, **kwargs): # noqa: E501
"""Update order line item # noqa: E501
Update a specific order line item. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_order_line_item_with_http_info(store_id, order_id, line_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param str line_id: The id for the line item of an order. (required)
:param EcommerceOrderLineItem4 body: (required)
:return: EcommerceOrderLineItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id', 'line_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_order_line_item" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_order_line_item`") # noqa: E501
        # verify the required parameter 'order_id' is set
        if ('order_id' not in params or
                params['order_id'] is None):
            raise ValueError("Missing the required parameter `order_id` when calling `update_order_line_item`") # noqa: E501
        # verify the required parameter 'line_id' is set
        if ('line_id' not in params or
                params['line_id'] is None):
            raise ValueError("Missing the required parameter `line_id` when calling `update_order_line_item`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_order_line_item`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
if 'line_id' in params:
path_params['line_id'] = params['line_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}/lines/{line_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceOrderLineItem', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_store_product(self, store_id, product_id, body, **kwargs): # noqa: E501
"""Update product # noqa: E501
Update a specific product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store_product(store_id, product_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param EcommerceProduct2 body: (required)
:return: EcommerceProduct
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_store_product_with_http_info(store_id, product_id, body, **kwargs) # noqa: E501
else:
            data = self.update_store_product_with_http_info(store_id, product_id, body, **kwargs) # noqa: E501
return data
def update_store_product_with_http_info(self, store_id, product_id, body, **kwargs): # noqa: E501
"""Update product # noqa: E501
Update a specific product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_store_product_with_http_info(store_id, product_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param EcommerceProduct2 body: (required)
:return: EcommerceProduct
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_store_product" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_store_product`") # noqa: E501
        # verify the required parameter 'product_id' is set
        if ('product_id' not in params or
                params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `update_store_product`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_store_product`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProduct', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_product_image(self, store_id, product_id, image_id, body, **kwargs): # noqa: E501
"""Update product image # noqa: E501
Update a product image. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_product_image(store_id, product_id, image_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str image_id: The id for the product image. (required)
:param EcommerceProductImage4 body: (required)
:return: EcommerceProductImage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_product_image_with_http_info(store_id, product_id, image_id, body, **kwargs) # noqa: E501
else:
            data = self.update_product_image_with_http_info(store_id, product_id, image_id, body, **kwargs) # noqa: E501
return data
def update_product_image_with_http_info(self, store_id, product_id, image_id, body, **kwargs): # noqa: E501
"""Update product image # noqa: E501
Update a product image. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_product_image_with_http_info(store_id, product_id, image_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str image_id: The id for the product image. (required)
:param EcommerceProductImage4 body: (required)
:return: EcommerceProductImage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'image_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_product_image" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_product_image`") # noqa: E501
        # verify the required parameter 'product_id' is set
        if ('product_id' not in params or
                params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `update_product_image`") # noqa: E501
        # verify the required parameter 'image_id' is set
        if ('image_id' not in params or
                params['image_id'] is None):
            raise ValueError("Missing the required parameter `image_id` when calling `update_product_image`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_product_image`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
if 'image_id' in params:
path_params['image_id'] = params['image_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/images/{image_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductImage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_product_variant(self, store_id, product_id, variant_id, body, **kwargs): # noqa: E501
"""Update product variant # noqa: E501
Update a product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_product_variant(store_id, product_id, variant_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:param EcommerceProductVariant5 body: (required)
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_product_variant_with_http_info(store_id, product_id, variant_id, body, **kwargs) # noqa: E501
else:
            data = self.update_product_variant_with_http_info(store_id, product_id, variant_id, body, **kwargs) # noqa: E501
return data
def update_product_variant_with_http_info(self, store_id, product_id, variant_id, body, **kwargs): # noqa: E501
"""Update product variant # noqa: E501
Update a product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_product_variant_with_http_info(store_id, product_id, variant_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:param EcommerceProductVariant5 body: (required)
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'variant_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_product_variant" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_product_variant`") # noqa: E501
        # verify the required parameter 'product_id' is set
        if ('product_id' not in params or
                params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `update_product_variant`") # noqa: E501
        # verify the required parameter 'variant_id' is set
        if ('variant_id' not in params or
                params['variant_id'] is None):
            raise ValueError("Missing the required parameter `variant_id` when calling `update_product_variant`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_product_variant`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
if 'variant_id' in params:
path_params['variant_id'] = params['variant_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/variants/{variant_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductVariant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_promo_code(self, store_id, promo_rule_id, promo_code_id, body, **kwargs): # noqa: E501
"""Update promo code # noqa: E501
Update a promo code. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_promo_code(store_id, promo_rule_id, promo_code_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str promo_code_id: The id for the promo code of a store. (required)
:param EcommercePromoCode2 body: (required)
:return: EcommercePromoCode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, body, **kwargs) # noqa: E501
else:
            data = self.update_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, body, **kwargs) # noqa: E501
return data
def update_promo_code_with_http_info(self, store_id, promo_rule_id, promo_code_id, body, **kwargs): # noqa: E501
"""Update promo code # noqa: E501
Update a promo code. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_promo_code_with_http_info(store_id, promo_rule_id, promo_code_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param str promo_code_id: The id for the promo code of a store. (required)
:param EcommercePromoCode2 body: (required)
:return: EcommercePromoCode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'promo_rule_id', 'promo_code_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_promo_code" % key
)
params[key] = val
del params['kwargs']
        # verify the required parameter 'store_id' is set
        if ('store_id' not in params or
                params['store_id'] is None):
            raise ValueError("Missing the required parameter `store_id` when calling `update_promo_code`") # noqa: E501
        # verify the required parameter 'promo_rule_id' is set
        if ('promo_rule_id' not in params or
                params['promo_rule_id'] is None):
            raise ValueError("Missing the required parameter `promo_rule_id` when calling `update_promo_code`") # noqa: E501
        # verify the required parameter 'promo_code_id' is set
        if ('promo_code_id' not in params or
                params['promo_code_id'] is None):
            raise ValueError("Missing the required parameter `promo_code_id` when calling `update_promo_code`") # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_promo_code`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
if 'promo_code_id' in params:
path_params['promo_code_id'] = params['promo_code_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}/promo-codes/{promo_code_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommercePromoCode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
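# --- Usage sketch (illustrative, not part of the generated client) ---
# A minimal example of the sync/async calling convention described in the
# docstrings above. `api`, the ids, and `body` are hypothetical placeholders.
#
#   code = api.update_promo_code('store_1', 'rule_1', 'code_1', body)  # synchronous; returns EcommercePromoCode
#   thread = api.update_promo_code('store_1', 'rule_1', 'code_1', body, async_req=True)
#   code = thread.get()  # asynchronous; block only when the result is actually needed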
def update_promo_rule(self, store_id, promo_rule_id, body, **kwargs): # noqa: E501
"""Update promo rule # noqa: E501
Update a promo rule. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_promo_rule(store_id, promo_rule_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param EcommercePromoRule2 body: (required)
:return: EcommercePromoRule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_promo_rule_with_http_info(store_id, promo_rule_id, body, **kwargs) # noqa: E501
else:
(data) = self.update_promo_rule_with_http_info(store_id, promo_rule_id, body, **kwargs) # noqa: E501
return data
def update_promo_rule_with_http_info(self, store_id, promo_rule_id, body, **kwargs): # noqa: E501
"""Update promo rule # noqa: E501
Update a promo rule. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_promo_rule_with_http_info(store_id, promo_rule_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param EcommercePromoRule2 body: (required)
:return: EcommercePromoRule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'promo_rule_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_promo_rule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `update_promo_rule`") # noqa: E501
# verify the required parameter 'promo_rule_id' is set
if ('promo_rule_id' not in params or
params['promo_rule_id'] is None):
raise ValueError("Missing the required parameter `promo_rule_id` when calling `update_promo_rule`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_promo_rule`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommercePromoRule', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_store(self, body, **kwargs): # noqa: E501
"""Add store # noqa: E501
Add a new store to your Mailchimp account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EcommerceStore1 body: (required)
:return: EcommerceStore
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_store_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_store_with_http_info(body, **kwargs) # noqa: E501
return data
def add_store_with_http_info(self, body, **kwargs): # noqa: E501
"""Add store # noqa: E501
Add a new store to your Mailchimp account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EcommerceStore1 body: (required)
:return: EcommerceStore
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_store" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_store`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceStore', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
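# --- Usage sketch (illustrative only) ---
# The private kwargs threaded into call_api above can also be supplied by the
# caller; the client instance and payload here are hypothetical placeholders.
#
#   store = api.add_store(body, _request_timeout=30)   # give up on the HTTP call after 30 seconds
#   raw = api.add_store(body, _preload_content=False)  # skip deserialization and return the raw response object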
def add_store_cart(self, store_id, body, **kwargs): # noqa: E501
"""Add cart # noqa: E501
Add a new cart to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_cart(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceCart1 body: (required)
:return: EcommerceCart
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_store_cart_with_http_info(store_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_store_cart_with_http_info(store_id, body, **kwargs) # noqa: E501
return data
def add_store_cart_with_http_info(self, store_id, body, **kwargs): # noqa: E501
"""Add cart # noqa: E501
Add a new cart to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_cart_with_http_info(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceCart1 body: (required)
:return: EcommerceCart
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_store_cart" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_store_cart`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_store_cart`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCart', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_cart_line_item(self, store_id, cart_id, body, **kwargs): # noqa: E501
"""Add cart line item # noqa: E501
Add a new line item to an existing cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_cart_line_item(store_id, cart_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param EcommerceCartLineItem3 body: (required)
:return: EcommerceCartLineItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_cart_line_item_with_http_info(store_id, cart_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_cart_line_item_with_http_info(store_id, cart_id, body, **kwargs) # noqa: E501
return data
def add_cart_line_item_with_http_info(self, store_id, cart_id, body, **kwargs): # noqa: E501
"""Add cart line item # noqa: E501
Add a new line item to an existing cart. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_cart_line_item_with_http_info(store_id, cart_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str cart_id: The id for the cart. (required)
:param EcommerceCartLineItem3 body: (required)
:return: EcommerceCartLineItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'cart_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_cart_line_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_cart_line_item`") # noqa: E501
# verify the required parameter 'cart_id' is set
if ('cart_id' not in params or
params['cart_id'] is None):
raise ValueError("Missing the required parameter `cart_id` when calling `add_cart_line_item`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_cart_line_item`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'cart_id' in params:
path_params['cart_id'] = params['cart_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/carts/{cart_id}/lines', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCartLineItem', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
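# --- Usage sketch (illustrative only) ---
# Line items hang off an existing cart, so a typical flow creates the cart
# first and reuses its id. All names below are hypothetical placeholders, and
# `cart.id` assumes the returned EcommerceCart model exposes its id as an
# attribute.
#
#   cart = api.add_store_cart('store_1', cart_body)
#   line = api.add_cart_line_item('store_1', cart.id, line_body)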
def add_store_customer(self, store_id, body, **kwargs): # noqa: E501
"""Add customer # noqa: E501
Add a new customer to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_customer(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceCustomer3 body: (required)
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_store_customer_with_http_info(store_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_store_customer_with_http_info(store_id, body, **kwargs) # noqa: E501
return data
def add_store_customer_with_http_info(self, store_id, body, **kwargs): # noqa: E501
"""Add customer # noqa: E501
Add a new customer to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_customer_with_http_info(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceCustomer3 body: (required)
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_store_customer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_store_customer`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_store_customer`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/customers', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCustomer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_store_order(self, store_id, body, **kwargs): # noqa: E501
"""Add order # noqa: E501
Add a new order to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_order(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceOrder1 body: (required)
:return: EcommerceOrder
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_store_order_with_http_info(store_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_store_order_with_http_info(store_id, body, **kwargs) # noqa: E501
return data
def add_store_order_with_http_info(self, store_id, body, **kwargs): # noqa: E501
"""Add order # noqa: E501
Add a new order to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_order_with_http_info(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceOrder1 body: (required)
:return: EcommerceOrder
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_store_order" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_store_order`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_store_order`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceOrder', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_order_line_item(self, store_id, order_id, body, **kwargs): # noqa: E501
"""Add order line item # noqa: E501
Add a new line item to an existing order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_order_line_item(store_id, order_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param EcommerceOrderLineItem3 body: (required)
:return: EcommerceOrderLineItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_order_line_item_with_http_info(store_id, order_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_order_line_item_with_http_info(store_id, order_id, body, **kwargs) # noqa: E501
return data
def add_order_line_item_with_http_info(self, store_id, order_id, body, **kwargs): # noqa: E501
"""Add order line item # noqa: E501
Add a new line item to an existing order. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_order_line_item_with_http_info(store_id, order_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str order_id: The id for the order in a store. (required)
:param EcommerceOrderLineItem3 body: (required)
:return: EcommerceOrderLineItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'order_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_order_line_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_order_line_item`") # noqa: E501
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `add_order_line_item`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_order_line_item`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'order_id' in params:
path_params['order_id'] = params['order_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/orders/{order_id}/lines', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceOrderLineItem', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_store_product(self, store_id, body, **kwargs): # noqa: E501
"""Add product # noqa: E501
Add a new product to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_product(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceProduct1 body: (required)
:return: EcommerceProduct
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_store_product_with_http_info(store_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_store_product_with_http_info(store_id, body, **kwargs) # noqa: E501
return data
def add_store_product_with_http_info(self, store_id, body, **kwargs): # noqa: E501
"""Add product # noqa: E501
Add a new product to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_store_product_with_http_info(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommerceProduct1 body: (required)
:return: EcommerceProduct
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_store_product" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_store_product`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_store_product`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProduct', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_product_image(self, store_id, product_id, body, **kwargs): # noqa: E501
"""Add product image # noqa: E501
Add a new image to the product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_product_image(store_id, product_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param EcommerceProductImage3 body: (required)
:return: EcommerceProductImage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_product_image_with_http_info(store_id, product_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_product_image_with_http_info(store_id, product_id, body, **kwargs) # noqa: E501
return data
def add_product_image_with_http_info(self, store_id, product_id, body, **kwargs): # noqa: E501
"""Add product image # noqa: E501
Add a new image to the product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_product_image_with_http_info(store_id, product_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param EcommerceProductImage3 body: (required)
:return: EcommerceProductImage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_product_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_product_image`") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `add_product_image`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_product_image`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/images', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductImage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_product_variants(self, store_id, product_id, body, **kwargs): # noqa: E501
"""Add product variant # noqa: E501
Add a new variant to the product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_product_variants(store_id, product_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param EcommerceProductVariant3 body: (required)
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_product_variants_with_http_info(store_id, product_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_product_variants_with_http_info(store_id, product_id, body, **kwargs) # noqa: E501
return data
def add_product_variants_with_http_info(self, store_id, product_id, body, **kwargs): # noqa: E501
"""Add product variant # noqa: E501
Add a new variant to the product. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_product_variants_with_http_info(store_id, product_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param EcommerceProductVariant3 body: (required)
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_product_variants" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_product_variants`") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `add_product_variants`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_product_variants`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/variants', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductVariant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_promo_code(self, store_id, promo_rule_id, body, **kwargs): # noqa: E501
"""Add promo code # noqa: E501
Add a new promo code to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_promo_code(store_id, promo_rule_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param EcommercePromoCode1 body: (required)
:return: EcommercePromoCode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_promo_code_with_http_info(store_id, promo_rule_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_promo_code_with_http_info(store_id, promo_rule_id, body, **kwargs) # noqa: E501
return data
def add_promo_code_with_http_info(self, store_id, promo_rule_id, body, **kwargs): # noqa: E501
"""Add promo code # noqa: E501
Add a new promo code to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_promo_code_with_http_info(store_id, promo_rule_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str promo_rule_id: The id for the promo rule of a store. (required)
:param EcommercePromoCode1 body: (required)
:return: EcommercePromoCode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'promo_rule_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_promo_code" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_promo_code`") # noqa: E501
# verify the required parameter 'promo_rule_id' is set
if ('promo_rule_id' not in params or
params['promo_rule_id'] is None):
raise ValueError("Missing the required parameter `promo_rule_id` when calling `add_promo_code`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_promo_code`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'promo_rule_id' in params:
path_params['promo_rule_id'] = params['promo_rule_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules/{promo_rule_id}/promo-codes', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommercePromoCode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_promo_rules(self, store_id, body, **kwargs): # noqa: E501
"""Add promo rule # noqa: E501
Add a new promo rule to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_promo_rules(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommercePromoRule1 body: (required)
:return: EcommercePromoRule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_promo_rules_with_http_info(store_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_promo_rules_with_http_info(store_id, body, **kwargs) # noqa: E501
return data
def add_promo_rules_with_http_info(self, store_id, body, **kwargs): # noqa: E501
"""Add promo rule # noqa: E501
Add a new promo rule to a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_promo_rules_with_http_info(store_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param EcommercePromoRule1 body: (required)
:return: EcommercePromoRule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_promo_rules" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_promo_rules`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_promo_rules`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/promo-rules', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommercePromoRule', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_store_customer(self, store_id, customer_id, body, **kwargs): # noqa: E501
"""Add or update customer # noqa: E501
Add or update a customer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_store_customer(store_id, customer_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:param EcommerceCustomer4 body: (required)
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_store_customer_with_http_info(store_id, customer_id, body, **kwargs) # noqa: E501
else:
(data) = self.set_store_customer_with_http_info(store_id, customer_id, body, **kwargs) # noqa: E501
return data
def set_store_customer_with_http_info(self, store_id, customer_id, body, **kwargs): # noqa: E501
"""Add or update customer # noqa: E501
Add or update a customer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_store_customer_with_http_info(store_id, customer_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str customer_id: The id for the customer of a store. (required)
:param EcommerceCustomer4 body: (required)
:return: EcommerceCustomer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'customer_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_store_customer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `set_store_customer`") # noqa: E501
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params or
params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling `set_store_customer`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `set_store_customer`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/customers/{customer_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceCustomer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
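# --- Design note (sketch) ---
# set_store_customer issues a PUT to .../customers/{customer_id} (see the
# call_api invocation above), so unlike the POST-based add_store_customer it
# behaves as an idempotent "create or replace". Hypothetical illustration:
#
#   api.set_store_customer('store_1', 'cust_42', body)  # creates cust_42
#   api.set_store_customer('store_1', 'cust_42', body)  # safe to retry; converges on the same stored customer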
def add_product_variant(self, store_id, product_id, variant_id, body, **kwargs): # noqa: E501
"""Add or update product variant # noqa: E501
Add or update a product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_product_variant(store_id, product_id, variant_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:param EcommerceProductVariant4 body: (required)
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_product_variant_with_http_info(store_id, product_id, variant_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_product_variant_with_http_info(store_id, product_id, variant_id, body, **kwargs) # noqa: E501
return data
def add_product_variant_with_http_info(self, store_id, product_id, variant_id, body, **kwargs): # noqa: E501
"""Add or update product variant # noqa: E501
Add or update a product variant. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_product_variant_with_http_info(store_id, product_id, variant_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str store_id: The store id. (required)
:param str product_id: The id for the product of a store. (required)
:param str variant_id: The id for the product variant. (required)
:param EcommerceProductVariant4 body: (required)
:return: EcommerceProductVariant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_id', 'product_id', 'variant_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_product_variant" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `add_product_variant`") # noqa: E501
# verify the required parameter 'product_id' is set
if ('product_id' not in params or
params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `add_product_variant`") # noqa: E501
# verify the required parameter 'variant_id' is set
if ('variant_id' not in params or
params['variant_id'] is None):
raise ValueError("Missing the required parameter `variant_id` when calling `add_product_variant`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_product_variant`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'product_id' in params:
path_params['product_id'] = params['product_id'] # noqa: E501
if 'variant_id' in params:
path_params['variant_id'] = params['variant_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/ecommerce/stores/{store_id}/products/{product_id}/variants/{variant_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EcommerceProductVariant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 45.33402 | 205 | 0.619735 | 37,651 | 307,954 | 4.840456 | 0.00749 | 0.053202 | 0.030069 | 0.022914 | 0.995501 | 0.994875 | 0.994436 | 0.993487 | 0.992642 | 0.991007 | 0 | 0.018024 | 0.286371 | 307,954 | 6,792 | 206 | 45.340695 | 0.811262 | 0.354491 | 0 | 0.877193 | 1 | 0.00319 | 0.224968 | 0.047517 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0311 | false | 0 | 0.001063 | 0 | 0.078682 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
31ecd2628b36435d2c3cdc96f3d445a03ebaebc8 | 2,816 | py | Python | tests/test_phase_unwrap.py | TAdeJong/pyGPA | fa14e544d8faa5095bf3503160e88131b5e7298e | ["MIT"] | 3 | 2021-08-02T13:55:18.000Z | 2021-11-25T11:44:49.000Z | tests/test_phase_unwrap.py | TAdeJong/pyGPA | fa14e544d8faa5095bf3503160e88131b5e7298e | ["MIT"] | 1 | 2021-11-18T11:11:18.000Z | 2021-11-18T11:14:57.000Z | tests/test_phase_unwrap.py | TAdeJong/pyGPA | fa14e544d8faa5095bf3503160e88131b5e7298e | ["MIT"] | null | null | null |
import numpy as np
from hypothesis import given, strategies as st
import pytest
import pyGPA.phase_unwrap as pu
# This test code was written by the `hypothesis.extra.ghostwriter` module
# and is provided under the Creative Commons Zero public domain dedication.
@pytest.mark.filterwarnings("ignore:invalid value encountered in true_divide")
@given(kmax=st.integers(1, 30))
def test_equivalent_phase_unwrap_ref_phase_unwrap(kmax):
N = 256
xx, yy = np.meshgrid(np.arange(N), np.arange(N), indexing='ij')
psi0 = (yy+xx) / (4*np.sqrt(2))
psi = pu._wrapToPi(psi0)
weight = np.ones_like(psi)
result_phase_unwrap_ref = pu.phase_unwrap_ref(
psi=psi, weight=weight, kmax=kmax
)
assert np.allclose(result_phase_unwrap_ref - result_phase_unwrap_ref.mean(),
psi0 - psi0.mean())
result_phase_unwrap = pu.phase_unwrap(
psi=psi, weight=weight, kmax=kmax
)
assert np.allclose(result_phase_unwrap_ref, result_phase_unwrap)
result_phase_unwrap = pu.phase_unwrap(
psi=psi, weight=None, kmax=kmax
)
assert np.allclose(result_phase_unwrap_ref, result_phase_unwrap)
def test_equivalent_phase_unwrap_gaussian_weight():
N = 256
xx, yy = np.meshgrid(np.arange(N), np.arange(N), indexing='ij')
psi0 = (yy+xx) / (4*np.sqrt(2))
psi = pu._wrapToPi(psi0)
gaussian = np.exp(-((xx-N//2)**2+(yy-N//2)**2)/(0.3*N**2))
result_phase_unwrap = pu.phase_unwrap(
psi=psi, weight=gaussian
)
result_phase_unwrap_ref = pu.phase_unwrap(
psi=psi, weight=None
)
assert np.allclose(result_phase_unwrap_ref, result_phase_unwrap)
@pytest.mark.filterwarnings("ignore:invalid value encountered in true_divide")
@given(kmax=st.integers(1, 30))
def test_equivalent_phase_unwrap_ref_prediff_phase_unwrap_prediff(kmax):
N = 256
xx, yy = np.meshgrid(np.arange(N), np.arange(N), indexing='ij')
psi0 = (yy+xx) / (4*np.sqrt(2))
psi = pu._wrapToPi(psi0)
dx = np.diff(psi, axis=1)
dy = np.diff(psi, axis=0)
weight = np.ones_like(psi)
result_phase_unwrap_ref = pu.phase_unwrap_ref_prediff(
dx=dx, dy=dy, weight=weight, kmax=kmax
)
assert np.allclose(result_phase_unwrap_ref - result_phase_unwrap_ref.mean(),
psi0 - psi0.mean())
result_phase_unwrap = pu.phase_unwrap_prediff(
dx=dx, dy=dy, weight=weight, kmax=kmax
)
assert np.allclose(result_phase_unwrap_ref, result_phase_unwrap)
result_phase_unwrap = pu.phase_unwrap_prediff(
dx=dx, dy=dy, weight=None, kmax=kmax
)
assert np.allclose(result_phase_unwrap_ref, result_phase_unwrap)
result_phase_unwrap_ref = pu.phase_unwrap_ref(
psi=psi, weight=weight, kmax=kmax
)
assert np.allclose(result_phase_unwrap_ref, result_phase_unwrap)
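# A sketch of how these property-based tests are typically run (assumes pytest
# and the hypothesis pytest plugin are installed; the flags below are standard
# pytest/hypothesis options, not something this file defines):
#
#   pytest tests/test_phase_unwrap.py -q
#   pytest tests/test_phase_unwrap.py --hypothesis-seed=0  # pin the random seed to reproduce a run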
| 37.052632 | 81 | 0.699219 | 422 | 2,816 | 4.414692 | 0.191943 | 0.236178 | 0.228127 | 0.150295 | 0.822866 | 0.807837 | 0.807837 | 0.781535 | 0.781535 | 0.75577 | 0 | 0.017429 | 0.185014 | 2,816 | 75 | 82 | 37.546667 | 0.794336 | 0.051491 | 0 | 0.615385 | 0 | 0 | 0.037481 | 0 | 0 | 0 | 0 | 0 | 0.123077 | 1 | 0.046154 | false | 0 | 0.061538 | 0 | 0.107692 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b40abf9e82bd9f0244eb5671dd9f60d5b6e6a4f9
| 58,894
|
py
|
Python
|
geotrek/core/tests/test_path_split.py
|
jmdecastel/GEOTADMIN
|
15547c0a99ae4c541ca517cdbc2cf17ab5c96f87
|
[
"BSD-2-Clause"
] | null | null | null |
geotrek/core/tests/test_path_split.py
|
jmdecastel/GEOTADMIN
|
15547c0a99ae4c541ca517cdbc2cf17ab5c96f87
|
[
"BSD-2-Clause"
] | null | null | null |
geotrek/core/tests/test_path_split.py
|
jmdecastel/GEOTADMIN
|
15547c0a99ae4c541ca517cdbc2cf17ab5c96f87
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.contrib.gis.geos import LineString, Point
from django.conf import settings
from geotrek.common.utils import almostequal
from geotrek.core.factories import PathFactory, TopologyFactory, NetworkFactory, UsageFactory
from geotrek.core.models import Path, Topology
class SplitPathTest(TestCase):
def test_split_attributes(self):
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
ab.networks.add(NetworkFactory.create())
ab.usages.add(UsageFactory.create())
PathFactory.create(geom=LineString((2, 0), (2, 2)))
ab_2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
self.assertEqual(ab.source, ab_2.source)
self.assertEqual(ab.stake, ab_2.stake)
self.assertListEqual(list(ab.networks.all()), list(ab_2.networks.all()))
self.assertListEqual(list(ab.usages.all()), list(ab_2.usages.all()))
def test_split_tee_1(self):
"""
C
A +----+----+ B
|
+ AB exists. Add CD.
D
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
self.assertEqual(ab.length, 4)
cd = PathFactory.create(geom=LineString((2, 0), (2, 2)))
self.assertEqual(cd.length, 2)
# Make sure AB was split:
ab.reload()
self.assertEqual(ab.geom, LineString((0, 0), (2, 0)))
self.assertEqual(ab.length, 2) # Length was also updated
# And a clone of AB was created
clones = Path.objects.filter(name="AB").exclude(pk=ab.pk)
self.assertEqual(len(clones), 1)
ab_2 = clones[0]
self.assertEqual(ab_2.geom, LineString((2, 0), (4, 0)))
self.assertEqual(ab_2.length, 2) # Length was also updated
def test_split_tee_2(self):
"""
CD exists. Add AB.
"""
cd = PathFactory.create(geom=LineString((2, 0), (2, 2)))
self.assertEqual(cd.length, 2)
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Make sure AB was split:
self.assertEqual(ab.geom, LineString((0, 0), (2, 0)))
self.assertEqual(ab.length, 2) # Length was also updated
clones = Path.objects.filter(name="AB").exclude(pk=ab.pk)
ab_2 = clones[0]
self.assertEqual(ab_2.geom, LineString((2, 0), (4, 0)))
self.assertEqual(ab_2.length, 2) # Length was also updated
def test_split_cross(self):
"""
C
+
|
A +----+----+ B
|
+ AB exists. Add CD.
D
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((2, -2), (2, 2)))
ab.reload()
ab_2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
cd_2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(ab.geom, LineString((0, 0), (2, 0)))
self.assertEqual(cd.geom, LineString((2, -2), (2, 0)))
self.assertEqual(ab_2.geom, LineString((2, 0), (4, 0)))
self.assertEqual(cd_2.geom, LineString((2, 0), (2, 2)))
def test_split_cross_on_deleted(self):
"""
Paths should not be split if they cross deleted paths
(i.e. paths whose ``deleted`` attribute is True).
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
self.assertEqual(len(Path.objects.all()), 1)
ab.delete()
self.assertEqual(len(Path.objects.all()), 0)
PathFactory.create(name="CD", geom=LineString((2, -2), (2, 2)))
self.assertEqual(len(Path.objects.all()), 1)
def test_split_on_update(self):
"""
+ E
:
A +----+----+ B A +----+----+ B
:
C +----+ D C +----+ D
AB and CD exist. CD updated into CE.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, -2), (2, -2)))
self.assertEqual(ab.length, 4)
self.assertEqual(cd.length, 2)
cd.geom = LineString((0, -2), (2, -2), (2, 2))
cd.save()
ab.reload()
self.assertEqual(ab.length, 2)
self.assertEqual(cd.length, 4)
ab_2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
cd_2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(ab_2.length, 2)
self.assertEqual(cd_2.length, 2)
def test_split_twice(self):
"""
C D
+ +
| |
A +--+---+--+ B
| |
+---+
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((1, 2), (1, -2),
(3, -2), (3, 2)))
ab.reload()
self.assertEqual(ab.length, 1)
self.assertEqual(cd.length, 2)
ab_clones = Path.objects.filter(name="AB").exclude(pk=ab.pk)
cd_clones = Path.objects.filter(name="CD").exclude(pk=cd.pk)
self.assertEqual(len(ab_clones), 2)
self.assertEqual(len(cd_clones), 2)
# Depending on PostgreSQL fetch order
if ab_clones[0].geom == LineString((1, 0), (3, 0)):
self.assertEqual(ab_clones[0].geom, LineString((1, 0), (3, 0)))
self.assertEqual(ab_clones[1].geom, LineString((3, 0), (4, 0)))
else:
self.assertEqual(ab_clones[0].geom, LineString((3, 0), (4, 0)))
self.assertEqual(ab_clones[1].geom, LineString((1, 0), (3, 0)))
if cd_clones[0].geom == LineString((3, 0), (3, 2)):
self.assertEqual(cd_clones[0].geom, LineString((3, 0), (3, 2)))
self.assertEqual(cd_clones[1].geom, LineString((1, 0), (1, -2),
(3, -2), (3, 0)))
else:
self.assertEqual(cd_clones[0].geom, LineString((1, 0), (1, -2),
(3, -2), (3, 0)))
self.assertEqual(cd_clones[1].geom, LineString((3, 0), (3, 2)))
def test_add_shortest_path(self):
"""
A +---- -----+ C
\ /
\ /
--+--
B
D E
A +---+---------+---+ C
\ /
\ /
--+--
B
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0),
(6, -2), (8, -2)))
cb = PathFactory.create(name="CB", geom=LineString((14, 0), (12, 0),
(10, -2), (8, -2)))
de = PathFactory.create(name="DE", geom=LineString((4, 0), (12, 0)))
# Paths were split, there are 5 now
self.assertEqual(len(Path.objects.all()), 5)
ab.reload()
cb.reload()
de.reload()
ab_2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
cb_2 = Path.objects.filter(name="CB").exclude(pk=cb.pk)[0]
self.assertEqual(de.geom, LineString((4, 0), (12, 0)))
self.assertEqual(ab.geom, LineString((0, 0), (4, 0)))
self.assertEqual(ab_2.geom, LineString((4, 0), (6, -2), (8, -2)))
self.assertEqual(cb.geom, LineString((14, 0), (12, 0)))
self.assertEqual(cb_2.geom, LineString((12, 0), (10, -2), (8, -2)))
def test_split_almost(self):
"""
C D
+ +
\ /
A +--V--+ B
E
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((1, 1), (2, -0.2),
(3, 1)))
ab.reload()
cd.reload()
eb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
ed = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(ab.geom, LineString((0, 0), (2, -0.2)))
self.assertEqual(cd.geom, LineString((1, 1), (2, -0.2)))
self.assertEqual(eb.geom, LineString((2, -0.2), (4, 0)))
self.assertEqual(ed.geom, LineString((2, -0.2), (3, 1)))
def test_split_almost_2(self):
"""
+ C
|
A +------- ... ----+ B
|
+ D
"""
cd = PathFactory.create(name="CD", geom=LineString((0.1, 1), (0.1, -1)))
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (10000000, 0)))
ab.reload()
cd.reload()
self.assertEqual(ab.geom, LineString((0.1, 0), (10000000, 0)))
self.assertEqual(cd.geom, LineString((0.1, 1), (0.1, 0)))
self.assertEqual(len(Path.objects.all()), 3)
def test_split_almost_3(self):
"""
+ C
|
A +-+------ ... ----+ B
|
+ D
"""
cd = PathFactory.create(name="CD", geom=LineString((1.1, 1), (1.1, -1)))
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (10000000, 0)))
ab.reload()
cd.reload()
self.assertEqual(ab.geom, LineString((0, 0), (1.1, 0)))
self.assertEqual(cd.geom, LineString((1.1, 1), (1.1, 0)))
self.assertEqual(len(Path.objects.all()), 4)
def test_split_almost_4(self):
"""
C
-----+----+ A
| |
| |
-----+----+ B
D
"""
ab = PathFactory.create(name="AB", geom=LineString((998522.520690918, 6381896.4595642),
(997785.990158081, 6381124.21846007),
(998272.546691896, 6380561.77696227),
(999629.548400879, 6381209.03106688)))
cd = PathFactory.create(name="CD", geom=LineString((998522.520690918, 6381896.4595642),
(999098.044800479, 6380955.51783641)))
ab.reload()
cd.reload()
self.assertEqual(len(Path.objects.all()), 3)
def test_split_multiple(self):
"""
C E G I
+ + + +
| | | |
A +--+---+---+---+--+ B
| | | |
+ + + +
D F H J
"""
PathFactory.create(name="CD", geom=LineString((1, -2), (1, 2)))
PathFactory.create(name="EF", geom=LineString((2, -2), (2, 2)))
PathFactory.create(name="GH", geom=LineString((3, -2), (3, 2)))
PathFactory.create(name="IJ", geom=LineString((4, -2), (4, 2)))
PathFactory.create(name="AB", geom=LineString((0, 0), (5, 0)))
self.assertEqual(len(Path.objects.filter(name="CD")), 2)
self.assertEqual(len(Path.objects.filter(name="EF")), 2)
self.assertEqual(len(Path.objects.filter(name="GH")), 2)
self.assertEqual(len(Path.objects.filter(name="IJ")), 2)
self.assertEqual(len(Path.objects.filter(name="AB")), 5)
def test_split_multiple_2(self):
"""
C E G I
+ + + +
| | | |
| | | |
A +--+---+---+---+--+ B
D F H J
"""
PathFactory.create(name="CD", geom=LineString((1, -2), (1, 2)))
PathFactory.create(name="EF", geom=LineString((2, -2), (2, 2)))
PathFactory.create(name="GH", geom=LineString((3, -2), (3, 2)))
PathFactory.create(name="IJ", geom=LineString((4, -2), (4, 2)))
PathFactory.create(name="AB", geom=LineString((0, -2), (5, -2)))
self.assertEqual(len(Path.objects.filter(name="CD")), 1)
self.assertEqual(len(Path.objects.filter(name="EF")), 1)
self.assertEqual(len(Path.objects.filter(name="GH")), 1)
self.assertEqual(len(Path.objects.filter(name="IJ")), 1)
self.assertEqual(len(Path.objects.filter(name="AB")), 5)
def test_split_multiple_3(self):
"""
+ +
E \ / F
A +---+--+--------+--+---+ B
| \ / | AB exists. Create EF. Create CD.
+----+----+----+
\ /
\/
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (10, 0)))
PathFactory.create(name="EF", geom=LineString((2, 0), (2, -1), (8, -1), (8, 0)))
PathFactory.create(name="CD", geom=LineString((2, 1), (5, -2), (8, 1)))
self.assertEqual(len(Path.objects.filter(name="AB")), 5)
self.assertEqual(len(Path.objects.filter(name="EF")), 3)
self.assertEqual(len(Path.objects.filter(name="CD")), 5)
def test_split_multiple_4(self):
"""
Same as previous, but with non-round coordinate values at the intersections.
C D
+ +
E \ / F
A +---+--+--------+--+---+ B
\ \ / / AB exists. Create EF. Create CD.
\ \ / /
---+--+---
\/
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (10, 0)))
PathFactory.create(name="EF", geom=LineString((2, 0), (2, -1), (8, -1), (8, 0)))
PathFactory.create(name="CD", geom=LineString((2, 1), (5, -2), (8, 1)))
PathFactory.create(name="CD", geom=LineString((3, 1), (5, -2), (7, 1)))
self.assertEqual(len(Path.objects.filter(name="AB")), 5)
self.assertEqual(len(Path.objects.filter(name="EF")), 3)
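# The topology tests below all exercise one remapping rule: when a path is
# split at relative position t (0 < t < 1), a topology position p on the
# original path maps to p/t on the kept segment (if p <= t) or to
# (p - t)/(1 - t) on the newly created clone. A standalone sketch of that
# rule -- names are illustrative, not Geotrek's internal API:
def remap_after_split(p, t):
    """Return (segment_index, new_position) for position p after a split at t."""
    if p <= t:
        return 0, p / t              # stays on the original, now shorter, path
    return 1, (p - t) / (1 - t)      # falls onto the newly created clone
# e.g. remap_after_split(0.25, 0.5) == (0, 0.5), matching test_split_tee_1 below.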
class SplitPathLineTopologyTest(TestCase):
def test_split_tee_1(self):
"""
C
A +---===+===---+ B
A' | B'
+ AB exists with topology A'B'.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.25, end=0.75)
topogeom = topology.geom
# Topology covers 1 path
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(name="CD", geom=LineString((2, 0), (2, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
# Topology now covers 2 paths
self.assertEqual(len(topology.paths.all()), 2)
# AB and its clone have one topology each
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 1)
# Topology positions were remapped proportionally to the new path lengths
aggr_ab = ab.aggregations.all()[0]
aggr_cb = cb.aggregations.all()[0]
self.assertEqual((0.5, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.0, 0.5), (aggr_cb.start_position, aggr_cb.end_position))
topology.reload()
self.assertNotEqual(topology.geom, topogeom)
self.assertEqual(topology.geom.coords[0], topogeom.coords[0])
self.assertEqual(topology.geom.coords[-1], topogeom.coords[-1])
def test_split_tee_1_reversed(self):
"""
C
A +---===+===---+ B
A' | B'
+ AB exists with topology A'B'.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.75, end=0.25, order=1)
# Topology covers 1 path
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(name="CD", geom=LineString((2, 0), (2, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
# Topology now covers 2 paths
self.assertEqual(len(topology.paths.all()), 2)
# AB and its clone have one topology each
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 1)
# Topology positions were remapped proportionally to the new path lengths
aggr_ab = ab.aggregations.all()[0]
aggr_cb = cb.aggregations.all()[0]
self.assertEqual((1.0, 0.5), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.5, 0.0), (aggr_cb.start_position, aggr_cb.end_position))
topology.reload()
self.assertEqual(topology.geom, LineString((3.0, 0.0, 0.0), (2.0, 0.0, 0.0), (1.0, 0.0, 0.0)))
def test_split_tee_2(self):
"""
C
A +---+---=====--+ B
| A' B'
+ AB exists with topology A'B'.
D Add CD
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.5, end=0.75)
topogeom = topology.geom
# Topology covers 1 path
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(topology.paths.all()[0], ab)
PathFactory.create(name="CD", geom=LineString((1, 0), (1, 2)))
# CB was just created
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
# AB has no topology anymore
self.assertEqual(len(ab.aggregations.all()), 0)
# The topology still covers exactly one path, but it is now the new one
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 1)
self.assertEqual(topology.paths.all()[0].pk, cb.pk)
topology.reload()
self.assertEqual(topology.geom, topogeom)
def test_split_tee_2_reversed(self):
"""
C
A +---+---=====--+ B
| A' B'
+ AB exists with topology A'B'.
D Add CD
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.75, end=0.5)
topogeom = topology.geom
# Topology covers 1 path
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(topology.paths.all()[0], ab)
PathFactory.create(name="CD", geom=LineString((1, 0), (1, 2)))
# CB was just created
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
# AB has no topology anymore
self.assertEqual(len(ab.aggregations.all()), 0)
# The topology still covers exactly one path, but it is now the new one
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 1)
self.assertEqual(topology.paths.all()[0].pk, cb.pk)
topology.reload()
self.assertEqual(topology.geom, topogeom)
def test_split_tee_3(self):
"""
C
A +--=====--+---+ B
A' B' |
+ AB exists with topology A'B'.
D Add CD
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.3, end=0.6)
topogeom = topology.geom
# Topology covers 1 path
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(topology.paths.all()[0], ab)
PathFactory.create(name="CD", geom=LineString((3, 0), (3, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
# CB does not carry any aggregation
self.assertEqual(len(cb.aggregations.all()), 0)
# AB still has its topology
self.assertEqual(len(ab.aggregations.all()), 1)
# But start/end have changed
aggr_ab = ab.aggregations.all()[0]
self.assertEqual((0.4, 0.8), (aggr_ab.start_position, aggr_ab.end_position))
topology.reload()
self.assertEqual(topology.geom, topogeom)
def test_split_tee_3_reversed(self):
"""
C
A +--=====--+---+ B
A' B' |
+ AB exists with topology A'B'.
D Add CD
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.45, end=0.15)
# Topology covers 1 path
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(topology.paths.all()[0], ab)
PathFactory.create(name="CD", geom=LineString((3, 0), (3, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
# CB does not carry any aggregation
self.assertEqual(len(cb.aggregations.all()), 0)
# AB still has its topology
self.assertEqual(len(ab.aggregations.all()), 1)
# But start/end have changed
aggr_ab = ab.aggregations.all()[0]
self.assertEqual((0.6, 0.2), (aggr_ab.start_position, aggr_ab.end_position))
topology.reload()
self.assertEqual(topology.geom, LineString((1.7999999999999998, 0.0, 0.0), (0.5999999999999996, 0.0, 0.0)))
def test_split_tee_4(self):
"""
B C E
A +--===+===+===+===--+ F
|
+ AB, BE, EF exist. A topology exists along them.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (2, 0)))
be = PathFactory.create(name="BE", geom=LineString((2, 0), (4, 0)))
ef = PathFactory.create(name="EF", geom=LineString((4, 0), (6, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.5, end=1)
topology.add_path(be, start=0, end=1)
topology.add_path(ef, start=0.0, end=0.5)
topogeom = topology.geom
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(be.aggregations.all()), 1)
self.assertEqual(len(ef.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 3)
# Create CD
PathFactory.create(name="CD", geom=LineString((3, 0), (3, 2)))
# Topology now covers 4 paths
self.assertEqual(len(topology.paths.all()), 4)
# AB and EF have still their topology
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(ef.aggregations.all()), 1)
# BE and CE have one topology from 0.0 to 1.0
bc = Path.objects.filter(pk=be.pk)[0]
ce = Path.objects.filter(name="BE").exclude(pk=be.pk)[0]
self.assertEqual(len(bc.aggregations.all()), 1)
self.assertEqual(len(ce.aggregations.all()), 1)
aggr_bc = bc.aggregations.all()[0]
aggr_ce = ce.aggregations.all()[0]
self.assertEqual((0.0, 1.0), (aggr_bc.start_position, aggr_bc.end_position))
self.assertEqual((0.0, 1.0), (aggr_ce.start_position, aggr_ce.end_position))
topology.reload()
self.assertEqual(len(topology.aggregations.all()), 4)
# Geometry has changed
self.assertNotEqual(topology.geom, topogeom)
# But extremities are equal
self.assertEqual(topology.geom.coords[0], topogeom.coords[0])
self.assertEqual(topology.geom.coords[-1], topogeom.coords[-1])
def test_split_tee_4_reversed(self):
"""
B C E
A +--===+===+===+===--+ F
|
+ AB, BE, EF exist. A topology exists along them.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (2, 0)))
be = PathFactory.create(name="BE", geom=LineString((4, 0), (2, 0)))
ef = PathFactory.create(name="EF", geom=LineString((4, 0), (6, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.5, end=1)
topology.add_path(be, start=1, end=0)
topology.add_path(ef, start=0.0, end=0.5)
# Create DC
PathFactory.create(name="DC", geom=LineString((3, 0), (3, 2)))
# Topology now covers 4 paths
topology.reload()
self.assertEqual(len(topology.paths.all()), 4)
# BE and CE have one topology from 0.0 to 1.0
bc = Path.objects.filter(pk=be.pk)[0]
ce = Path.objects.filter(name="BE").exclude(pk=be.pk)[0]
aggr_ab = ab.aggregations.all()[0]
aggr_bc = bc.aggregations.all()[0]
aggr_ce = ce.aggregations.all()[0]
aggr_ef = ef.aggregations.all()[0]
self.assertEqual((0.5, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((1.0, 0.0), (aggr_bc.start_position, aggr_bc.end_position))
self.assertEqual((1.0, 0.0), (aggr_ce.start_position, aggr_ce.end_position))
self.assertEqual((0.0, 0.5), (aggr_ef.start_position, aggr_ef.end_position))
topology.reload()
self.assertEqual(len(topology.aggregations.all()), 4)
# Geometry has changed
self.assertEqual(topology.geom, LineString((1.0, 0.0, 0.0), (2.0, 0.0, 0.0),
(3.0, 0.0, 0.0), (4.0, 0.0, 0.0),
(5.0, 0.0, 0.0)))
def test_split_twice(self):
"""
C D
+ +
| |
A +--==+===+==--+ B
| |
+---+
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.1, end=0.9)
topogeom = topology.geom
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(name="CD", geom=LineString((1, 2), (1, -2),
(3, -2), (3, 2)))
self.assertEqual(len(topology.paths.all()), 3)
self.assertEqual(len(ab.aggregations.all()), 1)
aggr_ab = ab.aggregations.all()[0]
self.assertEqual((0.4, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
ab2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
ab3 = Path.objects.filter(name="AB").exclude(pk__in=[ab.pk, ab2.pk])[0]
if ab2.geom.length < ab3.geom.length:
ab2, ab3 = ab3, ab2
aggr_ab2 = ab2.aggregations.all()[0]
aggr_ab3 = ab3.aggregations.all()[0]
self.assertEqual((0.0, 1.0), (aggr_ab2.start_position, aggr_ab2.end_position))
self.assertEqual((0.0, 0.6), (aggr_ab3.start_position, aggr_ab3.end_position))
topology.reload()
self.assertNotEqual(topology.geom, topogeom)
self.assertEqual(topology.geom.coords[0], topogeom.coords[0])
self.assertEqual(topology.geom.coords[-1], topogeom.coords[-1])
def test_split_twice_reversed(self):
"""
C D
+ +
| |
A +--==+===+==--+ B
| |
+---+
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.9, end=0.1, order=1)
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(name="CD", geom=LineString((1, 2), (1, -2),
(3, -2), (3, 2)))
self.assertEqual(len(topology.paths.all()), 3)
self.assertEqual(len(ab.aggregations.all()), 1)
aggr_ab = ab.aggregations.all()[0]
self.assertEqual((1.0, 0.4), (aggr_ab.start_position, aggr_ab.end_position))
ab2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
ab3 = Path.objects.filter(name="AB").exclude(pk__in=[ab.pk, ab2.pk])[0]
aggr_ab2 = ab2.aggregations.all()[0]
aggr_ab3 = ab3.aggregations.all()[0]
if aggr_ab2.start_position == 1.0:
self.assertEqual((1.0, 0.0), (aggr_ab2.start_position, aggr_ab2.end_position))
self.assertEqual((0.6, 0.0), (aggr_ab3.start_position, aggr_ab3.end_position))
else:
# Depending on PostgreSQL fetch order, `ab2` may actually be `ab3`
self.assertEqual((1.0, 0.0), (aggr_ab3.start_position, aggr_ab3.end_position))
self.assertEqual((0.6, 0.0), (aggr_ab2.start_position, aggr_ab2.end_position))
topology.reload()
self.assertEqual(topology.geom, LineString((3.6000000000000001, 0), (3, 0),
(1.0, 0.0), (0.4, 0.0)))
def test_split_on_update(self):
""" + E
:
||
A +-----------+ B A +----++---+ B
||
C +-====-+ D C +--===+ D
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, -1), (4, -1)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.3, end=0.9)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((0, -1), (2, -1), (2, 2))
cd.save()
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(topology.paths.all()), 2)
self.assertEqual(len(cd.aggregations.all()), 1)
self.assertEqual(len(cd2.aggregations.all()), 1)
aggr_cd = cd.aggregations.all()[0]
aggr_cd2 = cd2.aggregations.all()[0]
self.assertEqual((0.5, 1.0), (aggr_cd.start_position, aggr_cd.end_position))
self.assertEqual((0.0, 0.75), (aggr_cd2.start_position, aggr_cd2.end_position))
def test_split_on_update_2(self):
""" + E
:
:
A +-----------+ B A +-----+---+ B
:
C +-==------+ D C +--===+ D
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, -1), (4, -1)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.15, end=0.3)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((0, -1), (2, -1), (2, 2))
cd.save()
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 1)
self.assertEqual(len(cd2.aggregations.all()), 0)
aggr_cd = cd.aggregations.all()[0]
self.assertEqual((0.25, 0.5), (aggr_cd.start_position, aggr_cd.end_position))
def test_split_on_update_3(self):
""" + E
||
||
A +-----------+ B A +-----+---+ B
:
C +------==-+ D C +-----+ D
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, -1), (4, -1)))
# Create a topology
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.7, end=0.85)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((0, -1), (2, -1), (2, 2))
cd.save()
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 0)
self.assertEqual(len(cd2.aggregations.all()), 1)
aggr_cd2 = cd2.aggregations.all()[0]
self.assertEqual((0.25, 0.625), (aggr_cd2.start_position, aggr_cd2.end_position))
def test_split_on_return_topology(self):
"""
A B C D
+-------+-------+-------+
>=================+
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
bc = PathFactory.create(name="BC", geom=LineString((4, 0), (8, 0)))
cd = PathFactory.create(name="CD", geom=LineString((8, 0), (12, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.5, end=1, order=1)
topology.add_path(bc, start=0, end=1, order=2)
topology.add_path(cd, start=0.0, end=0.5, order=3)
topology.add_path(cd, start=0.5, end=0.5, order=4)
topology.add_path(cd, start=0.5, end=0.0, order=5)
topology.add_path(bc, start=1, end=0, order=6)
topology.add_path(ab, start=1, end=0.5, order=7)
self.assertEqual(len(topology.aggregations.all()), 7)
topogeom = topology.geom
PathFactory.create(name="split", geom=LineString((9, -1), (9, 1)))
topology.reload()
self.assertItemsEqual(topology.aggregations.order_by('order').values_list('order', 'path__name'),
[(1, 'AB'), (2, 'BC'), (3, 'CD'), (3, 'CD'), (4, 'CD'),
(5, 'CD'), (5, 'CD'), (6, 'BC'), (7, 'AB')])
self.assertTrue(topology.geom.equals(topogeom))
def test_split_on_topology_with_offset(self):
"""
A B
+---------------+
>=======+
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
topology = TopologyFactory.create(no_path=True, offset=1)
topology.add_path(ab, start=0.25, end=0.75, order=1)
self.assertEqual(len(topology.aggregations.all()), 1)
topogeom = topology.geom
PathFactory.create(name="split", geom=LineString((2, -2), (2, 2)))
topology.reload()
self.assertItemsEqual(topology.aggregations.order_by('order').values_list('order', 'path__name'),
[(1, 'AB'), (1, 'AB')])
self.assertTrue(topology.geom.equals(topogeom))
def test_split_on_topology_with_offset_and_point(self):
"""
A B
+---------------+
>=======+
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (5, 0)))
topology = TopologyFactory.create(no_path=True, offset=1)
topology.add_path(ab, start=0.2, end=0.6, order=1)
topology.add_path(ab, start=0.6, end=0.6, order=2)
topology.add_path(ab, start=0.6, end=0.8, order=3)
self.assertEqual(len(topology.aggregations.all()), 3)
topogeom = topology.geom
PathFactory.create(name="split", geom=LineString((2, -2), (2, 2)))
topology.reload()
self.assertItemsEqual(topology.aggregations.order_by('order').values_list('order', 'path__name'),
[(1, 'AB'), (1, 'AB'), (2, 'AB'), (3, 'AB')])
self.assertTrue(topology.geom.equals(topogeom))
class SplitPathPointTopologyTest(TestCase):
def test_split_tee_1(self):
"""
C
A +-----X----+ B
|
+ AB exists with topology at C.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.5, end=0.5)
self.assertEqual(len(topology.paths.all()), 1)
cd = PathFactory.create(geom=LineString((2, 0), (2, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
self.assertEqual(len(topology.paths.all()), 3)
self.assertEqual(len(ab.aggregations.all()), 1)
aggr_ab = ab.aggregations.all()[0]
self.assertEqual(len(cb.aggregations.all()), 1)
aggr_cb = cb.aggregations.all()[0]
self.assertEqual(len(cd.aggregations.all()), 1)
aggr_cd = cd.aggregations.all()[0]
self.assertEqual((1.0, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.0, 0.0), (aggr_cb.start_position, aggr_cb.end_position))
self.assertEqual((0.0, 0.0), (aggr_cd.start_position, aggr_cd.end_position))
def test_split_tee_2(self):
"""
C
A +--X--+----+ B
|
+ AB exists.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.25, end=0.25)
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(geom=LineString((2, 0), (2, 2)))
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(ab.aggregations.all()), 1)
aggr_ab = ab.aggregations.all()[0]
self.assertEqual((0.5, 0.5), (aggr_ab.start_position, aggr_ab.end_position))
def test_split_tee_3(self):
"""
C
A +-----+--X--+ B
|
+ AB exists.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.75, end=0.75)
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(geom=LineString((2, 0), (2, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(ab.aggregations.all()), 0)
self.assertEqual(len(cb.aggregations.all()), 1)
aggr_cb = cb.aggregations.all()[0]
self.assertEqual((0.5, 0.5), (aggr_cb.start_position, aggr_cb.end_position))
def test_split_tee_4(self):
"""
C
A X-----+----+ B
|
+ AB exists.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.0, end=0.0)
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(geom=LineString((2, 0), (2, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 0)
aggr_ab = ab.aggregations.all()[0]
self.assertEqual((0.0, 0.0), (aggr_ab.start_position, aggr_ab.end_position))
def test_split_tee_5(self):
"""
C
A +-----+----X B
|
+ AB exists.
D Add CD.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=1.0, end=1.0)
self.assertEqual(len(topology.paths.all()), 1)
PathFactory.create(name="CD", geom=LineString((2, 0), (2, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(ab.aggregations.all()), 0)
self.assertEqual(len(cb.aggregations.all()), 1)
aggr_cb = cb.aggregations.all()[0]
self.assertEqual((1.0, 1.0), (aggr_cb.start_position, aggr_cb.end_position))
def test_split_tee_6(self):
"""
X
C
A +-----+-----+ B
|
+ AB exists. Add CD.
D Point with offset is now linked to AC.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (8, 0)))
poi = Point(1, 3, srid=settings.SRID)
poi.transform(settings.API_SRID)
topology = Topology.deserialize({'lat': poi.y, 'lng': poi.x})
aggr = topology.aggregations.all()[0]
position = topology.geom.coords
self.assertTrue(almostequal(3, topology.offset))
self.assertTrue(almostequal(0.125, aggr.start_position))
self.assertTrue(almostequal(0.125, aggr.end_position))
# Add CD
PathFactory.create(name="CD", geom=LineString((4, 0), (4, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
aggr_ab = ab.aggregations.all()[0]
topology.reload()
self.assertTrue(almostequal(3, topology.offset))
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 0)
self.assertEqual(position, topology.geom.coords)
self.assertTrue(almostequal(0.5, aggr_ab.start_position))
self.assertTrue(almostequal(0.5, aggr_ab.end_position))
def test_split_tee_7(self):
"""
X
C
A +-----+-----+ B
|
+ AB exists. Add CD.
D Point with offset is now linked to CB.
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (8, 0)))
poi = Point(7, 3, srid=settings.SRID)
poi.transform(settings.API_SRID)
topology = Topology.deserialize({'lat': poi.y, 'lng': poi.x})
aggr = topology.aggregations.all()[0]
position = topology.geom.coords
self.assertTrue(almostequal(3, topology.offset))
self.assertTrue(almostequal(0.875, aggr.start_position))
self.assertTrue(almostequal(0.875, aggr.end_position))
# Add CD
PathFactory.create(name="CD", geom=LineString((4, 0), (4, 2)))
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
topology.reload()
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(ab.aggregations.all()), 0)
self.assertEqual(len(cb.aggregations.all()), 1)
self.assertTrue(almostequal(3, topology.offset), topology.offset)
self.assertEqual(position, topology.geom.coords)
aggr_cb = cb.aggregations.all()[0]
self.assertTrue(almostequal(0.75, aggr_cb.start_position))
self.assertTrue(almostequal(0.75, aggr_cb.end_position))
def test_split_on_update(self):
""" + D
:
:
A +-----------+ B A +-----X---+ B
:
C +---X---+ D C +----+
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, 1), (4, 1)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.5, end=0.5)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((2, -2), (2, 2))
cd.save()
ab2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(ab2.aggregations.all()), 1)
self.assertEqual(len(cd2.aggregations.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 1)
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 4)
aggr_ab = ab.aggregations.all()[0]
aggr_ab2 = ab2.aggregations.all()[0]
aggr_cd = cd.aggregations.all()[0]
aggr_cd2 = cd2.aggregations.all()[0]
self.assertEqual((1.0, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.0, 0.0), (aggr_ab2.start_position, aggr_ab2.end_position))
self.assertEqual((1.0, 1.0), (aggr_cd.start_position, aggr_cd.end_position))
self.assertEqual((0.0, 0.0), (aggr_cd2.start_position, aggr_cd2.end_position))
def test_split_on_update_2(self):
""" + D
:
:
A +-----------+ B A +-----+---+ B
:
C +-X-----+ D C +--X-+
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, 1), (4, 1)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.25, end=0.25)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((2, -2), (2, 2))
cd.save()
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 1)
self.assertEqual(len(cd2.aggregations.all()), 0)
aggr_cd = cd.aggregations.all()[0]
self.assertEqual((0.5, 0.5), (aggr_cd.start_position, aggr_cd.end_position))
def test_split_on_update_3(self):
""" + E
X
:
A +-----------+ B A +-----+---+ B
:
C +-----X-+ D C +----+ D
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, 1), (4, 1)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.75, end=0.75)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((2, -2), (2, 2))
cd.save()
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 0)
self.assertEqual(len(cd2.aggregations.all()), 1)
aggr_cd2 = cd2.aggregations.all()[0]
self.assertEqual((0.5, 0.5), (aggr_cd2.start_position, aggr_cd2.end_position))
def test_split_on_update_4(self):
""" + E
:
:
A +-----------+ B A +-----+---+ B
:
C X-------+ D C X----+ D
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, 1), (4, 1)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.0, end=0.0)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((2, -2), (2, 2))
cd.save()
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 1)
self.assertEqual(len(cd2.aggregations.all()), 0)
aggr_cd = cd.aggregations.all()[0]
self.assertEqual((0.0, 0.0), (aggr_cd.start_position, aggr_cd.end_position))
def test_split_on_update_5(self):
""" X E
:
:
A +-----------+ B A +-----+---+ B
:
C +-------X D C +----+ D
"""
PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, 1), (4, 1)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=1.0, end=1.0)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((2, -2), (2, 2))
cd.save()
cd2 = Path.objects.filter(name="CD").exclude(pk=cd.pk)[0]
self.assertEqual(len(topology.paths.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 0)
self.assertEqual(len(cd2.aggregations.all()), 1)
aggr_cd2 = cd2.aggregations.all()[0]
self.assertEqual((1.0, 1.0), (aggr_cd2.start_position, aggr_cd2.end_position))
def test_split_on_update_6(self):
"""
D
A +-----------+ B A +-----X---+ B
:
C +-------X D :
+
C
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, 1), (4, 1)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=1.0, end=1.0)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((2, -2), (2, 0))
cd.save()
db = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(db.aggregations.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 3)
aggr_ab = ab.aggregations.all()[0]
aggr_db = db.aggregations.all()[0]
aggr_cd = cd.aggregations.all()[0]
self.assertEqual((1.0, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.0, 0.0), (aggr_db.start_position, aggr_db.end_position))
self.assertEqual((1.0, 1.0), (aggr_cd.start_position, aggr_cd.end_position))
def test_split_on_update_7(self):
"""
C
A +-----------+ B A +-----X---+ B
:
C X-------+ D :
+ D
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0)))
cd = PathFactory.create(name="CD", geom=LineString((0, 1), (4, 1)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(cd, start=0.0, end=0.0)
self.assertEqual(len(topology.paths.all()), 1)
cd.geom = LineString((2, 0), (2, -2))
cd.save()
cb = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 1)
self.assertEqual(len(cd.aggregations.all()), 1)
self.assertEqual(len(topology.paths.all()), 3)
aggr_ab = ab.aggregations.all()[0]
aggr_cb = cb.aggregations.all()[0]
aggr_cd = cd.aggregations.all()[0]
self.assertEqual((1.0, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.0, 0.0), (aggr_cb.start_position, aggr_cb.end_position))
self.assertEqual((0.0, 0.0), (aggr_cd.start_position, aggr_cd.end_position))
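# Note on the point-topology cases above: a point that lands exactly on the
# split junction is expressed as position 1.0 on the upstream segment and/or
# 0.0 on the downstream one, which is why several assertions check the
# degenerate aggregations (1.0, 1.0) and (0.0, 0.0).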
class SplitPathGenericTopologyTest(TestCase):
def test_add_simple_path(self):
"""
A +--== ==----+ C
\\ //
\\ //
==+==
B
Add path:
D E
A +--==+--------+==----+ C
\\ //
\\ //
==+==
B
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0),
(6, -2), (8, -2)))
bc = PathFactory.create(name="BC", geom=LineString((8, -2), (10, -2),
(12, 0), (14, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.25, end=1.0)
topology.add_path(bc, start=0.0, end=0.75)
self.assertEqual(len(topology.paths.all()), 2)
originalgeom = LineString((2.2071067811865475, 0), (4, 0), (6, -2), (8, -2), (10, -2), (12, 0), (12.2928932188134521, 0))
self.assertEqual(topology.geom, originalgeom)
# Add a path
de = PathFactory.create(name="DE", geom=LineString((4, 0), (12, 0)))
self.assertEqual(len(Path.objects.all()), 5)
ab_2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
bc_2 = Path.objects.filter(name="BC").exclude(pk=bc.pk)[0]
# Topology aggregations were updated
topology.reload()
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(ab_2.aggregations.all()), 1)
self.assertEqual(len(bc.aggregations.all()), 1)
self.assertEqual(len(bc_2.aggregations.all()), 1)
self.assertEqual(len(de.aggregations.all()), 0)
aggr_ab = ab.aggregations.all()[0]
aggr_ab2 = ab_2.aggregations.all()[0]
aggr_bc = bc.aggregations.all()[0]
aggr_bc2 = bc_2.aggregations.all()[0]
self.assertEqual((0.551776695296637, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.0, 1.0), (aggr_ab2.start_position, aggr_ab2.end_position))
self.assertEqual((0.0, 1.0), (aggr_bc.start_position, aggr_bc.end_position))
self.assertEqual((0.0, 0.146446609406726), (aggr_bc2.start_position, aggr_bc2.end_position))
# But the topology's resulting geometry did not change
self.assertEqual(topology.geom, originalgeom)
def test_add_path_converge(self):
"""
A +--== ==----+ C
\\ //
\\ //
==+==
B
Add path:
D E
A +--==+--------+==----+ C
\\ //
\\ //
==+==
B
"""
ab = PathFactory.create(name="AB", geom=LineString((0, 0), (4, 0),
(6, -2), (8, -2)))
cb = PathFactory.create(name="CB", geom=LineString((14, 0), (12, 0),
(10, -2), (8, -2)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ab, start=0.25, end=1.0)
topology.add_path(cb, start=1.0, end=0.25)
self.assertEqual(len(topology.paths.all()), 2)
originalgeom = LineString((2.2071067811865475, 0), (4, 0), (6, -2), (8, -2), (10, -2), (12, 0), (12.2928932188134521, 0))
self.assertEqual(topology.geom, originalgeom)
# Add a path
de = PathFactory.create(name="DE", geom=LineString((4, 0), (12, 0)))
self.assertEqual(len(Path.objects.all()), 5)
ab_2 = Path.objects.filter(name="AB").exclude(pk=ab.pk)[0]
cb_2 = Path.objects.filter(name="CB").exclude(pk=cb.pk)[0]
# Topology aggregations were updated
topology.reload()
self.assertEqual(len(ab.aggregations.all()), 1)
self.assertEqual(len(ab_2.aggregations.all()), 1)
self.assertEqual(len(cb.aggregations.all()), 1)
self.assertEqual(len(cb_2.aggregations.all()), 1)
self.assertEqual(len(de.aggregations.all()), 0)
aggr_ab = ab.aggregations.all()[0]
aggr_ab2 = ab_2.aggregations.all()[0]
aggr_cb = cb.aggregations.all()[0]
aggr_cb2 = cb_2.aggregations.all()[0]
self.assertEqual((0.551776695296637, 1.0), (aggr_ab.start_position, aggr_ab.end_position))
self.assertEqual((0.0, 1.0), (aggr_ab2.start_position, aggr_ab2.end_position))
self.assertEqual((1.0, 0.0), (aggr_cb2.start_position, aggr_cb2.end_position))
self.assertEqual((1.0, 0.853553390593274), (aggr_cb.start_position, aggr_cb.end_position))
# But the topology's resulting geometry did not change
self.assertEqual(topology.geom, originalgeom)
def test_add_path_diverge(self):
"""
A +--== ==----+ C
\\ //
\\ //
==+==
B
Add path:
D E
A +--==+--------+==----+ C
\\ //
\\ //
==+==
B
"""
ba = PathFactory.create(name="BA", geom=LineString((8, -2), (6, -2),
(4, 0), (0, 0)))
bc = PathFactory.create(name="BC", geom=LineString((8, -2), (10, -2),
(12, 0), (14, 0)))
topology = TopologyFactory.create(no_path=True)
topology.add_path(ba, start=0.75, end=0.0, order=1)
topology.add_path(bc, start=0.0, end=0.75, order=2)
self.assertEqual(len(topology.paths.all()), 2)
originalgeom = LineString((2.2071067811865475, 0), (4, 0), (6, -2), (8, -2), (10, -2), (12, 0), (12.2928932188134521, 0))
self.assertEqual(topology.geom, originalgeom)
# Add a path
de = PathFactory.create(name="DE", geom=LineString((4, 0), (12, 0)))
self.assertEqual(len(Path.objects.all()), 5)
ba_2 = Path.objects.filter(name="BA").exclude(pk=ba.pk)[0]
bc_2 = Path.objects.filter(name="BC").exclude(pk=bc.pk)[0]
# Topology aggregations were updated
topology.reload()
self.assertEqual(len(ba.aggregations.all()), 1)
self.assertEqual(len(ba_2.aggregations.all()), 1)
self.assertEqual(len(bc.aggregations.all()), 1)
self.assertEqual(len(bc_2.aggregations.all()), 1)
self.assertEqual(len(de.aggregations.all()), 0)
aggr_ba = ba.aggregations.all()[0]
aggr_ba2 = ba_2.aggregations.all()[0]
aggr_bc = bc.aggregations.all()[0]
aggr_bc2 = bc_2.aggregations.all()[0]
self.assertEqual((0.448223304703363, 0.0), (aggr_ba2.start_position, aggr_ba2.end_position))
self.assertEqual((1.0, 0.0), (aggr_ba.start_position, aggr_ba.end_position))
self.assertEqual((0.0, 1.0), (aggr_bc.start_position, aggr_bc.end_position))
self.assertEqual((0.0, 0.146446609406726), (aggr_bc2.start_position, aggr_bc2.end_position))
# But the topology's resulting geometry did not change
originalgeom = LineString((2.2071067811865470, 0), *originalgeom[1:])
self.assertEqual(topology.geom, originalgeom)
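# The tests above import `almostequal` from geotrek.common.utils. A minimal
# sketch of such a float-comparison helper, assuming a fixed decimal
# precision (the real helper's signature and default tolerance may differ):
def almostequal_sketch(a, b, precision=2):
    """Hypothetical approximate-equality check for offsets and positions."""
    return abs(a - b) < 10 ** -precision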
| 42.988321
| 129
| 0.518151
| 7,262
| 58,894
| 4.118149
| 0.034701
| 0.14696
| 0.098107
| 0.044239
| 0.925199
| 0.908881
| 0.886043
| 0.857052
| 0.821909
| 0.803651
| 0
| 0.056948
| 0.309166
| 58,894
| 1,369
| 130
| 43.019722
| 0.678096
| 0.137875
| 0
| 0.701135
| 0
| 0
| 0.009619
| 0
| 0
| 0
| 0
| 0
| 0.398487
| 1
| 0.061791
| false
| 0
| 0.007566
| 0
| 0.074401
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b45fd4f9ef4a6e00f5ddef5c44f3a22a71897762
| 41,665
|
py
|
Python
|
tests/test_templates.py
|
ldflo/autojinja
|
bc126da3b77dd89e88b697a38b2b4ea2cc45f6af
|
[
"BSD-3-Clause"
] | 6
|
2022-01-02T17:28:13.000Z
|
2022-01-18T20:34:53.000Z
|
tests/test_templates.py
|
ldflo/autojinja
|
bc126da3b77dd89e88b697a38b2b4ea2cc45f6af
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_templates.py
|
ldflo/autojinja
|
bc126da3b77dd89e88b697a38b2b4ea2cc45f6af
|
[
"BSD-3-Clause"
] | null | null | null |
import autojinja
import os
import tempfile
class CustomException(Exception):
def __init__(self, result, expected):
result = str(result).replace('\t', "\\t").replace('\n', "\\n\n")
expected = str(expected).replace('\t', "\\t").replace('\n', "\\n\n")
message = f"--- Expected ---\n{expected}\\0\n--- Got ---\n{result}\\0"
super().__init__(message)
settingsRemoveMarkers = autojinja.ParserSettings(remove_markers = True)
settingsPreserveMarkers = autojinja.ParserSettings(remove_markers = False)
tmp = tempfile.TemporaryDirectory()
root = autojinja.path[tmp.name]
input_file = root.join("input.txt")
output_file = root.join("output.txt")
### RawTemplate
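# Note: Generator_RawTemplate (and Generator below) are plain namespaces --
# their functions take no `self` and are always called through the class,
# e.g. Generator_RawTemplate.render(template, expected, args, kwargs).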
class Generator_RawTemplate:
def render(template, expected, args, kwargs):
result = template.context(*args, **kwargs).render()
if result != expected:
raise CustomException(result, expected)
def render_file(template, expected, output, encoding, newline, args, kwargs):
result = template.context(*args, **kwargs).render_file(output, encoding, newline)
if result != expected:
raise CustomException(result, expected)
encoding = encoding or template.encoding
newline = newline or template.newline
with open(output_file, 'r', encoding = encoding, newline = newline) as f:
content = f.read()
if os.name == "nt": # Windows
result = result.replace('\n', newline or '\n')
if content != result:
raise CustomException(content, result)
def check(input, expected, *args, **kwargs):
with open(input_file, 'w') as f:
f.write(input)
### Output
template = autojinja.RawTemplate.from_file(input_file, output_file, None, None, None)
Generator_RawTemplate.render(template, expected, args, kwargs)
template = autojinja.RawTemplate.from_string(input, None, None, None, None)
Generator_RawTemplate.render_file(template, expected, output_file, None, None, args, kwargs)
### Encoding / Newline
template = autojinja.RawTemplate.from_file(input_file, output_file, "ascii", "\r\n", None)
Generator_RawTemplate.render(template, expected, args, kwargs)
template = autojinja.RawTemplate.from_string(input, output_file, None, None, None)
Generator_RawTemplate.render_file(template, expected, None, "ascii", "\r\n", args, kwargs)
### Globals
template = autojinja.RawTemplate.from_file(input_file, output_file, None, None, kwargs)
Generator_RawTemplate.render(template, expected, (), {})
template = autojinja.RawTemplate.from_string(input, output_file, None, None, None)
Generator_RawTemplate.render_file(template, expected, None, None, None, args, kwargs)
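# For reference, the minimal end-to-end flow these helpers exercise, sketched
# under the assumption that from_string/context/render also work with default
# arguments (the extra positional arguments above are explicit Nones):
#
#     template = autojinja.RawTemplate.from_string("Hello {{ name }}")
#     assert template.context(name="world").render() == "Hello world"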
### CogTemplate / JinjaTemplate
class Generator:
def render(template, output, expected, remove_markers, args, kwargs):
result = template.context(*args, **kwargs).render(output, remove_markers)
if result != expected:
raise CustomException(result, expected)
def render_file(template, output, expected, remove_markers, encoding, newline, args, kwargs):
result = template.context(*args, **kwargs).render_file(output, remove_markers, encoding, newline)
if result != expected:
raise CustomException(result, expected)
encoding = encoding or template.encoding
newline = newline or template.newline
with open(output_file, 'r', encoding = encoding, newline = newline) as f:
content = f.read()
result = result.replace('\n', newline or '\n')
if content != result:
raise CustomException(content, result)
def check(class_type, input, output, expected, remove_markers, *args, **kwargs):
def prepare():
if output_file.exists:
os.remove(output_file)
if output is not None:
with open(output_file, 'w') as f:
f.write(output)
with open(input_file, 'w') as f:
f.write(input)
### Output
template = class_type.from_file(input_file, None, None, remove_markers, None, None, None)
prepare(); Generator.render(template, output, expected, None, args, kwargs)
template = class_type.from_string(input, None, None, remove_markers, None, None, None)
prepare(); Generator.render_file(template, output_file, expected, None, None, None, args, kwargs)
### Settings
template = class_type.from_file(input_file, None, autojinja.ParserSettings(), remove_markers, None, None, None)
prepare(); Generator.render(template, output, expected, None, args, kwargs)
template = class_type.from_string(input, output_file, autojinja.ParserSettings(), remove_markers, None, None, None)
prepare(); Generator.render_file(template, None, expected, None, None, None, args, kwargs)
### Remove markers
template = class_type.from_file(input_file, None, None, not remove_markers if remove_markers else None, None, None, None)
prepare(); Generator.render(template, output, expected, remove_markers, args, kwargs)
template = class_type.from_string(input, output_file, None, None, None, None, None)
prepare(); Generator.render_file(template, None, expected, remove_markers, None, None, args, kwargs)
### Encoding / Newline
template = class_type.from_file(input_file, None, None, remove_markers, "ascii", "\r\n", None)
prepare(); Generator.render(template, output, expected, None, args, kwargs)
template = class_type.from_string(input, output_file, None, remove_markers, None, None, None)
prepare(); Generator.render_file(template, None, expected, None, "ascii", "\r\n", args, kwargs)
### Globals
template = class_type.from_file(input_file, None, None, remove_markers, None, None, kwargs)
prepare(); Generator.render(template, output, expected, None, (), {})
template = class_type.from_string(input, output_file, None, remove_markers, None, None, None)
prepare(); Generator.render_file(template, None, expected, None, None, None, args, kwargs)
class Test_RawTemplate:
def test_1(self):
input = " std::cout << {{ var }} << std::endl; "
expected = " std::cout << \"Hello world\" << std::endl; "
Generator_RawTemplate.check(input, expected, var = "\"Hello world\"")
def test_2(self):
input = " {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}"
expected = " result : var1\n" \
" result : var2\n"
Generator_RawTemplate.check(input, expected, list = ["var1", "var2"])
def test_3(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " // [[[ \"Hello world\" ]]]\n" \
" // [[[ end ]]]"
Generator_RawTemplate.check(input, expected, var = "\"Hello world\"")
def test_4(self):
input = " // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // <<[ \"Hello world\" ]>>\n" \
" // <<[ end ]>>"
Generator_RawTemplate.check(input, expected, var = "\"Hello world\"")
def test_5(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // [[[ \"Hello world\" ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ \"Hello world\" ]>>\n" \
" // <<[ end ]>>"
Generator_RawTemplate.check(input, expected, var = "\"Hello world\"")
def test_6(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]"
expected = " [[[\n" \
" <<[ \"Hello world\" ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]"
Generator_RawTemplate.check(input, expected, var = "\"Hello world\"")
def test_7(self):
input = " // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" // [[[ end ]]]"
expected = " // [[[\n" \
" result : var1\n" \
" result : var2\n" \
" // ]]]\n" \
" // [[[ end ]]]"
Generator_RawTemplate.check(input, expected, list = ["var1", "var2"])
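# The CogTemplate tests below rely on the two marker kinds visible in the
# inputs: [[[ ... ]]] ... [[[ end ]]] sections are (re)generated from the
# enclosed template, while <<[ name ]>> ... <<[ end ]>> sections preserve
# manually maintained content across regenerations. With remove_markers=True
# the markers themselves are stripped from the output, as the
# *_remove_markers tests assert.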
class Test_CogTemplate:
def test_newfile(self):
if output_file.exists:
os.remove(output_file)
with open(input_file, 'w') as f:
f.write("test")
template = autojinja.CogTemplate.from_file(input_file)
output = template.render_file(output_file)
assert output == "test"
def test_1(self):
input = " std::cout << {{ var }} << std::endl; "
expected = input
Generator.check(autojinja.CogTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_1_output(self):
input = " std::cout << {{ var }} << std::endl; "
expected = input
output = "Test"
Generator.check(autojinja.CogTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_1_remove_markers(self):
input = " std::cout << {{ var }} << std::endl; "
expected = input
Generator.check(autojinja.CogTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_2(self):
input = " {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}"
expected = input
Generator.check(autojinja.CogTemplate, input, None, expected, None, list = ["var1", "var2"])
def test_2_output(self):
input = " {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}"
expected = input
output = "Test"
Generator.check(autojinja.CogTemplate, input, output, expected, None, list = ["var1", "var2"])
def test_2_remove_markers(self):
input = " {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}"
expected = input
Generator.check(autojinja.CogTemplate, input, None, expected, True, list = ["var1", "var2"])
def test_3(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]"
Generator.check(autojinja.CogTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_3_output(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]"
output = "Test"
Generator.check(autojinja.CogTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_3_remove_markers(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " \"Hello world\"\n"
Generator.check(autojinja.CogTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_4(self):
input = " // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = input
Generator.check(autojinja.CogTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_4_output(self):
input = " // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // <<[ {{ var }} ]>>\n" \
" Test\n" \
" // <<[ end ]>>"
output = "<<[ {{ var }} ]>>\n" \
" Test\n" \
"<<[ end ]>>"
Generator.check(autojinja.CogTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_4_remove_markers(self):
input = " // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = ""
Generator.check(autojinja.CogTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_5(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
Generator.check(autojinja.CogTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_5_output(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" Test\n" \
" // <<[ end ]>>"
output = "<<[ {{ var }} ]>>\n" \
" Test\n" \
"<<[ end ]>>"
Generator.check(autojinja.CogTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_5_remove_markers(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " \"Hello world\"\n" \
"var\n"
Generator.check(autojinja.CogTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_6(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
Generator.check(autojinja.CogTemplate, input, None, expected, None, var = "\"Hello world\"", tmp = "test")
def test_6_output(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" Test\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
output = "<<[ \"Hello world\" ]>> Test <<[ end ]>>"
Generator.check(autojinja.CogTemplate, input, output, expected, None, var = "\"Hello world\"", tmp = "test")
def test_6_remove_markers(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " Test\n" \
" {{ tmp }}"
output = "<<[ \"Hello world\" ]>> Test <<[ end ]>>"
Generator.check(autojinja.CogTemplate, input, output, expected, True, var = "\"Hello world\"", tmp = "test")
def test_7(self):
input = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" // [[[ end ]]]"
expected = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" result : var1\n" \
" result : var2\n" \
" // [[[ end ]]]"
Generator.check(autojinja.CogTemplate, input, None, expected, None, list = ["var1", "var2"], tmp = "test")
def test_7_output(self):
input = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" // [[[ end ]]]"
expected = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" result : var1\n" \
" result : var2\n" \
" // [[[ end ]]]"
output = "Test"
Generator.check(autojinja.CogTemplate, input, output, expected, None, list = ["var1", "var2"], tmp = "test")
def test_7_remove_markers(self):
input = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" // [[[ end ]]]"
expected = " {{ tmp }}\n" \
" result : var1\n" \
" result : var2\n"
Generator.check(autojinja.CogTemplate, input, None, expected, True, list = ["var1", "var2"], tmp = "test")
def test_8(self):
input = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
"[[[ end ]]]"
expected = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
Generator.check(autojinja.CogTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_8_output(self):
input = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
"[[[ end ]]]"
expected = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
" <<[ a ]>>\n" \
" [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" [[[ end ]]]\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
output = "[[[ ]]]\n" \
" <<[ a ]>>\n" \
" [[[ {{ var }} ]]]\n" \
" [[[ end ]]]\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
Generator.check(autojinja.CogTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_8_remove_markers(self):
input = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
"[[[ end ]]]"
expected = " \"Hello world\"\n"
output = "[[[ ]]]\n" \
" <<[ a ]>>\n" \
" [[[ {{ var }} ]]]\n" \
" [[[ end ]]]\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
Generator.check(autojinja.CogTemplate, input, output, expected, True, var = "\"Hello world\"")
def test_9(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" azerty\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
template = autojinja.CogTemplate.from_string(input)
template.edits = { "\"Hello world\"":"azerty" }
result = template.context(var = "\"Hello world\"", tmp = "test").render()
if result != expected:
raise CustomException(result, expected)
def test_10(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" azerty\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
output = "<<[ \"Hello world\" ]>> Test <<[ end ]>>"
template = autojinja.CogTemplate.from_string(input)
template.edits = { "\"Hello world\"":"azerty" }
result = template.context(var = "\"Hello world\"", tmp = "test").render(output)
if result != expected:
raise CustomException(result, expected)
def test_11(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]"
with open(input_file, 'w') as f:
f.write(input)
template = autojinja.CogTemplate.from_file(input_file)
result = template.context(var = "\"Hello world\"").render_file()
if result != expected:
raise CustomException(result, expected)
with open(input_file, 'r') as f:
content = f.read()
if content != result:
raise CustomException(content, result)
def test_12(self):
input = "<<[ abc ]>>\n" \
"test1\n" \
"<<[ end ]>>\n" \
"<<[ def ]>>\n" \
"test2\n" \
"<<[ end ]>>"
expected = "<<[ abc ]>>\n" \
"dummy\n" \
"<<[ end ]>>\n" \
"<<[ def ]>>\n" \
"test2\n" \
"<<[ end ]>>"
output = "<<[ abc ]>> dummy <<[ end ]>>"
template = autojinja.CogTemplate.from_string(input)
dummy = template.edits
result = template.context().render(output)
if result != expected:
raise CustomException(result, expected)
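# Same scenarios as Test_CogTemplate above; the behavioral difference is that
# JinjaTemplate also renders Jinja expressions outside the [[[ ]]] markers
# (compare test_1 and test_2 with their CogTemplate counterparts).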
class Test_JinjaTemplate:
def test_newfile(self):
if output_file.exists:
os.remove(output_file)
with open(input_file, 'w') as f:
f.write("test")
template = autojinja.JinjaTemplate.from_file(input_file)
output = template.render_file(output_file)
assert output == "test"
def test_1(self):
input = " std::cout << {{ var }} << std::endl; "
expected = " std::cout << \"Hello world\" << std::endl; "
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_1_output(self):
input = " std::cout << {{ var }} << std::endl; "
expected = " std::cout << \"Hello world\" << std::endl; "
output = "Test"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_1_remove_markers(self):
input = " std::cout << {{ var }} << std::endl; "
expected = " std::cout << \"Hello world\" << std::endl; "
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_2(self):
input = " {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}"
expected = " result : var1\n" \
" result : var2\n"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, list = ["var1", "var2"])
def test_2_output(self):
input = " {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}"
expected = " result : var1\n" \
" result : var2\n"
output = "Test"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, list = ["var1", "var2"])
def test_2_remove_markers(self):
input = " {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}"
expected = " result : var1\n" \
" result : var2\n"
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, list = ["var1", "var2"])
def test_3(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_3_output(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]"
output = "Test"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_3_remove_markers(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
expected = " \"Hello world\"\n"
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_4(self):
input = " // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = input
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_4_output(self):
input = " // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // <<[ {{ var }} ]>>\n" \
" Test\n" \
" // <<[ end ]>>"
output = "<<[ {{ var }} ]>>\n" \
" Test\n" \
"<<[ end ]>>"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_4_remove_markers(self):
input = " // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = ""
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_5(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_5_output(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" Test\n" \
" // <<[ end ]>>"
output = "<<[ {{ var }} ]>>\n" \
" Test\n" \
"<<[ end ]>>"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_5_remove_markers(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" // <<[ {{ var }} ]>>\n" \
" // <<[ end ]>>"
expected = " \"Hello world\"\n" \
"var\n"
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, var = "\"Hello world\"")
def test_6(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" test"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"", tmp = "test")
def test_6_output(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" Test\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" test"
output = "<<[ \"Hello world\" ]>> Test <<[ end ]>>"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"", tmp = "test")
def test_6_remove_markers(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " Test\n" \
" test"
output = "<<[ \"Hello world\" ]>> Test <<[ end ]>>"
Generator.check(autojinja.JinjaTemplate, input, output, expected, True, var = "\"Hello world\"", tmp = "test")
def test_7(self):
input = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" // [[[ end ]]]"
expected = " test\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" result : var1\n" \
" result : var2\n" \
" // [[[ end ]]]"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, list = ["var1", "var2"], tmp = "test")
def test_7_output(self):
input = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" // [[[ end ]]]"
expected = " test\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" result : var1\n" \
" result : var2\n" \
" // [[[ end ]]]"
output = "Test"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, list = ["var1", "var2"], tmp = "test")
def test_7_remove_markers(self):
input = " {{ tmp }}\n" \
" // [[[\n" \
" {% for var in list %}\n" \
" result : {{ var }}\n" \
" {% endfor %}\n" \
" // ]]]\n" \
" // [[[ end ]]]"
expected = " test\n" \
" result : var1\n" \
" result : var2\n"
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, list = ["var1", "var2"], tmp = "test")
def test_8(self):
input = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
"[[[ end ]]]"
expected = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"")
def test_8_output(self):
input = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
"[[[ end ]]]"
expected = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
" <<[ a ]>>\n" \
" [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" [[[ end ]]]\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
output = "[[[ ]]]\n" \
" <<[ a ]>>\n" \
" [[[ {{ var }} ]]]\n" \
" [[[ end ]]]\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"")
def test_8_remove_markers(self):
input = "[[[\n" \
" <<[ a ]>>\n" \
" <<[ end ]>>\n" \
"]]]\n" \
"[[[ end ]]]"
expected = " \"Hello world\"\n"
output = "[[[ ]]]\n" \
" <<[ a ]>>\n" \
" [[[ {{ var }} ]]]\n" \
" [[[ end ]]]\n" \
" <<[ end ]>>\n" \
"[[[ end ]]]"
Generator.check(autojinja.JinjaTemplate, input, output, expected, True, var = "\"Hello world\"")
def test_9(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" azerty\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" test"
template = autojinja.JinjaTemplate.from_string(input)
template.edits = { "\"Hello world\"":"azerty" }
result = template.context(var = "\"Hello world\"", tmp = "test").render()
if result != expected:
raise CustomException(result, expected)
def test_10(self):
input = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" [[[ end ]]]\n" \
" {{ tmp }}"
expected = " [[[\n" \
" <<[ {{ var }} ]>>\n" \
" <<[ end ]>>\n" \
" ]]]\n" \
" <<[ \"Hello world\" ]>>\n" \
" azerty\n" \
" <<[ end ]>>\n" \
" [[[ end ]]]\n" \
" test"
output = "<<[ \"Hello world\" ]>> Test <<[ end ]>>"
template = autojinja.JinjaTemplate.from_string(input)
template.edits = { "\"Hello world\"":"azerty" }
result = template.context(var = "\"Hello world\"", tmp = "test").render(output)
if result != expected:
raise CustomException(result, expected)
def test_11(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" {{ var2 }}"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]\n" \
"var\n" \
" [[[ test ]]] test [[[ end ]]]"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"", var2 = "[[[ test ]]][[[ end ]]]")
def test_11_output(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" {{ var2 }}"
expected = " // [[[ {{ var }} ]]]\n" \
" \"Hello world\"\n" \
" // [[[ end ]]]\n" \
"var\n" \
" [[[ test ]]] test [[[ end ]]]"
output = "Test"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"", var2 = "[[[ test ]]][[[ end ]]]")
def test_11_remove_markers(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]\n" \
"var\n" \
" {{ var2 }}"
expected = " \"Hello world\"\n" \
"var\n" \
" test"
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, var = "\"Hello world\"", var2 = "[[[ test ]]][[[ end ]]]")
def test_12(self):
input = "{{ var2 }} [[[ {{ var }} ]]] [[[ end ]]] {{ var2 }} [[[ {{ var }} ]]] [[[ end ]]]"
expected = "[[[ test ]]] test [[[ end ]]] [[[ {{ var }} ]]] \"Hello world\" [[[ end ]]] [[[ test ]]] test [[[ end ]]] [[[ {{ var }} ]]] \"Hello world\" [[[ end ]]]"
Generator.check(autojinja.JinjaTemplate, input, None, expected, None, var = "\"Hello world\"", var2 = "[[[ test ]]][[[ end ]]]")
def test_12_output(self):
input = "{{ var2 }} [[[ {{ var }} ]]] [[[ end ]]] {{ var2 }} [[[ {{ var }} ]]] [[[ end ]]]"
expected = "[[[ test ]]] test [[[ end ]]] [[[ {{ var }} ]]] \"Hello world\" [[[ end ]]] [[[ test ]]] test [[[ end ]]] [[[ {{ var }} ]]] \"Hello world\" [[[ end ]]]"
output = "Test"
Generator.check(autojinja.JinjaTemplate, input, output, expected, None, var = "\"Hello world\"", var2 = "[[[ test ]]][[[ end ]]]")
def test_12_remove_markers(self):
input = "{{ var2 }} [[[ {{ var }} ]]] [[[ end ]]] {{ var2 }} [[[ {{ var }} ]]] [[[ end ]]]"
expected = "test \"Hello world\" test \"Hello world\""
Generator.check(autojinja.JinjaTemplate, input, None, expected, True, var = "\"Hello world\"", var2 = "[[[ test ]]][[[ end ]]]")
def test_13(self):
input = " // [[[ {{ var }} ]]]\n" \
" // [[[ end ]]]"
with open(input_file, 'w') as f:
f.write(input)
try:
template = autojinja.JinjaTemplate.from_file(input_file)
template.context(var = "\"Hello world\"").render_file()
except BaseException as e:
exception = e
else:
exception = None
        if exception is None:
raise CustomException(None, AssertionError)
def test_14(self):
input = "<<[ abc ]>>\n" \
"test1\n" \
"<<[ end ]>>\n" \
"<<[ def ]>>\n" \
"test2\n" \
"<<[ end ]>>"
expected = "<<[ abc ]>>\n" \
"dummy\n" \
"<<[ end ]>>\n" \
"<<[ def ]>>\n" \
"test2\n" \
"<<[ end ]>>"
output = "<<[ abc ]>> dummy <<[ end ]>>"
template = autojinja.JinjaTemplate.from_string(input)
dummy = template.edits
result = template.context().render(output)
if result != expected:
raise CustomException(result, expected)
| 44.230361
| 172
| 0.381015
| 3,423
| 41,665
| 4.560327
| 0.033888
| 0.042024
| 0.029148
| 0.035874
| 0.938309
| 0.929148
| 0.919347
| 0.890006
| 0.867008
| 0.848943
| 0
| 0.006383
| 0.420953
| 41,665
| 941
| 173
| 44.277365
| 0.640637
| 0.003336
| 0
| 0.890395
| 0
| 0.00339
| 0.236179
| 0.000554
| 0
| 0
| 0
| 0
| 0.00339
| 1
| 0.089266
| false
| 0
| 0.00339
| 0
| 0.099435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3380f3b7e416d4df932f08399007b42ef363b1b
| 11,046
|
py
|
Python
|
utils.py
|
gaurav71531/bird-acoustics-rcnn
|
0a5ea9f4850cf7f8b84bce973633f083edf752f8
|
[
"MIT"
] | 7
|
2021-07-01T12:48:01.000Z
|
2022-03-16T19:01:19.000Z
|
utils.py
|
gaurav71531/bird-acoustics-rcnn
|
0a5ea9f4850cf7f8b84bce973633f083edf752f8
|
[
"MIT"
] | null | null | null |
utils.py
|
gaurav71531/bird-acoustics-rcnn
|
0a5ea9f4850cf7f8b84bce973633f083edf752f8
|
[
"MIT"
] | 2
|
2021-08-21T18:54:46.000Z
|
2021-09-14T18:38:43.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import torch
import torch.nn as nn
from torch import Tensor
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as data_utils
import numpy as np
import librosa
from joblib import Parallel, delayed
import multiprocessing
import h5py
import pickle as pk
import cv2
import os
import matplotlib.pyplot as plt
from fastprogress.fastprogress import progress_bar
class log_results(object):
def __init__(self, file_name = 'log', results_dir = 'Results'):
self.results_dir = results_dir
self.fname = file_name
if not os.path.exists(self.results_dir):
os.makedirs(results_dir)
def update(self, log):
file_path = os.path.join(self.results_dir, self.fname)
if isinstance(log, dict):
pk.dump(log, open(file_path, 'ab'))
else:
print('log has to be in dictionary format')
class SaveBestModel(object):
def __init__(self, monitor = np.inf, PATH = './currTorchModel.pt',
verbose=False):
self.monitor = monitor
self.PATH = PATH
self.verbose = verbose
def check(self, model, currVal, comp='min'):
        if comp == 'min':
if currVal < self.monitor:
self.monitor = currVal
torch.save(model.state_dict(), self.PATH)
if self.verbose:
print('saving best model...')
        elif comp == 'max':
if currVal > self.monitor:
self.monitor = currVal
torch.save(model.state_dict(), self.PATH)
if self.verbose:
print('saving best model...')
def normalize_mel_sp_slides(X, eps=1e-6):
mean = X.mean()
X = X - mean
std = X.std()
Xstd = X / (std + eps)
_min, _max = Xstd.min(), Xstd.max()
norm_max = _max
norm_min = _min
if (_max - _min) > eps:
        # Normalize to [0, 1]
V = Xstd
V[V < norm_min] = norm_min
V[V > norm_max] = norm_max
V = (V - norm_min) / (norm_max - norm_min)
else:
V = np.zeros_like(X, dtype=np.uint8)
return V
def mel_sp_slides_to_image(X, eps=1e-6, resize=False, nrow=224, ncol=224):
mean = X.mean()
X = X - mean
std = X.std()
Xstd = X / (std + eps)
# cmap = plt.cm.jet
cmap = plt.cm.viridis
norm = plt.Normalize(vmin=Xstd.min(), vmax=Xstd.max())
# map the normalized data to colors
# image is now RGBA (nrowxncolx4)
# last channel is alpha value for transparency, set to 1
image = cmap(norm(Xstd))
if resize:
return cv2.resize(
image[:,:,:3], (nrow, ncol),
interpolation=cv2.INTER_LINEAR
)
else:
return image[:,:,:,:3]
def mel_sp_to_image(X, eps=1e-6, nrow=224, ncol=224):
mean = X.mean()
X = X - mean
std = X.std()
Xstd = X / (std + eps)
# cmap = plt.cm.jet
cmap = plt.cm.viridis
norm = plt.Normalize(vmin=Xstd.min(), vmax=Xstd.max())
# map the normalized data to colors
# image is now RGBA (nrowxncolx4)
# last channel is alpha value for transparency, set to 1
image = cmap(norm(Xstd))
return cv2.resize(image[:,:,:3], (nrow, ncol),
interpolation=cv2.INTER_LINEAR
)
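# A minimal usage sketch, not from the original project (the random signal
# stands in for real audio; shapes are assumptions):
#
#     y, sr = np.random.randn(22050), 22050
#     mel = librosa.feature.melspectrogram(y=y, sr=sr, n_mels=128)
#     img = mel_sp_to_image(librosa.power_to_db(mel))  # (224, 224, 3) RGB in [0, 1]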
def train_seq(model, train_loader, optimizer, epoch, device, verbose = 0,
lr_schedule = None, weight = None, loss_fn = 'crossEnt'):
"""Training"""
if lr_schedule is not None:
optimizer = lr_schedule(optimizer, epoch)
model.train()
for batch_idx, (data, target) in enumerate(progress_bar(train_loader)):
h_s = model.init_hidden(len(data))
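        # init_hidden returns a (h, c) tuple for LSTM-style models and a
        # single hidden-state tensor for GRU/vanilla RNN models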
if isinstance(h_s, tuple):
h_s = tuple([x.to(device) for x in h_s])
else:
h_s = h_s.to(device)
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
output = model(data, h_s)
if loss_fn == 'crossEnt':
criteria = nn.CrossEntropyLoss().cuda()
elif loss_fn == 'bceLogit':
criteria = nn.BCEWithLogitsLoss().cuda()
loss = criteria(output, target)
loss.backward()
optimizer.step()
if verbose>0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
return loss.item()
def evalModel_seq(data_loader, model, device, verbose=0, stochastic_pass = True,
compute_metrics=True, activationName = None,
loss_fn = 'crossEnt'):
if stochastic_pass:
model.train()
else:
model.eval()
test_loss = 0
predictions = []
activations = []
correct = 0
with torch.no_grad():
for data, target in data_loader:
data, target = data.to(device), target.to(device)
h_s = model.init_hidden(len(data))
if isinstance(h_s, tuple):
h_s = tuple([x.to(device) for x in h_s])
else:
                h_s = h_s.to(device)
output = model(data, h_s)
if compute_metrics:
predictionClasses = output.argmax(dim=1, keepdim=True)
if loss_fn == 'crossEnt':
criteria = nn.CrossEntropyLoss().cuda()
correct += predictionClasses.eq(target.view_as(predictionClasses)).sum().item()
elif loss_fn == 'bceLogit':
criteria = nn.BCEWithLogitsLoss().cuda()
correct += predictionClasses.eq(target.argmax(dim=1).view_as(predictionClasses)).sum().item()
test_loss += criteria(output, target).sum().item()
else:
softmaxed = F.softmax(output.cpu(), dim=1)
predictions.extend(softmaxed.data.numpy())
if compute_metrics:
return test_loss, correct
else:
return predictions, activations
def test_seq(model, test_loader, device, verbose=0, activationName = None,
loss_fn = 'crossEnt'):
"""Testing"""
model.eval()
total_test_loss, total_corrections = evalModel_seq(test_loader, model, device=device,
verbose = verbose,
stochastic_pass = False, compute_metrics = True,
activationName = activationName, loss_fn = loss_fn)
test_loss = total_test_loss/ len(test_loader) # loss function already averages over batch size
test_acc = total_corrections / len(test_loader.dataset)
    if verbose > 0:
        print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
            test_loss, total_corrections, len(test_loader.dataset),
            100. * test_acc))
        print('{{"metric": "Eval - cross entropy Loss", "value": {}}}'.format(test_loss))
        print('{{"metric": "Eval - Accuracy", "value": {}}}'.format(100. * test_acc))
return test_loss, test_acc
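# A minimal training-loop sketch for the *_seq helpers, not from the original
# project (assumes `model` implements init_hidden() and the loaders yield
# (data, target) batches):
#
#     saver = SaveBestModel(PATH='./best.pt')
#     for epoch in range(1, n_epochs + 1):
#         train_seq(model, train_loader, optimizer, epoch, device, verbose=1)
#         loss, acc = test_seq(model, test_loader, device)
#         saver.check(model, loss, comp='min')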
def train(model, train_loader, optimizer, epoch, device, verbose = 0,
lr_schedule = None, weight = None, loss_fn = 'crossEnt'):
"""Training"""
if lr_schedule is not None:
optimizer = lr_schedule(optimizer, epoch)
model.train()
for batch_idx, (data, target) in enumerate(progress_bar(train_loader)):
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
output = model(data)
if loss_fn == 'crossEnt':
criteria = nn.CrossEntropyLoss().cuda()
elif loss_fn == 'bceLogit':
criteria = nn.BCEWithLogitsLoss().cuda()
loss = criteria(output, target)
loss.backward()
optimizer.step()
if verbose>0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
return loss.item()
def evalModel(data_loader, model, device, verbose=0, stochastic_pass = True,
compute_metrics=True, activationName = None,
loss_fn = 'crossEnt'):
if stochastic_pass:
model.train()
else:
model.eval()
test_loss = 0
predictions = []
activations = []
correct = 0
with torch.no_grad():
for data, target in data_loader:
data, target = data.to(device), target.to(device)
output = model(data)
if compute_metrics:
predictionClasses = output.argmax(dim=1, keepdim=True)
if loss_fn == 'crossEnt':
criteria = nn.CrossEntropyLoss().cuda()
correct += predictionClasses.eq(target.view_as(predictionClasses)).sum().item()
elif loss_fn == 'bceLogit':
criteria = nn.BCEWithLogitsLoss().cuda()
correct += predictionClasses.eq(target.argmax(dim=1).view_as(predictionClasses)).sum().item()
test_loss += criteria(output, target).sum().item()
else:
softmaxed = F.softmax(output.cpu(), dim=1)
predictions.extend(softmaxed.data.numpy())
if compute_metrics:
return test_loss, correct
else:
return predictions, activations
def test(model, test_loader, device, verbose=0, activationName = None,
loss_fn = 'crossEnt'):
"""Testing"""
model.eval()
total_test_loss, total_corrections = evalModel(test_loader, model, device=device,
verbose = verbose,
stochastic_pass = False, compute_metrics = True,
activationName = activationName,
loss_fn=loss_fn)
test_loss = total_test_loss/ len(test_loader) # loss function already averages over batch size
test_acc = total_corrections / len(test_loader.dataset)
    if verbose > 0:
        print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
            test_loss, total_corrections, len(test_loader.dataset),
            100. * test_acc))
        print('{{"metric": "Eval - cross entropy Loss", "value": {}}}'.format(test_loss))
        print('{{"metric": "Eval - Accuracy", "value": {}}}'.format(100. * test_acc))
return test_loss, test_acc
| 33.987692
| 113
| 0.561108
| 1,273
| 11,046
| 4.725059
| 0.173606
| 0.0266
| 0.023275
| 0.0266
| 0.814131
| 0.807315
| 0.799501
| 0.799501
| 0.799501
| 0.799501
| 0
| 0.011426
| 0.318577
| 11,046
| 325
| 114
| 33.987692
| 0.787698
| 0.0478
| 0
| 0.717213
| 0
| 0
| 0.065802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053279
| false
| 0.02459
| 0.065574
| 0
| 0.17623
| 0.045082
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c37aad0af84075d4aa4c3cf6470f7ec9a59b07bf
| 141
|
py
|
Python
|
PythonAPI/carissma_project/lib/python3.5/site-packages/pandas/tests/extension/decimal/__init__.py
|
AbdulHoffmann/carla_carissma
|
8d382769ffa02a6c61a22c57160285505f5ff0a4
|
[
"MIT"
] | 6,989
|
2017-07-18T06:23:18.000Z
|
2022-03-31T15:58:36.000Z
|
PythonAPI/carissma_project/lib/python3.5/site-packages/pandas/tests/extension/decimal/__init__.py
|
AbdulHoffmann/carla_carissma
|
8d382769ffa02a6c61a22c57160285505f5ff0a4
|
[
"MIT"
] | 1,978
|
2017-07-18T09:17:58.000Z
|
2022-03-31T14:28:43.000Z
|
PythonAPI/carissma_project/lib/python3.5/site-packages/pandas/tests/extension/decimal/__init__.py
|
AbdulHoffmann/carla_carissma
|
8d382769ffa02a6c61a22c57160285505f5ff0a4
|
[
"MIT"
] | 1,228
|
2017-07-18T09:03:13.000Z
|
2022-03-29T05:57:40.000Z
|
from .array import DecimalArray, DecimalDtype, to_decimal, make_data
__all__ = ['DecimalArray', 'DecimalDtype', 'to_decimal', 'make_data']
| 28.2
| 69
| 0.765957
| 16
| 141
| 6.25
| 0.625
| 0.48
| 0.52
| 0.66
| 0.82
| 0.82
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 141
| 4
| 70
| 35.25
| 0.793651
| 0
| 0
| 0
| 0
| 0
| 0.304965
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
5eecf92eb0db61d4d7719d5a7c1d9b9df1608c95
| 6,851
|
py
|
Python
|
simpleredial/dataloader/inference_ctx_dataloader.py
|
gmftbyGMFTBY/SimpleReDial-v1
|
f45b8eb23d1499ec617b4cc4f417d83d8f2b6bde
|
[
"MIT"
] | 36
|
2021-10-13T10:32:08.000Z
|
2022-03-20T07:50:05.000Z
|
simpleredial/dataloader/inference_ctx_dataloader.py
|
gmftbyGMFTBY/SimpleReDial-v1
|
f45b8eb23d1499ec617b4cc4f417d83d8f2b6bde
|
[
"MIT"
] | 3
|
2021-11-24T10:57:59.000Z
|
2022-03-27T15:37:40.000Z
|
simpleredial/dataloader/inference_ctx_dataloader.py
|
gmftbyGMFTBY/SimpleReDial-v1
|
f45b8eb23d1499ec617b4cc4f417d83d8f2b6bde
|
[
"MIT"
] | 1
|
2022-03-15T07:13:22.000Z
|
2022-03-15T07:13:22.000Z
|
from header import *
from .utils import *
from .util_func import *
class BERTDualInferenceContextDataset(Dataset):
def __init__(self, vocab, path, **args):
self.args = args
self.vocab = vocab
self.vocab.add_tokens(['[EOS]'])
self.pad = self.vocab.convert_tokens_to_ids('[PAD]')
self.sep = self.vocab.convert_tokens_to_ids('[SEP]')
self.eos = self.vocab.convert_tokens_to_ids('[EOS]')
self.cls = self.vocab.convert_tokens_to_ids('[CLS]')
suffix = args['tokenizer'].replace('/', '_')
self.pp_path = f'{os.path.split(path)[0]}/inference_ctx_{suffix}.pt'
if os.path.exists(self.pp_path):
self.data = torch.load(self.pp_path)
print(f'[!] load preprocessed file from {self.pp_path}')
return None
data = read_text_data_utterances(path, lang=self.args['lang'])
self.data = []
for label, utterances in tqdm(data):
if label == 0:
continue
item = self.vocab.batch_encode_plus(utterances, add_special_tokens=False)['input_ids']
ids = []
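            # join all context turns (everything except the final response)
            # with [EOS] separators; the trailing [EOS] is popped below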
for u in item[:-1]:
ids.extend(u + [self.eos])
ids.pop()
ids = ids[-self.args['max_len']+2:]
ids = [self.cls] + ids + [self.sep]
self.data.append({
'ids': ids,
'context': utterances[:-1],
'response': utterances[-1],
})
def __len__(self):
return len(self.data)
def __getitem__(self, i):
bundle = self.data[i]
ids = torch.LongTensor(bundle['ids'])
context = bundle['context']
response = bundle['response']
return ids, context, response
def save(self):
        torch.save(self.data, self.pp_path)
print(f'[!] save preprocessed dataset into {self.pp_path}')
def collate(self, batch):
ids = [i[0] for i in batch]
context = [i[1] for i in batch]
response = [i[2] for i in batch]
ids = pad_sequence(ids, batch_first=True, padding_value=self.pad)
ids_mask = generate_mask(ids)
ids, ids_mask = to_cuda(ids, ids_mask)
return {
'ids': ids,
'mask': ids_mask,
'context': context,
'response': response
}
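# A minimal usage sketch, not from the original project (assumes `vocab` is a
# transformers tokenizer and the kwargs mirror the args read in __init__):
#
#     dataset = BERTDualInferenceContextDataset(vocab, path, tokenizer='bert-base-chinese', lang='zh', max_len=256)
#     loader = DataLoader(dataset, batch_size=32, collate_fn=dataset.collate)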
class BERTDualInferenceFullContextDataset(Dataset):
def __init__(self, vocab, path, **args):
self.args = args
self.vocab = vocab
self.vocab.add_tokens(['[EOS]'])
self.pad = self.vocab.convert_tokens_to_ids('[PAD]')
self.sep = self.vocab.convert_tokens_to_ids('[SEP]')
self.eos = self.vocab.convert_tokens_to_ids('[EOS]')
self.cls = self.vocab.convert_tokens_to_ids('[CLS]')
suffix = args['tokenizer'].replace('/', '_')
self.pp_path = f'{os.path.split(path)[0]}/inference_full_ctx_{suffix}.pt'
if os.path.exists(self.pp_path):
self.data = torch.load(self.pp_path)
print(f'[!] load preprocessed file from {self.pp_path}')
return None
data = read_text_data_utterances_full(path, lang=self.args['lang'], turn_length=args['full_turn_length'])
self.data = []
for label, utterances in tqdm(data):
if label == 0:
continue
item = self.vocab.batch_encode_plus(utterances, add_special_tokens=False)['input_ids']
ids = []
for u in item[:-1]:
ids.extend(u + [self.eos])
ids.pop()
ids = ids[-self.args['max_len']+2:]
ids = [self.cls] + ids + [self.sep]
self.data.append({
'ids': ids,
'context': utterances[:-1],
'response': utterances[-1],
})
def __len__(self):
return len(self.data)
def __getitem__(self, i):
bundle = self.data[i]
ids = torch.LongTensor(bundle['ids'])
context = bundle['context']
response = bundle['response']
return ids, context, response
def save(self):
        torch.save(self.data, self.pp_path)
print(f'[!] save preprocessed dataset into {self.pp_path}')
def collate(self, batch):
ids = [i[0] for i in batch]
context = [i[1] for i in batch]
response = [i[2] for i in batch]
ids = pad_sequence(ids, batch_first=True, padding_value=self.pad)
ids_mask = generate_mask(ids)
ids, ids_mask = to_cuda(ids, ids_mask)
return {
'ids': ids,
'mask': ids_mask,
'context': context,
'response': response
}
class BERTDualInferenceFullContextSingleExtendDataset(Dataset):
'''each in-dataset utterance will be treated as the extended context for training'''
def __init__(self, vocab, path, **args):
self.args = args
self.vocab = vocab
self.vocab.add_tokens(['[EOS]'])
self.pad = self.vocab.convert_tokens_to_ids('[PAD]')
self.sep = self.vocab.convert_tokens_to_ids('[SEP]')
self.eos = self.vocab.convert_tokens_to_ids('[EOS]')
self.cls = self.vocab.convert_tokens_to_ids('[CLS]')
suffix = args['tokenizer'].replace('/', '_')
self.pp_path = f'{os.path.split(path)[0]}/inference_full_ctx_ext_{suffix}.pt'
if os.path.exists(self.pp_path):
self.data = torch.load(self.pp_path)
print(f'[!] load preprocessed file from {self.pp_path}')
return None
data = read_response_data_full(path, lang=self.args['lang'], turn_length=5)
self.data = []
for utterance in tqdm(data):
ids = self.vocab.encode(utterance, add_special_tokens=False)
ids = ids[-self.args['max_len']+2:]
ids = [self.cls] + ids + [self.sep]
self.data.append({
'ids': ids,
'context': utterance,
})
def __len__(self):
return len(self.data)
def __getitem__(self, i):
bundle = self.data[i]
ids = torch.LongTensor(bundle['ids'])
context = bundle['context']
return ids, context
def save(self):
        torch.save(self.data, self.pp_path)
print(f'[!] save preprocessed dataset into {self.pp_path}')
def collate(self, batch):
ids = [i[0] for i in batch]
context = [i[1] for i in batch]
ids = pad_sequence(ids, batch_first=True, padding_value=self.pad)
ids_mask = generate_mask(ids)
ids, ids_mask = to_cuda(ids, ids_mask)
return {
'ids': ids,
'mask': ids_mask,
'context': context,
'response': context,
}
| 37.032432
| 113
| 0.553496
| 836
| 6,851
| 4.342105
| 0.12201
| 0.059504
| 0.049587
| 0.072727
| 0.894215
| 0.888705
| 0.888705
| 0.888705
| 0.869972
| 0.869972
| 0
| 0.004874
| 0.311195
| 6,851
| 184
| 114
| 37.233696
| 0.764357
| 0.011385
| 0
| 0.876543
| 0
| 0
| 0.115561
| 0.024235
| 0
| 0
| 0
| 0
| 0
| 1
| 0.092593
| false
| 0
| 0.018519
| 0.018519
| 0.203704
| 0.037037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ef05a43d7566791d8e79fe708dd2d90bde5ca46
| 2,842
|
py
|
Python
|
data_load.py
|
zhaitongqing233/Backdoor-attack-against-speaker-verification
|
5f9af97fdfba0e9be19c2a699964abbb929e3898
|
[
"Apache-2.0"
] | 10
|
2021-02-01T07:18:59.000Z
|
2021-12-28T08:43:23.000Z
|
data_load.py
|
zhaitongqing233/Backdoor-attack-against-speaker-verification
|
5f9af97fdfba0e9be19c2a699964abbb929e3898
|
[
"Apache-2.0"
] | 4
|
2021-03-03T12:55:29.000Z
|
2021-11-01T03:41:06.000Z
|
data_load.py
|
zhaitongqing233/Backdoor-attack-against-speaker-verification
|
5f9af97fdfba0e9be19c2a699964abbb929e3898
|
[
"Apache-2.0"
] | 10
|
2021-02-01T07:18:59.000Z
|
2022-03-17T04:38:19.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 6 20:55:52 2018
@author: harry
"""
import glob
import numpy as np
import os
import random
import torch
from torch.utils.data import Dataset
from hparam import hparam as hp
class SpeakerDatasetTIMIT_poison(Dataset):
#just for the test dataset in poisoning test part
def __init__(self, shuffle=True, utter_start=0):
# data path
assert hp.training == False
self.path = hp.data.test_path
self.utter_num = hp.poison.num_centers * 2
self.file_list = os.listdir(self.path)
self.shuffle=shuffle
self.utter_start = utter_start
def __len__(self):
return len(self.file_list)
def __getitem__(self, idx):
np_file_list = os.listdir(self.path)
if self.shuffle:
selected_file = random.sample(np_file_list, 1)[0] # select random speaker
else:
selected_file = np_file_list[idx]
utters = np.load(os.path.join(self.path, selected_file)) # load utterance spectrogram of selected speaker
utter_index = np.random.randint(0, utters.shape[0], self.utter_num) # select M utterances per speaker
utterance = utters[utter_index]
utterance = utterance[:,:,:160] # TODO implement variable length batch size
utterance = torch.tensor(np.transpose(utterance, axes=(0,2,1))) # transpose [batch, frames, n_mels]
return utterance
class SpeakerDatasetTIMITPreprocessed(Dataset):
def __init__(self, shuffle=True, utter_start=0):
# data path
if hp.training:
self.path = hp.data.train_path
self.utter_num = hp.train.M
else:
self.path = hp.data.test_path
self.utter_num = hp.test.M
self.file_list = os.listdir(self.path)
self.shuffle=shuffle
self.utter_start = utter_start
def __len__(self):
return len(self.file_list)
def __getitem__(self, idx):
np_file_list = os.listdir(self.path)
if self.shuffle:
selected_file = random.sample(np_file_list, 1)[0] # select random speaker
else:
selected_file = np_file_list[idx]
utters = np.load(os.path.join(self.path, selected_file)) # load utterance spectrogram of selected speaker
utter_index = np.random.randint(0, utters.shape[0], self.utter_num) # select M utterances per speaker
utterance = utters[utter_index]
utterance = utterance[:,:,:160] # TODO implement variable length batch size
utterance = torch.tensor(np.transpose(utterance, axes=(0,2,1))) # transpose [batch, frames, n_mels]
return utterance
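# A minimal usage sketch, not from the original project (assumes the hparam
# YAML has been loaded so `hp` is populated):
#
#     dataset = SpeakerDatasetTIMITPreprocessed(shuffle=True)
#     loader = torch.utils.data.DataLoader(dataset, batch_size=hp.train.N)
#     for batch in loader:  # (batch, M, 160, n_mels) after default collation
#         ...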
| 35.974684
| 120
| 0.62069
| 362
| 2,842
| 4.685083
| 0.270718
| 0.04717
| 0.035377
| 0.040094
| 0.775943
| 0.76533
| 0.76533
| 0.76533
| 0.76533
| 0.76533
| 0
| 0.017751
| 0.286418
| 2,842
| 78
| 121
| 36.435897
| 0.81854
| 0.181914
| 0
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012821
| 0.018519
| 1
| 0.111111
| false
| 0
| 0.12963
| 0.037037
| 0.351852
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f41664ee4a461670685538d935a82ab82969a79
| 250
|
py
|
Python
|
rotten_tomatoes_client/__init__.py
|
seanbreckenridge/rotten_tomatoes_client
|
e5b9cfc37ae0eafd043f8d6b7674b086cee81294
|
[
"MIT"
] | 19
|
2017-06-04T18:01:50.000Z
|
2021-06-14T04:50:50.000Z
|
rotten_tomatoes_client/__init__.py
|
seanbreckenridge/rotten_tomatoes_client
|
e5b9cfc37ae0eafd043f8d6b7674b086cee81294
|
[
"MIT"
] | 18
|
2017-05-27T04:23:25.000Z
|
2022-01-27T14:49:34.000Z
|
rotten_tomatoes_client/__init__.py
|
seanbreckenridge/rotten_tomatoes_client
|
e5b9cfc37ae0eafd043f8d6b7674b086cee81294
|
[
"MIT"
] | 6
|
2020-06-01T09:47:41.000Z
|
2022-03-20T21:59:26.000Z
|
from rotten_tomatoes_client.client import RottenTomatoesClient
from rotten_tomatoes_client.query import MovieBrowsingQuery
from rotten_tomatoes_client.query.parameters.browsing import TvBrowsingCategory, Service, SortBy, MovieBrowsingCategory, Genre
| 62.5
| 126
| 0.9
| 27
| 250
| 8.111111
| 0.555556
| 0.136986
| 0.246575
| 0.328767
| 0.26484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064
| 250
| 3
| 127
| 83.333333
| 0.935897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6f6d3688c668b52ec2a26701f0d001c719e71c70
| 3,376
|
py
|
Python
|
buildcatrust/tests/test_certstore_parser.py
|
lukegb/buildcatrust
|
2f613c36ddb3d0e032d2e145fe05bc4c7eb3d7d9
|
[
"MIT"
] | 2
|
2021-09-05T17:51:01.000Z
|
2022-02-12T13:09:55.000Z
|
buildcatrust/tests/test_certstore_parser.py
|
lukegb/buildcatrust
|
2f613c36ddb3d0e032d2e145fe05bc4c7eb3d7d9
|
[
"MIT"
] | null | null | null |
buildcatrust/tests/test_certstore_parser.py
|
lukegb/buildcatrust
|
2f613c36ddb3d0e032d2e145fe05bc4c7eb3d7d9
|
[
"MIT"
] | null | null | null |
# SPDX-FileCopyrightText: 2021 Luke Granger-Brown <git@lukegb.com>
#
# SPDX-License-Identifier: MIT
import io
from buildcatrust import certstore_parser
from buildcatrust import enums
from buildcatrust import types
CERTUM_NAME = "cn=Certum_EC-384_CA:6b328085:788f275c81125220a504d02dddba73f4"
def test_read_certificates_raw_pem():
certum_pem = """\
some garbage
-----BEGIN CERTIFICATE-----
MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw
CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw
JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT
EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0
WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT
LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX
BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE
KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm
Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj
QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8
EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J
UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn
nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k=
-----END CERTIFICATE-----
"""
fp = io.StringIO(certum_pem)
certs = certstore_parser.read_certificates(fp)
assert len(certs) == 1
certdb = types.CertDB()
certdb.add_certs(certs)
assert CERTUM_NAME in certdb.certmap
assert CERTUM_NAME in certdb.trustmap
trust = certdb.trustmap[CERTUM_NAME]
assert not trust.trust_step_up_approved
assert trust.trust_server_auth == enums.TrustType.TRUSTED_DELEGATOR
assert trust.trust_client_auth == enums.TrustType.TRUSTED_DELEGATOR
assert trust.trust_ipsec_user == enums.TrustType.UNKNOWN
def test_read_certificates_openssl_trusted():
certum_pem = """\
more garbage
-----BEGIN TRUSTED CERTIFICATE-----
MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw
CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw
JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT
EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0
WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT
LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX
BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE
KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm
Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj
QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8
EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J
UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn
nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8kwGDAKBggr
BgEFBQcDAaAKBggrBgEFBQcDBw==
-----END TRUSTED CERTIFICATE-----
"""
fp = io.StringIO(certum_pem)
certs = certstore_parser.read_certificates(fp)
assert len(certs) == 1
certdb = types.CertDB()
certdb.add_certs(certs)
assert CERTUM_NAME in certdb.certmap
assert CERTUM_NAME in certdb.trustmap
trust = certdb.trustmap[CERTUM_NAME]
assert not trust.trust_step_up_approved
assert trust.trust_server_auth == enums.TrustType.TRUSTED_DELEGATOR
assert trust.trust_client_auth == enums.TrustType.UNKNOWN
assert trust.trust_ipsec_user == enums.TrustType.NOT_TRUSTED
| 39.717647
| 77
| 0.861967
| 260
| 3,376
| 11
| 0.353846
| 0.027972
| 0.033566
| 0.025175
| 0.844755
| 0.844755
| 0.844755
| 0.823077
| 0.811888
| 0.811888
| 0
| 0.069055
| 0.08205
| 3,376
| 84
| 78
| 40.190476
| 0.853824
| 0.027547
| 0
| 0.705882
| 0
| 0
| 0.587374
| 0.532174
| 0
| 1
| 0
| 0
| 0.205882
| 1
| 0.029412
| false
| 0
| 0.058824
| 0
| 0.088235
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
48aea5ebe569e33a1ade967b45bb52244abd24bc
| 17,685
|
py
|
Python
|
code/explain/offline_teachers.py
|
macaodha/explain_teach
|
357963503603b27ed1f01747c9bd214c5cbcf36e
|
[
"MIT"
] | 10
|
2018-06-26T07:18:03.000Z
|
2021-05-13T09:19:28.000Z
|
code/explain/offline_teachers.py
|
macaodha/explain_teach
|
357963503603b27ed1f01747c9bd214c5cbcf36e
|
[
"MIT"
] | null | null | null |
code/explain/offline_teachers.py
|
macaodha/explain_teach
|
357963503603b27ed1f01747c9bd214c5cbcf36e
|
[
"MIT"
] | null | null | null |
import numpy as np
from scipy.stats import entropy
def user_model_binary(w, x, y, alpha):
# binary user model - w is D and X is NxD
# prob is probability that the hyp agrees with the datapoint
# need to make prob = 1.0 / (1.0 + np.exp(-z*2*(2*y-1))) to be same as softmax
if len(w.shape) == 2:
z = alpha*np.dot(x, w[1,:])
else:
z = alpha*np.dot(x, w)
    pred_class = (z > 0).astype(int) # will be 0 or 1 (plain int: np.int was removed from NumPy)
prob = 1.0 / (1.0 + np.exp(-z*(2*y-1))) # make y={-1,1}
return prob, pred_class
def user_model(w, x, y, alpha):
# multi-class user model - w is CxD and X is NxD
# prob is probability that the hyp agrees with the datapoint
z = alpha*np.dot(x, w.T)
pred_class = np.argmax(z,1)
z_norm = np.exp(z) / np.exp(z).sum(1)[..., np.newaxis]
prob = z_norm[np.arange(x.shape[0]), pred_class] # pred_class == y
inds = np.where(pred_class != y)[0]
prob[inds] = 1.0 - prob[inds] # pred_class != y
return prob, pred_class
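# Shape sketch for the multi-class user model above (illustrative values,
# not project data):
#
#     w = np.random.randn(3, 5)           # C=3 hypothesis weight vectors, D=5 features
#     x = np.random.randn(4, 5)           # N=4 datapoints
#     y = np.array([0, 2, 1, 0])          # labels in {0..C-1}
#     prob, pred = user_model(w, x, y, alpha=1.0)  # prob[i] = P(hyp agrees with y[i])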
def teaching_stats(cur_post, pred, err_hyp, err_hyp_test):
cur_post_norm = cur_post/cur_post.sum()
exp_err = (cur_post_norm*err_hyp).sum()
exp_err_test = (cur_post_norm*err_hyp_test).sum()
ent = entropy(cur_post_norm)
    z = (cur_post_norm*pred).sum() + 1e-10 # small epsilon keeps the log2 terms finite
difficulty = -(z*np.log2(z) + (1-z)*np.log2(1-z))
return exp_err, exp_err_test, ent, difficulty
def teaching_stats_one_vs_all(cur_post, pred, err_hyp, err_hyp_test):
    exp_err = np.empty(cur_post.shape)
    exp_err_test = np.empty(cur_post.shape)
    ent = np.empty(cur_post.shape)  # named `ent` so it does not shadow scipy.stats.entropy
    difficulty = np.empty(cur_post.shape)
    for cc in range(cur_post.shape[0]):
        exp_err[cc, :], exp_err_test[cc, :], ent[cc, :], difficulty[cc, :] = teaching_stats(cur_post[cc, :], pred[cc, :], err_hyp[cc], err_hyp_test[cc])
    return exp_err.mean(), exp_err_test.mean(), ent.mean(), difficulty.mean()
class StrictTeacher:
# Singla et al. Near-Optimally Teaching the Crowd to Classify
# https://arxiv.org/abs/1402.2092
def __init__(self, dataset, alpha, prior_h):
self.initialize(dataset['X'], dataset['Y'], alpha, prior_h)
def initialize(self, X, Y, alpha, prior_h):
self.teaching_exs = []
self.unseen_exs = np.arange(X.shape[0])
self.prior_h = prior_h
self.cur_post = prior_h.copy()
self.alpha = alpha
self.exp_err = []
self.exp_err_test = []
self.hyp_entropy = []
self.difficulty = []
def posterior(self):
return self.cur_post/self.cur_post.sum()
def run_teaching(self, num_teaching_itrs, dataset, likelihood, hyps, err_hyp, err_hyp_test):
for tt in range(num_teaching_itrs):
self.teaching_iteration(dataset['X'], dataset['Y'], likelihood, hyps, err_hyp, err_hyp_test)
def teaching_iteration(self, X, Y, likelihood, hyps, err_hyp, err_hyp_test):
        # this is equivalent to looping over h and x
# comes from separating P(h|(A U x)) into P(h|A)P(h|x)
err = -np.dot(self.cur_post*err_hyp, likelihood)
selected_ind = self.unseen_exs[np.argmax(err[self.unseen_exs])]
# update the posterior with the selected example
self.cur_post *= likelihood[:, selected_ind]
# get predictions for each hyp for selected example
pred = np.zeros(len(hyps))
for hh in range(len(hyps)):
pred[hh], _ = user_model(hyps[hh], X[selected_ind,:][np.newaxis, ...], Y[selected_ind], self.alpha)
# bookkeeping and compute stats
        print(len(self.teaching_exs), '\t', Y[selected_ind], '\t', selected_ind, '\t', round(err[self.unseen_exs].max(), 4))
ee, ee_test, ent, diff = teaching_stats(self.cur_post, pred, err_hyp, err_hyp_test)
self.exp_err.append(ee)
self.exp_err_test.append(ee_test)
self.hyp_entropy.append(ent)
self.difficulty.append(diff)
self.teaching_exs.append(selected_ind)
self.unseen_exs = np.setdiff1d(np.arange(X.shape[0]), self.teaching_exs)
    def teaching_iteration_slow(self, X, Y, hyps, err_hyp, err_hyp_test):
eer = np.zeros(self.unseen_exs.shape[0])
for ii, ex in enumerate(self.unseen_exs):
cur_post_delta = np.ones(len(hyps))
for hh in range(len(hyps)):
# can store a H*X matrix where it will be 1 where hyp gets it correct and y_p else where
y_p, pred_class = user_model(hyps[hh], X[ex,:][np.newaxis, ...], Y[ex], self.alpha)
if pred_class != Y[ex]:
cur_post_delta[hh] *= y_p
eer[ii] += (self.prior_h[hh] - (self.cur_post[hh]*cur_post_delta[hh]))*err_hyp[hh]
#eer[ii] += -(self.cur_post[hh]*cur_post_delta[hh])*err_hyp[hh] # dont need to subtract prior
# recompute the posterior of the selected example
selected_ind = self.unseen_exs[np.argmax(eer)]
pred = np.zeros(len(hyps))
for hh in range(len(hyps)):
pred[hh], pred_class = user_model(hyps[hh], X[selected_ind,:][np.newaxis, ...], Y[selected_ind], self.alpha)
if pred_class != Y[selected_ind]:
self.cur_post[hh] *= pred[hh]
# bookkeeping and compute stats
        print(len(self.teaching_exs), '\t', selected_ind, '\t', round(eer.max(), 4))
ee, ee_test, ent, diff = teaching_stats(self.cur_post, pred, err_hyp, err_hyp_test)
self.exp_err.append(ee)
self.exp_err_test.append(ee_test)
self.hyp_entropy.append(ent)
self.difficulty.append(diff)
self.teaching_exs.append(selected_ind)
self.unseen_exs = np.setdiff1d(np.arange(X.shape[0]), self.teaching_exs)
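# A minimal driver sketch, not from the original project (assumed shapes:
# `likelihood` is |H| x |X|, `err_hyp`/`err_hyp_test` have length |H|):
#
#     prior_h = np.ones(len(hyps)) / len(hyps)
#     teacher = StrictTeacher(dataset, alpha=1.0, prior_h=prior_h)
#     teacher.run_teaching(20, dataset, likelihood, hyps, err_hyp, err_hyp_test)
#     posterior = teacher.posterior()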
class StrictTeacherOneVsAll:
# 1 vs all version of
# Singla et al. Near-Optimally Teaching the Crowd to Classify
# https://arxiv.org/abs/1402.2092
def __init__(self, dataset, alpha, prior_h):
self.initialize(dataset['X'], dataset['Y'], alpha, prior_h)
def initialize(self, X, Y, alpha, prior_h):
self.teaching_exs = []
self.unseen_exs = np.arange(X.shape[0])
self.classes = np.unique(Y) # TODO need to update this for binary
self.prior_h = np.tile(prior_h, (len(self.classes), 1))
self.cur_post = np.tile(prior_h.copy(), (len(self.classes), 1))
self.alpha = alpha
self.exp_err = []
self.exp_err_test = []
self.hyp_entropy = []
self.difficulty = []
def posterior(self):
return self.cur_post/self.cur_post.sum(1)[..., np.newaxis]
def run_teaching(self, num_teaching_itrs, dataset, likelihood, hyps, err_hyp, err_hyp_test):
for tt in range(num_teaching_itrs):
self.teaching_iteration(dataset['X'], dataset['Y'], likelihood, hyps, err_hyp, err_hyp_test)
def teaching_iteration(self, X, Y, likelihood, hyps, err_hyp, err_hyp_test):
err = np.empty((len(self.classes), X.shape[0]))
for cc in self.classes:
            # this is equivalent to looping over h and x
# comes from separating P(h|(A U x)) into P(h|A)P(h|x)
err[cc, :] = -np.dot(self.cur_post[cc, :]*err_hyp[cc], likelihood[cc])
if len(self.classes) > 2:
err = err.sum(0) # could try other methods for combining, min, max, ...
selected_ind = self.unseen_exs[np.argmax(err[self.unseen_exs])]
# update the posterior with the selected example
for cc in self.classes:
self.cur_post[cc, :] *= likelihood[cc][:, selected_ind]
# get predictions for each hyp for selected example
pred = np.zeros((len(self.classes), len(hyps)))
for cc in self.classes:
Y_bin = int(Y[selected_ind] == cc)
for hh in range(len(hyps)):
pred[cc, hh], _ = user_model_binary(hyps[hh], X[selected_ind,:][np.newaxis, ...], Y_bin, self.alpha)
# bookkeeping and compute stats
        print(len(self.teaching_exs), '\t', Y[selected_ind], '\t', selected_ind, '\t', round(err[self.unseen_exs].max(), 4))
ee, ee_test, ent, diff = teaching_stats_one_vs_all(self.cur_post, pred, err_hyp, err_hyp_test)
self.exp_err.append(ee)
self.exp_err_test.append(ee_test)
self.hyp_entropy.append(ent)
self.difficulty.append(diff)
self.teaching_exs.append(selected_ind)
self.unseen_exs = np.setdiff1d(np.arange(X.shape[0]), self.teaching_exs)
class ExplainTeacher:
def __init__(self, dataset, alpha, prior_h):
self.initialize(dataset['X'], dataset['Y'], alpha, prior_h)
def initialize(self, X, Y, alpha, prior_h):
self.teaching_exs = []
self.unseen_exs = np.arange(X.shape[0])
self.prior_h = prior_h
self.cur_post = prior_h.copy()
self.alpha = alpha
self.exp_err = []
self.exp_err_test = []
self.hyp_entropy = []
self.difficulty = []
def posterior(self):
return self.cur_post/self.cur_post.sum()
def run_teaching(self, num_teaching_itrs, dataset, likelihood, hyps, err_hyp, err_hyp_test):
for tt in range(num_teaching_itrs):
self.teaching_iteration(dataset['X'], dataset['Y'], dataset['X_density'], dataset['explain_interp'], likelihood, hyps, err_hyp, err_hyp_test)
def teaching_iteration(self, X, Y, X_density, interpretability, likelihood, hyps, err_hyp, err_hyp_test):
        # X_density is how representative points are - don't want to select outliers
# interpretability is how easy it is for user to make sense of explanation
        # this is equivalent to looping over h and x
# comes from separating P(h|(A U x)) into P(h|A)P(h|x)
# err is negative, we want to find max. To increase it we multiply by smaller numbers
# this has the effect of discounting less the relevant ones
err = -np.dot(self.cur_post*err_hyp, likelihood)
err = err*X_density*interpretability
selected_ind = self.unseen_exs[np.argmax(err[self.unseen_exs])]
# update the posterior with the selected example
self.cur_post *= likelihood[:, selected_ind]*X_density[selected_ind]*interpretability[selected_ind]
#self.cur_post = self.cur_post / self.cur_post.sum() # don't need to renormalize
# get predictions for each hyp for selected example
pred = np.zeros(len(hyps))
for hh in range(len(hyps)):
pred[hh], _ = user_model(hyps[hh], X[selected_ind,:][np.newaxis, ...], Y[selected_ind], self.alpha)
# bookkeeping and compute stats
        print(len(self.teaching_exs), '\t', Y[selected_ind], '\t', selected_ind, '\t', round(err[self.unseen_exs].max(), 4))
ee, ee_test, ent, diff = teaching_stats(self.cur_post, pred, err_hyp, err_hyp_test)
self.exp_err.append(ee)
self.exp_err_test.append(ee_test)
self.hyp_entropy.append(ent)
self.difficulty.append(diff)
self.teaching_exs.append(selected_ind)
self.unseen_exs = np.setdiff1d(np.arange(X.shape[0]), self.teaching_exs)
class ExplainTeacherOneVsAll:
# 1 vs all version
def __init__(self, dataset, alpha, prior_h):
self.initialize(dataset['X'], dataset['Y'], alpha, prior_h)
def initialize(self, X, Y, alpha, prior_h):
self.teaching_exs = []
self.unseen_exs = np.arange(X.shape[0])
self.classes = np.unique(Y)
self.prior_h = np.tile(prior_h, (len(self.classes), 1))
self.cur_post = np.tile(prior_h.copy(), (len(self.classes), 1))
self.alpha = alpha
self.exp_err = []
self.exp_err_test = []
self.hyp_entropy = []
self.difficulty = []
def posterior(self):
return self.cur_post/self.cur_post.sum(1)[..., np.newaxis]
def run_teaching(self, num_teaching_itrs, dataset, likelihood, hyps, err_hyp, err_hyp_test):
for tt in range(num_teaching_itrs):
self.teaching_iteration(dataset['X'], dataset['Y'], dataset['X_density'], dataset['explain_interp'], likelihood, hyps, err_hyp, err_hyp_test)
def teaching_iteration(self, X, Y, X_density, interpretability, likelihood, hyps, err_hyp, err_hyp_test):
        # X_density is how representative points are - don't want to select outliers
# interpretability is how easy it is for user to make sense of explanation
err = np.empty((len(self.classes), X.shape[0]))
for cc in self.classes:
            # this is equivalent to looping over h and x
# comes from separating P(h|(A U x)) into P(h|A)P(h|x)
err[cc, :] = -np.dot(self.cur_post[cc, :]*err_hyp[cc], likelihood[cc])
# TODO should interpretability be per class or just for GT?
err[cc, :] = err[cc, :]*X_density*interpretability
if len(self.classes) > 2:
err = err.sum(0) # could try other methods for combining, min, max, ...
selected_ind = self.unseen_exs[np.argmax(err[self.unseen_exs])]
# update the posterior with the selected example
for cc in self.classes:
#self.cur_post[cc, :] *= likelihood[cc][:, selected_ind]
self.cur_post[cc, :] *= likelihood[cc][:, selected_ind]*X_density[selected_ind]*interpretability[selected_ind]
# get predictions for each hyp for selected example
pred = np.zeros((len(self.classes), len(hyps)))
for cc in self.classes:
Y_bin = int(Y[selected_ind] == cc)
for hh in range(len(hyps)):
pred[cc, hh], _ = user_model_binary(hyps[hh], X[selected_ind,:][np.newaxis, ...], Y_bin, self.alpha)
# bookkeeping and compute stats
print(len(self.teaching_exs), Y[selected_ind], selected_ind, round(err[self.unseen_exs].max(), 4), sep='\t')
ee, ee_test, ent, diff = teaching_stats_one_vs_all(self.cur_post, pred, err_hyp, err_hyp_test)
self.exp_err.append(ee)
self.exp_err_test.append(ee_test)
self.hyp_entropy.append(ent)
self.difficulty.append(diff)
self.teaching_exs.append(selected_ind)
self.unseen_exs = np.setdiff1d(np.arange(X.shape[0]), self.teaching_exs)
class RandomImageTeacher:
# assumes CxD hypotheses
def __init__(self, dataset, alpha, prior_h):
self.initialize(alpha, prior_h)
def initialize(self, alpha, prior_h):
self.teaching_exs = []
self.alpha = alpha
self.cur_post = prior_h.copy()
self.exp_err = []
self.exp_err_test = []
self.hyp_entropy = []
self.difficulty = []
def posterior(self):
return self.cur_post/self.cur_post.sum()
def run_teaching(self, num_teaching_itrs, dataset, likelihood, hyps, err_hyp, err_hyp_test):
X = dataset['X']
Y = dataset['Y']
self.teaching_exs = np.random.choice(X.shape[0], num_teaching_itrs, replace=False)
for teaching_ex in self.teaching_exs:
# compute the posterior of the selected example
pred = np.zeros(len(hyps))
for hh in range(len(hyps)):
pred[hh], pred_class = user_model(hyps[hh], X[teaching_ex,:][np.newaxis, ...], Y[teaching_ex], self.alpha)
if pred_class != Y[teaching_ex]:
self.cur_post[hh] *= pred[hh]
# bookkeeping and compute stats
ee, ee_test, ent, diff = teaching_stats(self.cur_post, pred, err_hyp, err_hyp_test)
self.exp_err.append(ee)
self.exp_err_test.append(ee_test)
self.hyp_entropy.append(ent)
self.difficulty.append(diff)
class RandomImageTeacherOneVsAll:
# assumes 1xD hypotheses
def __init__(self, dataset, alpha, prior_h):
self.initialize(dataset['X'], dataset['Y'], alpha, prior_h)
def initialize(self, X, Y, alpha, prior_h):
self.teaching_exs = []
self.unseen_exs = np.arange(X.shape[0])
self.alpha = alpha
self.classes = np.unique(Y)
self.prior_h = np.tile(prior_h, (len(self.classes), 1))
self.cur_post = np.tile(prior_h.copy(), (len(self.classes), 1))
self.exp_err = []
self.exp_err_test = []
self.hyp_entropy = []
self.difficulty = []
def posterior(self):
return self.cur_post/self.cur_post.sum(1)[..., np.newaxis]
def run_teaching(self, num_teaching_itrs, dataset, likelihood, hyps, err_hyp, err_hyp_test):
for tt in range(num_teaching_itrs):
self.teaching_iteration(dataset['X'], dataset['Y'], likelihood, hyps, err_hyp, err_hyp_test)
def teaching_iteration(self, X, Y, likelihood, hyps, err_hyp, err_hyp_test):
selected_ind = np.random.choice(self.unseen_exs)
# update the posterior with the selected example
for cc in self.classes:
self.cur_post[cc, :] *= likelihood[cc][:, selected_ind]
# get predictions for each hyp for selected example
pred = np.zeros((len(self.classes), len(hyps)))
for cc in self.classes:
Y_bin = int(Y[selected_ind] == cc)
for hh in range(len(hyps)):
pred[cc, hh], _ = user_model_binary(hyps[hh], X[selected_ind,:][np.newaxis, ...], Y_bin, self.alpha)
# bookkeeping and compute stats
print(len(self.teaching_exs), Y[selected_ind], selected_ind, sep='\t')
ee, ee_test, ent, diff = teaching_stats_one_vs_all(self.cur_post, pred, err_hyp, err_hyp_test)
self.exp_err.append(ee)
self.exp_err_test.append(ee_test)
self.hyp_entropy.append(ent)
self.difficulty.append(diff)
self.teaching_exs.append(selected_ind)
self.unseen_exs = np.setdiff1d(np.arange(X.shape[0]), self.teaching_exs)
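# A minimal usage sketch (hypothetical: `dataset`, `likelihood`, `hyps`,
# `err_hyp`, and `err_hyp_test` are assumed to be precomputed elsewhere in
# this codebase, and the prior to be uniform over hypotheses):
#
#   prior_h = np.ones(len(hyps)) / len(hyps)
#   teacher = ExplainTeacherOneVsAll(dataset, alpha=0.5, prior_h=prior_h)
#   teacher.run_teaching(20, dataset, likelihood, hyps, err_hyp, err_hyp_test)
#   post = teacher.posterior()  # per-class posterior over hypotheses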
82c8988b4b821f127c1551f92d1f79f0fa987e69 | 128,683 | py | Python | soccer.py | alexpeterbloom/soccer | d320bfb5a7d94ff54a9f6e23ff71d765f34e3b8e | ["Apache-2.0"] | null | null | null |
import random
import time
import math
ahd = 0
while ahd < 40:
print("")
ahd += 1
team1 = input("Player one: What is your team name? ")
print("")
team2 = input("Player two: What is your team name? ")
ahd = 0
while ahd < 40:
print("")
ahd += 1
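# Player stat lists (layout inferred from the stat tables printed below):
#   attackers:   [goal scoring, card risk, penalty skill, name, cards]
#   midfielders: [goal scoring, card risk, penalty skill, defending skill, name, cards]
#   defenders:   [defending skill, card risk, penalty skill, name, cards]
#   goalkeepers: [regular saving, penalty saving, card risk, name, cards]
# The "2"-suffixed copies are team-two versions so bookings are tracked per team.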
RaheemSterling = [1.6, 2.5, 7.6, "Raheem Sterling(A)", 0]
MarcusRashford = [8.8, 1.2, 3.2, "Marcus Rashford(A)", 0]
HarryKane = [9.3, 2.5, 3.1, "Harry Kane(A)", 0]
JackGrealish = [6.2, 3.0, 5.2, 5.4, "Jack Grealish(M)", 0]
KalvinPhilips = [9.5, 9.0, 5.2, 8.0, "Kalvin Philips(M)", 0]
BukayoSaka = [7.2, 6.1, 4.5, 5.9, "Bukayo Saka(M)", 0]
JudeBellingham = [6.1, 2.0, 5.7, 5.6, "Jude Bellingham(M)", 0]
PhilFoden = [6.4, 2.3, 5.1, 5.8, "Phil Foden(M)", 0]
JohnStones = [9.9, 18.5, 9.9, "John Stones(D)", 0]
KyleWalker = [7.6, 9.4, 5.1, "Kyle Walker(D)", 0]
BenChilwell = [6.2, 3.8, 4.4, "Ben Chilwell(D)", 0]
TrentAlexander = [6.4, 3.2, 4.5, "Trent Alexander-Arnold(D)", 0]
TyroneMings = [4.6, 0.5, 4.3, "Tyrone Mings(D)", 0]
JordanPickford = [9.0, 5.1, 2.1, "Jordan Pickford(G)", 0]
SamJohnstone = [5.1, 9.3, 2.0, "Sam Johnstone(G)", 0]
RaheemSterling2 = [1.6, 2.5, 7.6, "Raheem Sterling(A2)", 0]
MarcusRashford2 = [8.8, 1.2, 3.2, "Marcus Rashford(A2)", 0]
HarryKane2 = [9.3, 2.5, 3.1, "Harry Kane(A2)", 0]
JackGrealish2 = [6.2, 3.0, 5.2, 5.4, "Jack Grealish(M2)", 0]
KalvinPhilips2 = [9.5, 9.0, 5.2, 8.0, "Kalvin Philips(M2)", 0]
BukayoSaka2 = [7.2, 6.1, 4.5, 5.9, "Bukayo Saka(M2)", 0]
JudeBellingham2 = [6.1, 2.0, 5.7, 5.6, "Jude Bellingham(M2)", 0]
PhilFoden2 = [6.4, 2.3, 5.1, 5.8, "Phil Foden(M2)", 0]
JohnStones2 = [9.9, 18.5, 9.9, "John Stones(D2)", 0]
KyleWalker2 = [7.6, 9.4, 5.1, "Kyle Walker(D2)", 0]
BenChilwell2 = [6.2, 3.8, 4.4, "Ben Chilwell(D2)", 0]
TrentAlexander2 = [6.4, 3.2, 4.5, "Trent Alexander-Arnold(D2)", 0]
TyroneMings2 = [4.6, 0.5, 4.3, "Tyrone Mings(D2)", 0]
JordanPickford2 = [9.0, 5.1, 2.1, "Jordan Pickford(G2)", 0]
SamJohnstone2 = [5.1, 9.3, 2.1, "Sam Johnstone(G2)", 0]
print("one\t\t|")
print("Attackers\t\t\t|" + "Goal Scoring" + "\t|\t" "Risk of being carded" + " | " + "Penalty Skill")
print("Harry Kane\t\t\t|" + str(HarryKane[0]) + "| " + str(HarryKane[1]) + " | " + str(HarryKane[2]))
print("Marcus Rashford\t\t|" + str(MarcusRashford[0]) + " | " + str(MarcusRashford[1]) + " | " + str(MarcusRashford[2]))
print("Raheem Sterling\t\t|" + str(RaheemSterling[0]) + " | " + str(RaheemSterling[1]) + " | " + str(RaheemSterling[2]))
print("")
T1A1 = list(input("Who is your first forward " + team1 + "? "))
if T1A1[0] == "H" or T1A1[0] == "h":
T1A1 = HarryKane
elif T1A1[0] == "M" or T1A1[0] == "m":
T1A1 = MarcusRashford
elif T1A1[0] == "R" or T1A1[0] == "r":
T1A1 = RaheemSterling
print("")
T1A2 = list(input("Who is your second forward " + team1 + "? "))
if T1A2[0] == "H" or T1A2[0] == "h":
T1A2 = HarryKane
elif T1A2[0] == "M" or T1A2[0] == "m":
T1A2 = MarcusRashford
elif T1A2[0] == "R" or T1A2[0] == "r":
T1A2 = RaheemSterling
print("")
print("")
print("")
ahd = 0
while ahd < 40:
print("")
ahd += 1
print("Midfielders" + " | " + " Goal Scoring " + " | " + "Risk of being carded" + " | " + "Penalty Skill" + " | " + "Defending Skill")
print("Jack Grealish" + " | " + str(JackGrealish[0]) + " | " + str(JackGrealish[1]) + " | " + str(JackGrealish[2]) + " | " + str(JackGrealish[3]))
print("Kalvin Philips" + " | " + str(KalvinPhilips[0]) + " | " + str(KalvinPhilips[1]) + " | " + str(KalvinPhilips[2]) + " | " + str(KalvinPhilips[3]))
print("Bukayo Saka" + " | " + str(BukayoSaka[0]) + " | " + str(BukayoSaka[1]) + " | " + str(BukayoSaka[2]) + " | " + str(BukayoSaka[3]))
print("Jude Bellingham" + " | " + str(JudeBellingham[0]) + " | " + str(JudeBellingham[1]) + " | " + str(JudeBellingham[2]) + " | " + str(JudeBellingham[3]))
print("Phil Foden" + " | " + str(PhilFoden[0]) + " | " + str(PhilFoden[1]) + " | " + str(PhilFoden[2]) + " | " + str(PhilFoden[3]))
print("")
print("")
print("")
T1M1 = list(input("Who is your first midfielder? " + team1 + " "))
if T1M1[2] == "c":
T1M1 = JackGrealish
elif T1M1[2] == "l":
T1M1 = KalvinPhilips
elif T1M1[2] == "k":
T1M1 = BukayoSaka
elif T1M1[2] == "d":
T1M1 = JudeBellingham
elif T1M1[2] == "i":
T1M1 = PhilFoden
print("")
T1M2 = list(input("Who is your second midfielder? " + team1 + " "))
if T1M2[2] == "c":
T1M2 = JackGrealish
elif T1M2[2] == "l":
T1M2 = KalvinPhilips
elif T1M2[2] == "k":
T1M2 = BukayoSaka
elif T1M2[2] == "d":
T1M2 = JudeBellingham
elif T1M2[2] == "i":
T1M2 = PhilFoden
print("")
T1M3 = list(input("Who is your third midfielder? " + team1 + " "))
if T1M3[2] == "c":
T1M3 = JackGrealish
elif T1M3[2] == "l":
T1M3 = KalvinPhilips
elif T1M3[2] == "k":
T1M3 = BukayoSaka
elif T1M3[2] == "d":
T1M3 = JudeBellingham
elif T1M3[2] == "i":
T1M3 = PhilFoden
ahd = 0
while ahd < 40:
print("")
ahd += 1
possession1 = 0
shot1 = 0
shot2 = 0
target1 = 0
target2 = 0
print("Defenders" + " | " + "Defending Skill" + " | " + "Risk of being carded" + " | " + "Penalty Skill")
print("John Stones" + " | " + str(JohnStones[0]) + " | " + str(JohnStones[1]) + " | " + str(JohnStones[2]))
print("Kyle Walker" + " | " + str(KyleWalker[0]) + " | " + str(KyleWalker[1]) + " | " + str(KyleWalker[2]))
print("Ben Chilwell" + " | " + str(BenChilwell[0]) + " | " + str(BenChilwell[1]) + " | " + str(BenChilwell[2]))
print("Trent Alexander" + " | " + str(TrentAlexander[0]) + " | " + str(TrentAlexander[1]) + " | " + str(TrentAlexander[2]))
print("Tyrone Mings" + " | " + str(TyroneMings[0]) + " | " + str(TyroneMings[1]) + " | " + str(TyroneMings[2]))
print("")
T1D1 = list(input("Who is your first defender " + team1 + "? "))
if T1D1[2] == "h":
T1D1 = JohnStones
elif T1D1[2] == "l":
T1D1 = KyleWalker
elif T1D1[2] == "n":
T1D1 = BenChilwell
elif T1D1[2] == "e":
T1D1 = TrentAlexander
elif T1D1[2] == "r":
T1D1 = TyroneMings
print("")
T1D2 = list(input("Who is your second defender " + team1 + "? "))
if T1D2[2] == "h":
T1D2 = JohnStones
elif T1D2[2] == "l":
T1D2 = KyleWalker
elif T1D2[2] == "n":
T1D2 = BenChilwell
elif T1D2[2] == "e":
T1D2 = TrentAlexander
elif T1D2[2] == "r":
T1D2 = TyroneMings
print("")
T1D3 = list(input("Who is your third defender " + team1 + "? "))
if T1D3[2] == "h":
T1D3 = JohnStones
elif T1D3[2] == "l":
T1D3 = KyleWalker
elif T1D3[2] == "n":
T1D3 = BenChilwell
elif T1D3[2] == "e":
T1D3 = TrentAlexander
elif T1D3[2] == "r":
T1D3 = TyroneMings
ahd = 0
while ahd < 40:
print("")
ahd += 1
print("Goalkeepers" + " | " + " Regular saving " + " | " + "Penalties saving" " | " + " Risk of being carded")
print("Jordan Pickford" + " | " + str(JordanPickford[0]) + " | " + str(JordanPickford[1]) + " | " + str(JordanPickford[2]))
print("Sam Johnstone" + " | " + str(SamJohnstone[0]) + " | " + str(SamJohnstone[1]) + " | " + str(SamJohnstone[2]))
print("")
T1K = input("Who is your goalie " + team1 + "? ")
if T1K[0] == "j" or T1K[0] == "J":
T1K = JordanPickford
elif T1K[0] == "s" or T1K[0] == "S":
T1K = SamJohnstone
ahd = 0
while ahd < 40:
print("")
ahd += 1
print("Attackers" + " | " "Goal Scoring" + " | " "Risk of being carded" + " | " + "Penalty Skill")
print("Harry Kane" + " | " + str(HarryKane[0]) + " | " + str(HarryKane[1]) + " | " + str(HarryKane[2]))
print("Marcus Rashford" + " | " + str(MarcusRashford[0]) + " | " + str(MarcusRashford[1]) + " | " + str(MarcusRashford[2]))
print("Raheem Sterling" + " | " + str(RaheemSterling[0]) + " | " + str(RaheemSterling[1]) + " | " + str(RaheemSterling[2]))
print("")
T2A1 = list(input("Who is your first forward " + team2 + "? "))
if T2A1[0] == "H" or T2A1[0] == "h":
T2A1 = HarryKane2
elif T2A1[0] == "M" or T2A1[0] == "m":
T2A1 = MarcusRashford2
elif T2A1[0] == "R" or T2A1[0] == "r":
T2A1 = RaheemSterling2
print("")
T2A2 = list(input("Who is your second forward " + team2 + "? "))
if T2A2[0] == "H" or T2A2[0] == "h":
T2A2 = HarryKane2
elif T2A2[0] == "M" or T2A2[0] == "m":
T2A2 = MarcusRashford2
elif T2A2[0] == "R" or T2A2[0] == "r":
T2A2 = RaheemSterling2
ahd = 0
while ahd < 40:
print("")
ahd += 1
print("Midfielders" + " | " + " Goal Scoring " + " | " + "Risk of being carded" + " | " + "Penalty Skill" + " | " + "Defending Skill")
print("Jack Grealish" + " | " + str(JackGrealish[0]) + " | " + str(JackGrealish[1]) + " | " + str(JackGrealish[2]) + " | " + str(JackGrealish[3]))
print("Kalvin Philips" + " | " + str(KalvinPhilips[0]) + " | " + str(KalvinPhilips[1]) + " | " + str(KalvinPhilips[2]) + " | " + str(KalvinPhilips[3]))
print("Bukayo Saka" + " | " + str(BukayoSaka[0]) + " | " + str(BukayoSaka[1]) + " | " + str(BukayoSaka[2]) + " | " + str(BukayoSaka[3]))
print("Jude Bellingham" + " | " + str(JudeBellingham[0]) + " | " + str(JudeBellingham[1]) + " | " + str(JudeBellingham[2]) + " | " + str(JudeBellingham[3]))
print("Phil Foden" + " | " + str(PhilFoden[0]) + " | " + str(PhilFoden[1]) + " | " + str(PhilFoden[2]) + " | " + str(PhilFoden[3]))
print("")
T2M1 = list(input("Who is your first midfielder? " + team2 + " "))
if T2M1[2] == "c":
T2M1 = JackGrealish2
elif T2M1[2] == "l":
T2M1 = KalvinPhilips2
elif T2M1[2] == "k":
T2M1 = BukayoSaka2
elif T2M1[2] == "d":
T2M1 = JudeBellingham2
elif T2M1[2] == "i":
T2M1 = PhilFoden2
print("")
T2M2 = list(input("Who is your second midfielder? " + team2 + " "))
if T2M2[2] == "c":
T2M2 = JackGrealish2
elif T2M2[2] == "l":
T2M2 = KalvinPhilips2
elif T2M2[2] == "k":
T2M2 = BukayoSaka2
elif T2M2[2] == "d":
T2M2 = JudeBellingham2
elif T2M2[2] == "i":
T2M2 = PhilFoden2
print("")
T2M3 = list(input("Who is your third midfielder? " + team2 + " "))
if T2M3[2] == "c":
T2M3 = JackGrealish2
elif T2M3[2] == "l":
T2M3 = KalvinPhilips2
elif T2M3[2] == "k":
T2M3 = BukayoSaka2
elif T2M3[2] == "d":
T2M3 = JudeBellingham2
elif T2M3[2] == "i":
T2M3 = PhilFoden2
print("")
print("")
print("")
ahd = 0
while ahd < 40:
print("")
ahd += 1
print("Defenders" + " | " + "Defending Skill" + " | " + "Risk of being carded" + " | " + "Penalty Skill")
print("John Stones" + " | " + str(JohnStones[0]) + " | " + str(JohnStones[1]) + " | " + str(JohnStones[2]))
print("Kyle Walker" + " | " + str(KyleWalker[0]) + " | " + str(KyleWalker[1]) + " | " + str(KyleWalker[2]))
print("Ben Chilwell" + " | " + str(BenChilwell[0]) + " | " + str(BenChilwell[1]) + " | " + str(BenChilwell[2]))
print("Trent Alexander" + " | " + str(TrentAlexander[0]) + " | " + str(TrentAlexander[1]) + " | " + str(TrentAlexander[2]))
print("Tyrone Mings" + " | " + str(TyroneMings[0]) + " | " + str(TyroneMings[1]) + " | " + str(TyroneMings[2]))
print("")
T2D1 = list(input("Who is your first defender " + team2 + "? "))
if T2D1[2] == "h":
T2D1 = JohnStones2
elif T2D1[2] == "l":
T2D1 = KyleWalker2
elif T2D1[2] == "n":
T2D1 = BenChilwell2
elif T2D1[2] == "e":
T2D1 = TrentAlexander2
elif T2D1[2] == "r":
T2D1 = TyroneMings2
print("")
T2D2 = list(input("Who is your second defender " + team2 + "? "))
if T2D2[2] == "h":
T2D2 = JohnStones2
elif T2D2[2] == "l":
T2D2 = KyleWalker2
elif T2D2[2] == "n":
T2D2 = BenChilwell2
elif T2D2[2] == "e":
T2D2 = TrentAlexander2
elif T2D2[2] == "r":
T2D2 = TyroneMings2
print("")
T2D3 = list(input("Who is your third defender " + team2 + "? "))
if T2D3[2] == "h":
T2D3 = JohnStones2
elif T2D3[2] == "l":
T2D3 = KyleWalker2
elif T2D3[2] == "n":
T2D3 = BenChilwell2
elif T2D3[2] == "e":
T2D3 = TrentAlexander2
elif T2D3[2] == "r":
T2D3 = TyroneMings2
ahd = 0
while ahd < 40:
print("")
ahd += 1
print("Goalkeepers" + " | " + " Regular saving " + " | " + "Penalties saving" " | " + " Risk of being carded")
print("Jordan Pickford" + " | " + str(JordanPickford[0]) + " | " + str(JordanPickford[1]) + " | " + str(JordanPickford[2]))
print("Sam Johnstone" + " | " + str(SamJohnstone[0]) + " | " + str(SamJohnstone[1]) + " | " + str(SamJohnstone[2]))
print("")
T2K = input("Who is your goalie " + team2 + "? ")
if T2K[0] == "j" or T2K[0] == "J":
T2K = JordanPickford2
elif T2K[0] == "s" or T2K[0] == "S":
T2K = SamJohnstone2
ahd = 0
while ahd < 40:
print("")
ahd += 1
ahd = 0
while ahd < 40:
print("")
ahd += 1
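# Aggregate team ratings: sums of the relevant per-player stats, used below
# to weight interception, tackle, and shot probabilities.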
team1attack = T1A1[0] + T1A2[0]
team1midfielda = T1M1[0] + T1M2[0] + T1M3[0]
team1midfieldd = T1M1[3] + T1M2[3] + T1M3[3]
team1defense = T1D1[0] + T1D2[0] + T1D3[0]
team2midfielda = T2M1[0] + T2M2[0] + T2M3[0]
team2midfieldd = T2M1[3] + T2M2[3] + T2M3[3]
team2defense = T2D1[0] + T2D2[0] + T2D3[0]
team2attack = T2A1[0] + T2A2[0]
team1players = [T1A1, T1A2, T1M1, T1M2, T1M3, T1D1, T1D2, T1D3]
team1playersa = [T1A1, T1A2]
team1playersm = [T1M1, T1M2, T1M3]
team1playersd = [T1D1, T1D2, T1D3]
team2players = [T2A1, T2A2, T2M1, T2M2, T2M3, T2D1, T2D2, T2D3]
team2playersa = [T2A1, T2A2]
team2playersm = [T2M1, T2M2, T2M3]
team2playersd = [T2D1, T2D2, T2D3]
dkd = 0
while dkd < 40:
print("")
dkd += 1
adlk = [-1, 1]
ball = random.choice(adlk)
startplacement = ball
us = 0
team1score = 0
team2score = 0
half = 46
full = 90
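# Ball state machine (codes inferred from the branches below): the sign gives
# possession (positive = team1, negative = team2); |1| kickoff, |2| midfield,
# |3| edge of the box, |4| in the box, |5| corner, |6| attacking freekick,
# |7| midfield freekick, |8| penalty, |9| back line, |10| goalkeeper, |11| goal.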
while us < full:
if ball > 1:
possession1 += 1
if us != 0 and us != 45:
time.sleep(5.5)
print("")
if us % 10 == 0 and us != 0:
print(str(us) + "'")
print("")
time.sleep(0.5)
if us == half + 2:
ball = -startplacement
us += 2
if ball == 1:
player = random.choice(team1playersa)
player2 = random.choice(team1playersm)
print(team1 + " start with the ball.")
time.sleep(2)
print("")
x = random.randint(1, 2)
if x == 1:
print(player[3] + " starts off by passing to " + player2[4])
if x == 2:
print("The game commences with " + player[3] + " passing to " + player2[4])
ball = 2
elif ball == 2:
mk = 0
entered = 0
x = random.randint(1, 100)
if x < 60 * (player[0]) / (team2midfieldd):
ball = -2
entered = 1
x = random.randint(1, 4)
player = random.choice(team2playersm)
if x == 1:
print(player[4] + " steals the ball from " + player2[4])
if x == 2:
print("A bad pass from " + player2[4] + " leads to " + player[4] + " getting the ball for " + team2)
if x == 3:
print(player[4] + " cuts out " + player2[4] + "'s pass")
if x == 4:
print("A poor piece of play by " + player2[4] + " leads to an interception by " + player[4])
else:
for player in team2playersm:
x = random.randint(1, 100)
if x < (player[0] * 1.5) and entered == 0:
entered = 1
ha = random.randint(1, 20)
ball = -7
if ha == 1 and player[5] != 1:
player[5] += 2
ak = random.randint(1, 2)
if ak == 1:
print(player[4] + " slides into " + player2[4] + " out of nowhere, entirely missing the ball, and is given a RED CARD!")
if ak == 2:
print("A simply horrible challenge by " + player[4] + " on " + player2[4] + " gets him sent off")
team2playersm.remove(player)
team2midfieldd -= player[3]
team2midfielda -= player[0]
elif ha < 5:
player[5] += 1
if player[5] == 1:
ei = random.randint(1, 3)
if ei == 1:
print("A bad challenge by " + player[4] + " leads to a yellow card.")
if ei == 2:
print("The ref gives a yellow card to " + player[4] + " after they catch more of " + player2[4] + " than the ball.")
if ei == 3:
print("A sliding tackle by " + player[4] + " into the leg of " + player2[4] + " gives him a yellow card.")
if player[5] == 2:
ei = random.randint(1, 2)
if ei == 1:
print("A HORRIBLE CHALLENGE BY " + player[4] + "! He slides into the leg of " + player2[4] + " getting a second yellow card. He is sent off!")
team2playersm.remove(player)
team2midfieldd -= player[3]
team2midfielda -= player[0]
else:
aj = random.randint(1, 6)
if aj == 1:
print(player[4] + " fouls " + player2[4] + ". " + team1 + " gets a freekick from the middle, but no card is given.")
if aj == 2:
print("Freekick from the middle after a foul by " + player[4] + ".")
if aj == 3:
print("Challenge is poorly timed by " + player[4] + " and " + team1 + " gets a freekick")
if aj == 4:
print(player[4] + " caught more of the man than the ball and gives away a freekick")
if aj == 5:
print("Despite protests from many players on the team, the ref gives a freekick away after a challenge by " + player[4] + ".")
if aj == 6:
print(player[4] + "'s foot bounces off the ball onto the leg of " + player2[4] + ", showing studs and giving away a freekick.")
if entered == 0:
z = random.randint(1, 100)
if z < 15:
ball = 4
x = random.randint(1, 4)
player = random.choice(team1playersa)
if x == 1:
print("A beautiful ball by " + player2[4] + " arrives right at the feet of " + player[3] + " in the box.")
if x == 2:
print(player2[4] + " sees an opportunity and seizes it, passing the ball into the box at the feet of " + player[3] + ".")
if x == 3:
print(player2[4] + " sails the ball over the heads of the midfield of " + team2 + " right to the feet of " + player[3] + ".")
if x == 4:
print("What a spectacular through ball by " + player2[4] + " to give a ball right at the feet of " + player[3] + " in the box!")
elif z < 70:
ball = 3
x = random.randint(1, 4)
player = random.choice(team1playersa)
if x == 1:
print(player2[4] + " plays a dangerous through ball to " + player[3] + " at the edge of the box.")
if x == 2:
print(player[3] + " receives a smart ball from " + player2[4] + " and is in open space near the box.")
if x == 3:
print(player2[4] + " advances with the ball and plays it off to " + player[3] + " who is in a dangerous position.")
if x == 4:
print(player2[4] + " plays a quick pass to " + player[3] + " who uses it well and starts advancing near the box.")
else:
ball = 2
player = random.choice(team1playersm)
while player == player2:
player = random.choice(team1playersm)
fh = random.randint(1, 3)
if fh == 1:
print(team1 + " are currently content to keep possession, and " + player2[4] + " passes it laterally to " + player[4])
if fh == 2:
print(player2[4] + " passes the ball to " + player[4] + ".")
if fh == 3:
print("The ball remains in the middle, as " + player2[4] + " gives the ball to " + player[4] + ".")
player2 = player
elif ball == 3:
entered = 0
for x in team2playersd:
ks = random.randint(1, 100)
if ks < 5 * (x[1]) and entered == 0:
entered = 1
ball = 6
b = random.randint(1, 3)
if b == 1:
print(x[3] + " tries to stop the threat with a challenge, but gives away a freekick.")
if b == 2:
print(player2[3] + " goes down, and gets a freekick")
if b == 3:
print(x[3] + " gets more of " + player2[3] + "'s leg than the ball and gives away a freekick.")
b = random.randint(1, 3)
if b == 1:
time.sleep(3)
print("")
x[4] += 1
if x[4] == 2:
print("The ref gives a second yellow card to " + x[3] + ", and he is sent off.")
team2playersd.remove(x)
team2defense -= x[0]
else:
y = random.randint(1, 3)
if y == 1:
print("The ref gives a yellow card to " + x[3])
if y == 2:
print(x[3] + " gets a yellow card for this challenge.")
if y == 3:
print("On top of the freekick given away, " + x[3] + " gets a yellow card.")
pass
if entered == 0:
x = random.randint(1, 100)
if x < 14 * (team2defense) / (player2[0]):
ball = -9
en = random.randint(1, 4)
player = random.choice(team2playersd)
if en == 1:
print(player2[3] + " gets the ball stripped off him by " + player[3] + ".")
if en == 2:
print(player2[3] + " pulls his foot back for a shot, but " + player[3] + " steals the ball with an excellent sliding tackle.")
if en == 3:
print(player2[3] + " dribbles around one player, but makes a mistake and " + player[3] + " gets the ball.")
if en == 4:
print(player2[3] + " shoots, but it is blocked by the defense, and " + player[3] + " gets the rebound")
elif x < 65:
ball = 4
player = random.choice(team1playersa)
while player == player2:
player = random.choice(team1playersa)
lkd = random.randint(1, 3)
if lkd == 1:
print(player2[3] + " plays a through ball into the box straight to the feet of " + player[3] + ".")
if lkd == 2:
print(player[3] + " receives a brilliant ball from " + player2[3] + " in a dangerous position in the box")
if lkd == 3:
print("Danger for " + team2 + " as a ball comes from " + player2[3] + " to " + player[3] + " in the box.")
else:
x = random.randint(1, 100)
kda = random.randint(1, 3)
if kda == 1:
print(player[3] + " takes a shot at goal.")
if kda == 2:
print(player[3] + " puts his foot behind the ball and hits a powerful shot.")
if kda == 3:
print(player[3] + " is the fortunate reciever of bad marking, and manages to get into space for a clear shot.")
time.sleep(4)
print("")
shot1 += 1
if x < 40 * (player[0] / T2K[0]):
ksj = random.randint(1, 4)
ball = 11
if ksj == 1:
print(player[3] + " sails the ball into the top right corner and scores!")
if ksj == 2:
print(player[3] + "'s shot takes a deflection, and goes past the goalkeeper into the bottom right corner!")
if ksj == 3:
print(player[3] + " hits a brilliant shot that curves into the top left corner past the goalkeeper's outstretched arms.")
if ksj == 4:
print(player[3] + "'s ball travels through the legs of defenders, making it difficult for the keeper to follow, and goes into the goal!")
target1 += 1
else:
ksk = random.randint(1, 150)
if ksk < 8 * T2K[0]:
ball = -10
dkd = random.randint(1, 3)
if dkd == 1:
print(T2K[3] + "makes a great save and regains control of the ball.")
if dkd == 2:
print("A goal looked certain for a moment, but in the end, " + T2K[3] + " was able to barely grab the ball.")
if dkd == 3:
print("The save was fairly simple for " + T2K[3] + " as the shot failed to find the top corner of the goal.")
target1 += 1
else:
ball = 5
jfd = random.randint(1, 150)
if jfd < 10 * player[0]:
kds = random.randint(1, 3)
if kds == 1:
print(T2K[3] + " palms the ball wide, and it goes out for a corner.")
if kds == 2:
print(T2K[3] + " barely gets to the ball, but in the end, is able to nudge the ball out for a corner.")
if kds == 3:
print(T2K[3] + "'s fingertips just reach the ball, and it goes out for a corner.")
target1 += 1
else:
ball = -10
kd = random.randint(1, 3)
if kd == 1:
print(player[3] + "'s shot had some power behind it, but was unaccurate and went over the bar for a goalkick.")
if kd == 2:
print(player[3] + "'s shot barely goes wide, nearly bringing a brilliant goal.")
if kd == 3:
print(player[3] + "'s shot had just a bit too much curve, and went wide for a goalkick.")
player2 = player
elif ball == 4:
entered = 0
for a in team2playersd:
x = random.randint(1, 25)
if x < a[1] and entered == 0:
entered = 1
ball = 8
h = random.randint(1, 3)
if h == 1:
print(a[3] + " slides into " + player2[3] + "'s legs, completely missing the ball. PENALTY!")
if h == 2:
print(a[3] + " tries and fails to get the ball from " + player2[3] + " but gets a lot of the player. The ref says PENALTY")
if h == 3:
print(a[3] + " tries to pull " + player2[3] + " back, and in the end succeeds, but the ref gives a PENALTY for the poor challenge.")
x = random.randint(1, 3)
if x == 1:
print("")
a[4] += 1
if a[4] == 2:
print("The ref gives a second yellow card to " + a[3] + ", and he is sent off.")
team2playersd.remove(a)
team2defense -= a[0]
else:
time.sleep(3)
print("")
y = random.randint(1, 3)
if y == 1:
print("The ref gives a yellow card to " + a[3])
if y == 2:
print(a[3] + " gets a yellow card for this challenge.")
if y == 3:
print("On top of the penalty given away, " + a[3] + " gets a yellow card.")
player2 = player
x = random.randint(1, 100)
if entered == 0:
shot1 += 1
if x < 40 * (player2[0] / T2K[0]):
target1 += 1
ball = 11
h = random.randint(1, 3)
if h == 1:
print(T2K[3] + " rushes out to tackle " + player2[3] + " but he slides the ball through the goalkeepers legs. GOAL!")
elif h == 2:
print(player2[3] + " weaves round defenders and shoots the ball into the corner of the goal.")
elif h == 3:
print("Poor defending leaves " + player2[3] + " in empty space, and he seizes the opportunity, putting the ball in the bottom left corner of the goal.")
elif x < 70 * (player2[0] / T2K[0]):
target1 += 1
ball = 5
kds = random.randint(1, 3)
if kds == 1:
print(T2K[3] + " palms the ball wide, and it goes out for a corner.")
if kds == 2:
print(T2K[3] + " barely gets to the ball, but in the end, is able to nudge the ball out for a corner.")
if kds == 3:
print(T2K[3] + "'s fingertips just reach the ball, and it goes out for a corner.")
else:
ball = -10
x = random.randint(1, 2)
if x == 1:
kd = random.randint(1, 3)
if kd == 1:
print(player2[3] + "'s shot had some power behind it, but was unaccurate and went over the bar for a goalkick.")
if kd == 2:
print(player2[3] + "'s shot barely goes wide, nearly bringing a brilliant goal.")
if kd == 3:
print(player2[3] + "'s shot had just a bit too much curve, and went wide for a goalkick.")
if x == 2:
target1 += 1
dkd = random.randint(1, 3)
if dkd == 1:
print(T2K[3] + "makes a great save and regains control of the ball.")
if dkd == 2:
print("A goal looked certain for a moment, but in the end, " + T2K[3] + " was able to barely grab the ball.")
if dkd == 3:
print("The save was fairly simple for " + T2K[3] + " as the shot failed to find the top corner of the goal.")
elif ball == 5:
y = random.randint(1, 2)
if y == 1:
player = random.choice(team1playersa)
print(player[3] + " will take the corner.")
if y == 2:
player = random.choice(team1playersm)
print(player[4] + " decides to take the corner.")
time.sleep(3)
print("")
x = random.randint(1, 100)
arrived = 0
if x > player[0] * 9:
ball = -10
y = random.randint(1, 3)
if y == 1:
print("The ball goes straight towards " + T2K[3] + ".")
if y == 2:
print("The ball flies way long, and goes out for a goalkick.")
if y == 3:
print("No one is near the ball, leading it to go wide for a goalkick.")
else:
y = random.randint(1, 4)
if y == 1:
print("A promising ball flies into the box.")
elif y == 2:
print("The corner is good and curves towards the goal.")
elif y == 3:
print("The corner heads towards the middle of the box.")
elif y == 4:
print("It is well taken, and it curves into the middle of the box.")
time.sleep(4)
print("")
x = random.randint(1, 100)
if x < 4 * T2K[0]:
y = random.randint(1, 3)
ball = -10
if y == 1:
print(T2K[3] + " comes out and grabs the ball.")
elif y == 2:
print("The ball strays too close to the goal and " + T2K[3] + " is able to grab it.")
elif y == 3:
print(T2K[3] + " takes a risk and comes out, but he is able to get the ball.")
else:
x = random.randint(1, 100)
b = random.randint(1, 2)
if b == 1:
player = random.choice(team1playersa)
k = 3
else:
player = random.choice(team1playersm)
k = 4
b = random.randint(1, 2)
if b == 1:
player2 = random.choice(team2playersm)
n = 3
h = 4
else:
player2 = random.choice(team2playersd)
n = 0
h = 3
if x < 30 * (player2[n] / player[0]):
ball = -10
y = random.randint(1, 3)
if y == 1:
print(player2[h] + " is able to fend off " + player[k] + " and the ball goes out for a goalkick.")
elif y == 2:
print(player2[h] + " and " + player[k] + " compete for the ball, and in the end, neither of them get it, and it goes out for a goalkick")
elif y == 3:
print(player2[h] + " successfully defends " + player[k] + " and the ball flies long for a goalkick.")
elif x < 50 * (player2[n] / player[0]):
ball = 5
y = random.randint(1, 3)
if y == 1:
print(player2[h] + " is able to prevent " + player[k] + " from getting the ball, and heads it out for another corner.")
elif y == 2:
print(player2[h] + " gets to the ball first, but can only head it back over the goal for another corner.")
elif y == 3:
print(player[k] + " looked dangerous for a second, but " + player2[h] + " heads the ball long for another corner.")
else:
y = random.randint(1, 4)
shot1 += 1
target1 += 1
if y == 1:
print(player2[h] + " is not able to fend off " + player[k] + " who heads the ball at the goal.")
elif y == 2:
print(player[k] + " beats " + player2[h] + " and heads the ball towards the bottom corner.")
elif y == 3:
print(player[k] + " jumps high in the air and heads it towards the goal.")
else:
print(player[k] + " and " + player2[h] + " both jump for the ball, but in the end " + player[k] + " wins it and heads it towards the goal.")
print("")
time.sleep(5)
x = random.randint(1, 100)
if x < 7 * T2K[0]:
ball = -10
y = random.randint(1, 3)
if y == 1:
print(T2K[3] + " makes a brilliant save, and is able to hold onto the ball.")
elif y == 2:
print(T2K[3] + " leaps and is just able to grab the ball with his outstretched arms.")
elif y == 3:
print(T2K[3] + " reacts quickly and is able to hug the ball to his chest.")
else:
ball = 11
y = random.randint(1, 3)
if y == 1:
print(T2K[3] + " lungs for the ball, but he is too slow and it goes into the back of the net.")
elif y == 2:
print(T2K[3] + " dives towards the ball, but is too slow, and the ball flies into the back of net.")
elif y == 3:
print("The shot is accurate, and " + T2K[3] + " barely misses the chance to push it wide with his fingertips.")
elif ball == 6:
x = str(input("Who do you want to take the freekick? "))
print("")
playerchosen = 0
for a in team1playersa:
if x == a[3]:
player = a
playerchosen = 1
print(a[3] + " will take the freekick.")
for a in team1playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the freekick.")
print("")
time.sleep(3)
while playerchosen == 0:
print("")
print("Please give the player's full name, with caps and spaces. It also must be an attacker or midfielder. Please add (M) or (A) after their name. ")
time.sleep(1)
x = str(input("Who do you want to take the freekick? "))
print("")
playerchosen = 0
time.sleep(2)
for a in team1playersa:
if x == a[3]:
player = a
playerchosen = 1
print(a[3] + " will take the freekick.")
time.sleep(3)
print("")
for a in team1playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the freekick.")
time.sleep(3)
print("")
shot1 += 1
x = random.randint(1, 100)
if x > 9 * player[0]:
b = random.randint(1, 3)
if b == 1:
print("The kick is poor and the ball sails over the goal.")
elif b == 2:
print("The freekick had some power to it, but the aim was off, and the ball is sent into the stands.")
elif b == 3:
print("The ball has some curl on it, but not enough, and goes past the goal for a goalkick.")
ball = -10
else:
target1 += 1
x = random.randint(1, 100)
if x < 6 * T2K[1]:
ball = -10
b = random.randint(1, 3)
if b == 1:
print("It was a decent shot, but a comfortable save for " + T2K[3] + " nonetheless.")
if b == 2:
print("The ball curls towards the top left corner, but a fantastic save by " + T2K[3] + " ends up with the ball in his hands.")
if b == 3:
print("The ball has some power on it, but " + T2K[3] + " is able to grab the ball.")
elif x < 8 * T2K[0]:
b = random.randint(1, 2)
ball = 5
if b == 1:
print("The ball has spin and power, but " + T2K[3] + " is able to knock it wide for a corner.")
if b == 2:
print("The ball curves towards the top corner, but " + T2K[3] + " just gets a hand to it and knocks it wide for a corner.")
else:
ball = 11
b = random.randint(1, 3)
if b == 1:
print("GOAL! The ball flies into the corner, and there was nothing " + T2K[3] + " could do to stop it.")
if b == 2:
print("He really got his foot behind it. The ball flies past " + T2K[3] + "'s fingertips into the goal.")
if b == 3:
print("An unfortunate deflection for " + team2 + " ends up with the ball in the back of the net.")
player2 = player
elif ball == 7:
player2 = random.choice(team1playersm)
x = random.randint(1, 4)
if x == 1:
print(player2[4] + " takes the freekick from the middle quickly.")
elif x == 2:
print(team1 + " decide to move with speed, and " + player2[4] + " is quick to take the freekick.")
elif x == 3:
print("The freekick has little menace, but " + team1 + " are happy for the possession. " + player2[4] + " will take the freekick.")
elif x == 4:
print("The freekick is much to far away for a shot, and will be taken as a pass. It will be taken by " + player2[4])
time.sleep(3.4)
print("")
x = random.randint(1, 100)
if x < 30 / player[0]:
ball = -2
b = random.randint(1, 2)
x = random.randint(1, 100)
if x < 100 * (T2M1[0] / team2midfieldd):
player = T2M1
elif x < 100 * ((T2M1[0] + T2M2[0]) / team2midfieldd):
player = T2M2
else:
player = T2M3
if b == 1:
print("The pass by " + player2[4] + " is horrible and is intercepted by " + player[4] + ".")
if b == 2:
print(player2[4] + "'s pass was weak and inacurate, and it was intercepted by " + player[4] + ".")
else:
x = random.randint(1, 100)
if x < 15:
x = random.randint(1, 100)
if x < 6 * player2[0]:
x = random.randint(1, 100)
if x < 100 * (T2M1[0] / team2midfieldd):
player = T2M1
elif x < 100 * ((T2M1[0] + T2M2[0]) / team2midfieldd):
player = T2M2
else:
player = T2M3
ball = -2
b = random.randint(1, 2)
if b == 1:
print(player2[4] + " attempts a long pass to right outside the box, but it is intercepted by " + player[4] + ".")
if b == 2:
print(player2[4] + " sees an opportunity for a long pass to right outside the box, but " + player[4] + " cuts it out.")
else:
x = random.randint(1, 100)
ball = 3
if x < 100 * (T1A1[0] / team1attack):
player = T1A1
elif x < 100 * ((T1A1[0] + T1A2[0]) / team1attack):
player = T1A2
else:
player = T1A2
while player2 == player:
x = random.randint(1, 100)
if x < 100 * (T1A1[0] / team1attack):
player = T1A1
elif x < 100 * ((T1A1[0] + T1A2[0]) / team1attack):
player = T1A2
else:
player = T1A2
b = random.randint(1, 2)
if b == 1:
print(player2[4] + " sees an opportunity, and curves a ball up to " + player[3] + " who is right outside the box.")
elif b == 2:
print(player2[4] + " passes it over the heads of the opponent midfielders straight to the feet of " + player[3] + " who is standing right outside the box.")
else:
player = random.choice(team1playersm)
while player2 == player:
player = random.choice(team1playersm)
x = random.randint(1, 3)
ball = 2
if x == 1:
print(player2[4] + " plays a simple ball to " + player[4] + ".")
if x == 2:
print(player2[4] + " plays a short pass to " + player[4] + ".")
elif x == 3:
print("The game resumes with " + player2[4] + " passing to " + player[4] + ".")
player2 = player
elif ball == 8:
x = str(input("Who do you want to take the penalty? "))
print("")
playerchosen = 0
for a in team1playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
time.sleep(3)
print(a[3] + " will take the penalty.")
print("")
for a in team1playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the penalty.")
n = 4
time.sleep(3)
print("")
shot1 += 1
while playerchosen == 0:
time.sleep(3)
print("")
print("Please give the player's full name, with caps and spaces. It also must be an attacker or midfielder. Please add (M) or (A) after their name. ")
time.sleep(1)
x = str(input("Who do you want to take the penalty? "))
print("")
playerchosen = 0
time.sleep(2)
for a in team1playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
print(a[3] + " will take the penalty.")
print("")
for a in team1playersm:
if x == a[4]:
player = a
playerchosen = 1
n = 4
print(a[4] + " will take the penalty.")
print("")
time.sleep(3)
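# Penalty resolution (as implemented below): the keeper dives left or right
# while the taker picks left, right, or middle. A wild miss is rolled first;
# then a matching guess (and, as coded, a middle shot) triggers the save and
# rebound rolls, while diving the wrong way concedes automatically.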
x = random.randint(1, 2)
if x == 1:
print(T2K[3] + " dives right.")
dive = 1
elif x == 2:
print(T2K[3] + " dives left.")
dive = 2
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 45:
print(player[n] + " shoots left.")
shoot = 2
elif x > 55:
print(player[n] + " shoots right.")
shoot = 1
else:
print(player[n] + " goes down the middle.")
shoot = 6
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 50 / player[2]:
x = random.randint(1, 3)
if x == 1:
print("The ball ricochets off the post!")
if x == 2:
print("The shot has little aim, and the ball is sent long.")
if x == 3:
print("He went for the perfect shot, but ended up completely missing the goal.")
ball = -10
if ball != -10:
if shoot == dive:
x = random.randint(1, 100)
if x < (40 * player[2] / T2K[1]):
ball = 11
b = random.randint(1, 3)
if b == 1:
print(player[n] + " just sails the ball past the goalkeepers outstretched arms.")
if b == 2:
print(T2K[3] + "'s fingers are mere inches away from the ball, but it sails into the net.")
if b == 3:
print("The ball flies into the top corner, and the goalkeeper cannot save it.")
target1 += 1
elif x < (60 * player[2] / T2K[1]):
x = random.randint(1, 3)
if x == 1:
print(T2K[3] + " is able to just get his palms to it, and the ball rebounds back out into the attacking players.")
elif x == 2:
print(T2K[3] + " is near the ball, but isn't able to grab the ball, and it rebounds off back into the fray of players.")
elif x == 3:
print("The ball doesn't go wide enough to make past the keeper, but " + T2K[3] + " isn't able to grab it, and it bounces off his palms.")
target1 += 1
x = random.randint(1, 100)
time.sleep(3)
print("")
if x < 15 * (team1attack + team1midfielda + 7 * player[0]) / (team2defense):
x = random.randint(1, 100)
if x < (800 * player[0]) / (team1attack + team1midfielda + 7 * player[0]):
# the taker himself reaches the rebound first
n = 3
elif x < (team1attack - player[0]) / (team1attack + team1midfielda + 7 * player[0]):
player2 = random.choice(team1playersa)
while player2 == player:
player2 = random.choice(team1playersa)
player = player2
n = 3
else:
player2 = random.choice(team1playersm)
while player2 == player:
player2 = random.choice(team1playersm)
player = player2
n = 4
x = random.randint(1, 100)
if x < 70 * player[0] / T2K[1]:
ball = 11
b = random.randint(1, 2)
if b == 1:
print(player[n] + " gets the ball on the rebound, and is able to put it past " + T2K[3] + " for his team.")
if b == 2:
print(player[n] + " lunges to get ther rebound and kicks it in the net.")
target1 += 1
shot1 += 1
else:
ball = -10
b = random.randint(1, 2)
if b == 1:
print(player[n] + "gets the rebound and shoots, but " + T2K[3] + " is able to barely get back and grab hold of the ball.")
if b == 2:
print(player[n] + " lunges and shoots on the rebound, but " + T2K[3] + " is just able to get back and grabs the ball.")
shot1 += 1
target1 += 1
else:
ball = -9
x = random.randint(1, 100)
if x < (100 * T2D1[0]) / (Team2Defense):
player = T2D1
elif x < (100 * T2D2[0]) / (Team2Defense):
player = T2D2
else:
player = T2D3
b = random.randint(1, 3)
if b == 1:
print(player[3] + " is able to get to the ball first.")
if b == 2:
print(player[3] + " reaches the ball and is in a position to clear it.")
if b == 3:
print(player[3] + " lunges for the ball and gains possession.")
else:
ball = -10
b = random.randint(1, 4)
if b == 1:
print(T2K[3] + " is able to grab onto the ball, negating the threat.")
elif b == 2:
print(player[n] + " hangs his head in his hands, as " + T2K[3] + "blocks his shot.")
elif b == 3:
print("The shot is weak, and " + T2K[3] + " is able to grab it")
else:
print("The ball flies into the bottom corner, but somehow, " + T2K[3] + "is able to grab it.")
elif shoot - dive == 1 or shoot - dive == -1:
ball = 11
b = random.randint(1, 5)
if b == 1:
print("The ball sails into the bottom corner uncontested.")
elif b == 2:
print("The shot was mediocre, but it goes in nonetheless.")
elif b == 3:
print("All " + T2K[3] + " can do is watch as the ball rolls into the bottom corner.")
elif b == 4:
print("The shot likely wouldn't have been blocked anyway, but the goalkeeper diving the wrong way solidifies the goal.")
else:
print("The shot is perfect, flying into the top corner.")
target1 += 1
else:
x = random.randint(1, 100)
target1 += 1
if x < (60 * a[2] / T2K[1]):
ball = 11
b = random.randint(1, 3)
if b == 1:
print(a[n] + " just sails the ball past the goalkeepers outstretched legs, with nothing he can do to stop it.")
if b == 2:
print(T2K[3] + "'s feet are mere inches away from the ball, but it sails into the net, as his forward momentum prevents him from blocking it.")
if b == 3:
print("The ball flies into the goal behind him, and the goalkeeper cannot save it.")
elif x < (70 * player[2] / T2K[1]):
x = random.randint(1, 3)
if x == 1:
print(T2K[3] + " is able to just get his feet to it, and the ball rebounds back out into the attacking players.")
elif x == 2:
print(T2K[3] + "'s feet are near the ball, and it rebounds off back into the fray of players.")
elif x == 3:
print("The ball doesn't go high enough to make it in, but " + T2K[3] + " isn't able to knock it far with his feat, and it bounces off into the fray of players.")
x = random.randint(1, 100)
print("")
time.sleep(3)
if x < 15 * (team1attack + team1midfielda + 7 * player[0]) / (team2defense):
x = random.randint(1, 100)
if x < (800 * player[0]) / (team1attack + team1midfielda + 7 * player[0]):
# the taker himself reaches the rebound first
n = 3
elif x < (team1attack - player[0]) / (team1attack + team1midfielda + 7 * player[0]):
player2 = random.choice(team1playersa)
while player2 == player:
player2 = random.choice(team1playersa)
player = player2
n = 3
else:
player2 = random.choice(team1playersm)
while player2 == player:
player2 = random.choice(team1playersm)
player = player2
n = 4
x = random.randint(1, 100)
shot1 += 1
target1 += 1
if x < 70 * player[0] / T2K[1]:
ball = 11
b = random.randint(1, 2)
if b == 1:
print(player[n] + " gets the ball on the rebound, and is able to put it past " + T2K[3] + " for his team.")
if b == 2:
print(player[n] + " lunges to get ther rebound and kicks it in the net.")
else:
ball = -10
b = random.randint(1, 2)
if b == 1:
print(player[n] + "gets the rebound and shoots, but " + T2K[3] + " is able to barely get back and grab hold of the ball.")
if b == 2:
print(player[n] + " lunges and shoots on the rebound, but " + T2K[3] + " is just able to get back and grabs the ball.")
else:
ball = -9
x = random.randint(1, 100)
if x < (100 * T2D1[0]) / team2defense:
player = T2D1
elif x < (100 * (T2D1[0] + T2D2[0])) / team2defense:
player = T2D2
else:
player = T2D3
b = random.randint(1, 3)
if b == 1:
print(player[3] + " is able to get to the ball first.")
if b == 2:
print(player[3] + " reaches the ball and is in a position to clear it.")
if b == 3:
print(player[3] + " lunges for the ball and gains possession.")
else:
ball = -10
b = random.randint(1, 4)
if b == 1:
print(T2K[3] + " is able to grab onto the ball, negating the threat.")
elif b == 2:
print(player[n] + " hangs his head in his hands, as " + T2K[3] + "blocks his shot.")
elif b == 3:
print("The shot is weak, and " + T2K[3] + " is able to grab it")
else:
print("The ball flies into the bottom corner, but somehow, " + T2K[3] + "is able to grab it.")
player2 = player
elif ball == 9:
x = random.randint(1, 100)
if x < 8 * (team2midfieldd) / (player2[0]):
ball = -2
player = random.choice(team2playersm)
b = random.randint(1, 3)
if b == 1:
print(player2[3] + " tried to pass to midfield but " + player[4] + " intercepts the pass.")
elif b == 2:
print(player[4] + " cuts out the pass towards midfield of " + player2[3] + ".")
else:
print(player2[3] + "'s pass is poor, and it gets cut out by " + player[4] + ".")
else:
player = random.choice(team1playersm)
b = random.randint(1, 4)
ball = 2
if b == 1:
print(player2[3] + " passes to " + player[4])
elif b == 2:
print(player2[3] + " passes into midfield, where " + player[4] + " waits.")
elif b == 3:
print(player2[3] + " hits the ball towards " + player[4])
else:
print(player2[3] + " surveys his options and decides to pass to " + player[4] + ".")
player2 = player
elif ball == 10:
x = random.randint(1, 100)
if x < (8 * team2attack / T1K[0]):
b = random.randint(1, 3)
ball = -3
player = random.choice(team2playersa)
if b == 1:
print(T1K[3] + " completely messes up the pass to his back line and passes to " + player[3] + " right outside the box.")
if b == 2:
print(T1K[3] + " doesn't see " + player[3] + " who cuts out the pass.")
if b == 3:
print(player[3] + "sees the intended pass and is able to get to it quickly")
elif x < 18 * (team2attack / T1K[0]):
ball = -2
player = random.choice(team2playersm)
b = random.randint(1, 3)
if b == 1:
print(T1K[3] + " kicks it to the middle, and " + player[4] + " intercepts the pass.")
elif b == 2:
print("The pass to the middle by " + T1K[3] + " is cut out by " + player[4])
elif b == 3:
print(player[4] + " sees the pass to the middle by the keeper and cuts it out.")
elif x < 40 * (team2attack / T1K[0]):
ball = 9
b = random.randint(1, 3)
player = random.choice(team1playersd)
if b == 1:
print(T1K[3] + " passes to " + player[3])
elif b == 2:
print(player[3] + " receives the ball from " + T1K[3])
elif b == 3:
print(T1K[3] + " plays the ball short to " + player[3])
else:
ball = 2
b = random.randint(1, 3)
player = random.choice(team1playersm)
if b == 1:
print(T1K[3] + " lobs it into the middle to " + player[4])
elif b == 2:
print(player[4] + " receives a long pass from " + T1K[3])
elif b == 3:
print(T1K[3] + " kicks it to " + player[4])
player2 = player
elif ball == -1:
player = random.choice(team2playersa)
player2 = random.choice(team2playersm)
x = random.randint(1, 2)
print(team2 + " start with the ball.")
time.sleep(2)
print("")
if x == 1:
print(player[3] + " starts off by passing to " + player2[4])
if x == 2:
print("The game commences with " + player[3] + " passing to " + player2[4])
ball = -2
player = player2
elif ball == -2:
mk = 0
entered = 0
x = random.randint(1, 100)
if x < 60 * player[0] / team1midfieldd:
entered = 1
ball = 2
x = random.randint(1, 4)
player = random.choice(team1playersm)
if x == 1:
print(player[4] + " steals the ball from " + player2[4])
if x == 2:
print("A bad pass from " + player2[4] + " leads to " + player[4] + " getting the ball for " + team1)
if x == 3:
print(player[4] + " cuts out " + player2[4] + "'s pass")
if x == 4:
print("A poor piece of play by " + player2[4] + " leads to an interception by " + player[4])
else:
for a in team1playersm:
rk = random.randint(1, 100)
if rk < (a[1] * 1.5) and entered == 0:
entered = 1
ball = 7
ha = random.randint(1, 20)
if ha == 1 and a[5] != 1:
a[5] += 2
ak = random.randint(1, 2)
if ak == 1:
print(a[4] + " slides into " + player2[4] + " out of nowhere, entirely missing the ball, and is given a RED CARD!")
if ak == 2:
print("A simply horrible challenge by " + a[4] + " on " + player2[4] + " gets him sent off with a red card.")
team1playersm.remove(a)
team1midfieldd -= a[3]
team1midfielda -= a[0]
elif ha < 5:
a[5] += 1
if a[5] == 1:
ei = random.randint(1, 3)
if ei == 1:
print("A bad challenge by " + a[4] + " leads to a yellow card.")
if ei == 2:
print("The ref gives a yellow card to " + a[4] + " after they catch more of " + player2[4] + " than the ball.")
if ei == 3:
print("A sliding tackle by " + a[4] + " into the leg of " + player2[4] + " gives him a yellow card.")
if a[5] == 2:
ei = random.randint(1, 2)
if ei == 1:
print("A HORRIBLE CHALLENGE BY " + a[4] + "! He slides into the leg of " + player2[4] + " getting a second yellow card. He is sent off!")
team1playersm.remove(a)
team1midfieldd -= a[3]
team1midfielda -= a[0]
else:
aj = random.randint(1, 6)
if aj == 1:
print(a[4] + " fouls " + player2[4] + ". " + team2 + " gets a freekick from the middle, but no card is given.")
if aj == 2:
print("Freekick from the middle after a foul by " + a[4] + ".")
if aj == 3:
print("Challenge is poorly timed by " + a[4] + " and " + team2 + " gets a freekick")
if aj == 4:
print(a[4] + " caught more of the man than the ball and gives away a freekick")
if aj == 5:
print("Despite protests from many players on the team, the ref gives a freekick away after a challenge by " + a[4] + ".")
if aj == 6:
print(a[4] + "'s foot bounces off the ball onto the leg of " + player2[4] + ", showing studs and giving away a freekick.")
if entered == 0:
z = random.randint(1, 100)
if z < 15:
ball = -4
x = random.randint(1, 4)
player = random.choice(team2playersa)
if x == 1:
print("A beautiful ball by " + player2[4] + " arrives right at the feet of " + player[3] + " in the box.")
if x == 2:
print(player2[4] + " sees an opportunity and seizes it, passing the ball into the box at the feet of " + player[3] + ".")
if x == 3:
print(player2[4] + " sails the ball over the heads of the midfield of " + team1 + " right to the feet of " + player[3] + ".")
if x == 4:
print("What a spectacular through ball by " + player2[4] + " to give a ball right at the feet of " + player[3] + " in the box!")
elif z < 70:
ball = -3
x = random.randint(1, 4)
player = random.choice(team2playersa)
if x == 1:
print(player2[4] + " plays a dangerous through ball to " + player[3] + " at the edge of the box.")
if x == 2:
print(player[3] + " receives a smart ball from " + player2[4] + " and is in open space near the box.")
if x == 3:
print(player2[4] + " advances with the ball and plays it off to " + player[3] + " who is in a dangerous position.")
if x == 4:
print(player2[4] + " plays a quick pass to " + player[3] + " who uses it well and starts advancing near the box.")
else:
ball = -2
player = random.choice(team2playersm)
while player == player2:
player = random.choice(team2playersm)
fh = random.randint(1, 3)
if fh == 1:
print(team2 + " are currently content to keep possession, and " + player2[4] + " passes it laterally to " + player[4])
if fh == 2:
print(player2[4] + " passes the ball to " + player[4] + ".")
if fh == 3:
print("The ball remains in the middle, as " + player2[4] + " gives the ball to " + player[4] + ".")
player2 = player
elif ball == -3:
entered = 0
for x in team1playersd:
ks = random.randint(1, 100)
if ks < 5 * (x[1]) and entered == 0:
entered = 1
ball = -6
b = random.randint(1, 3)
if b == 1:
print(x[3] + " tries to stop the threat with a challenge, but gives away a freekick.")
if b == 2:
print(player2[3] + " goes down and gets a freekick")
if b == 3:
print(x[3] + " gets more of " + player2[3] + "'s leg than the ball and gives away a freekick.")
b = random.randint(1, 3)
if b == 1:
time.sleep(3)
print("")
x[4] += 1
if x[4] == 2:
print("The ref gives a second yellow card to " + x[3] + ", and he is sent off.")
team1playersd.remove(x)
team1defense -= x[0]
else:
y = random.randint(1, 3)
if y == 1:
print("The ref gives a yellow card to " + x[3])
if y == 2:
print(x[3] + " gets a yellow card for this challenge.")
if y == 3:
print("On top of the freekick given away, " + x[3] + " gets a yellow card.")
pass
if entered == 0:
x = random.randint(1, 100)
if x < 14 * (team1defense) / (player2[0]):
ball = 9
en = random.randint(1, 4)
player = random.choice(team1playersd)
if en == 1:
print(player2[3] + " gets the ball stripped off him by " + player[3] + ".")
if en == 2:
print(player2[3] + " pulls his foot back for a shot, but " + player[3] + " steals the ball with an excellent sliding tackle.")
if en == 3:
print(player2[3] + " dribbles around one player, but makes a mistake and " + player[3] + " gets the ball.")
if en == 4:
print(player2[3] + " shoots, but it is blocked by the defense, and " + player[3] + " gets the rebound")
elif x < 65:
ball = -4
player = random.choice(team2playersa)
while player == player2:
player = random.choice(team2playersa)
lkd = random.randint(1, 3)
if lkd == 1:
print(player2[3] + " plays a through ball into the box straight to the feet of " + player[3] + ".")
if lkd == 2:
print(player[3] + " receives a brilliant ball from " + player2[3] + " in a dangerous position in the box")
if lkd == 3:
print("Danger for " + team1 + " as a ball comes from " + player2[3] + " to " + player[3] + " in the box.")
else:
x = random.randint(1, 100)
kda = random.randint(1, 3)
shot2 += 1
if kda == 1:
print(player[3] + " takes a shot at goal.")
if kda == 2:
print(player[3] + " puts his foot behind the ball and hits a powerful shot.")
if kda == 3:
print(player[3] + " is the fortunate reciever of bad marking, and manages to get into space for a clear shot.")
time.sleep(4)
print("")
if x < 40 * (player[0] / T1K[0]):
target2 += 1
ksj = random.randint(1, 4)
ball = -11
if ksj == 1:
print(player[3] + " sails the ball into the top right corner and scores!")
if ksj == 2:
print(player[3] + "'s shot takes a deflection, and goes past the goalkeeper into the bottom right corner!")
if ksj == 3:
print(player[3] + " hits a brilliant shot that curves into the top left corner past the goalkeeper's outstretched arms.")
if ksj == 4:
print(player[3] + "'s ball travels through the legs of defenders, making it difficult for the keeper to follow, and goes into the goal!")
else:
ksk = random.randint(1, 150)
if ksk < 8 * T1K[0]:
ball = 10
dkd = random.randint(1, 3)
if dkd == 1:
print(T1K[3] + "makes a great save and regains control of the ball.")
if dkd == 2:
print("A goal looked certain for a moment, but in the end, " + T1K[3] + " was able to barely grab the ball.")
if dkd == 3:
print("The save was fairly simple for " + T1K[3] + " as the shot failed to find the top corner of the goal.")
target2 += 1
else:
ball = -5
jfd = random.randint(1, 150)
if jfd < 10 * player[0]:
kds = random.randint(1, 3)
if kds == 1:
print(T1K[3] + " palms the ball wide, and it goes out for a corner.")
if kds == 2:
print(T1K[3] + " barely gets to the ball, but in the end, is able to nudge the ball out for a corner.")
if kds == 3:
print(T1K[3] + "'s fingertips just reach the ball, and it goes out for a corner.")
target2 += 1
else:
ball = 10
kd = random.randint(1, 3)
if kd == 1:
print(player[3] + "'s shot had some power behind it, but was unaccurate and went over the bar for a goalkick.")
if kd == 2:
print(player[3] + "'s shot barely goes wide, nearly bringing a brilliant goal.")
if kd == 3:
print(player[3] + "'s shot had just a bit too much curve, and went wide for a goalkick.")
player2 = player
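# ball == -4: a team 2 attacker has the ball in team 1's box; each defender risks conceding a penalty, otherwise the attacker shoots at T1K.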
elif ball == -4:
entered = 0
for a in team1playersd:
x = random.randint(1, 25)
if x < a[1]:
entered = 1
ball = -8
h = random.randint(1, 3)
if h == 1:
print(a[3] + " slides into " + player[3] + "'s legs, completely missing the ball. PENALTY!")
if h == 2:
print(a[3] + " tries and fails to get the ball from " + player[3] + " but gets a lot of the player. The ref says PENALTY")
if h == 3:
print(a[3] + " tries to pull " + player[3] + " back, and in the end succeeds, but the ref gives a PENALTY for the poor challenge.")
x = random.randint(1, 3)
if x == 1:
a[4] += 1
if a[4] == 2:
print("The ref gives a second yellow card to " + a[3] + ", and he is sent off.")
team1playersd.remove(a) # 'a' was drawn from team1playersd, so remove him from the defenders
team1midfieldd -= a[1] # subtract a numeric rating; a[3] is the name string and would raise a TypeError
team1midfielda -= a[0]
else:
time.sleep(3)
print("")
y = random.randint(1, 3)
if y == 1:
print("The ref gives a yellow card to " + a[3])
if y == 2:
print(a[3] + " gets a yellow card for this challenge.")
if y == 3:
print("On top of the penalty given away, " + a[3] + " gets a yellow card.")
pass
x = random.randint(1, 100)
if entered == 0:
shot2 += 1
if x < 40 * (player[0] / T1K[0]):
ball = -11
h = random.randint(1, 3)
target2 += 1
if h == 1:
print(T1K[3] + " rushes out to tackle " + player2[3] + " but he slides the ball through the goalkeeper's legs. GOAL!")
if h == 2:
print(player2[3] + " weaves round defenders and shoots the ball into the corner of the goal.")
if h == 3:
print("Poor defending leaves " + player2[3] + " in empty space, and he seizes the opportunity, putting the ball in the bottom left corner of the goal.")
elif x < 70 * (player[0] / T1K[0]):
ball = -5
kds = random.randint(1, 3)
target2 += 1
if kds == 1:
print(T1K[3] + " palms the ball wide, and it goes out for a corner.")
if kds == 2:
print(T1K[3] + " barely gets to the ball, but in the end, is able to nudge the ball out for a corner.")
if kds == 3:
print(T1K[3] + "'s fingertips just reach the ball, and it goes out for a corner.")
else:
ball = 10
x = random.randint(1, 2)
if x == 1:
kd = random.randint(1, 3)
if kd == 1:
print(player2[3] + "'s shot had some power behind it, but was unaccurate and went over the bar for a goalkick.")
if kd == 2:
print(player2[3] + "'s shot barely goes wide, nearly bringing a brilliant goal.")
if kd == 3:
print(player2[3] + "'s shot had just a bit too much curve, and went wide for a goalkick.")
if x == 2:
dkd = random.randint(1, 3)
if dkd == 1:
print(T1K[3] + "makes a great save and regains control of the ball.")
if dkd == 2:
print("A goal looked certain for a moment, but in the end, " + T1K[3] + " was able to barely grab the ball.")
if dkd == 3:
print("The save was fairly simple for " + T1K[3] + " as the shot failed to find the top corner of the goal.")
player2 = player
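# ball == -5: corner kick for team 2; a taker is picked, the delivery is contested in the air, and the header is resolved against T1K.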
elif ball == -5:
y = random.randint(1, 2)
if y == 1:
player = random.choice(team2playersa)
print(player[3] + " will take the corner.")
if y == 2:
player = random.choice(team2playersm)
print(player[4] + " decides to take the corner.")
time.sleep(3)
print("")
x = random.randint(1, 100)
arrived = 0
if x > player[0] * 9:
ball = -10
y = random.randint(1, 3)
if y == 1:
print("The ball goes straight towards " + T2K[3] + ".")
if y == 2:
print("The ball flies way long, and goes out for a goalkick.")
if y == 3:
print("No one is near the ball, leading it to go wide for a goalkick.")
else:
y = random.randint(1, 4)
if y == 1:
print("A promising ball flies into the box.")
elif y == 2:
print("The corner is good and curves towards the goal.")
elif y == 3:
print("The corner heads towards the middle of the box.")
elif y == 4:
print("It is well taken, and it curves into the middle of the box.")
time.sleep(4)
print("")
x = random.randint(1, 100)
if x < 4 * T1K[0]:
y = random.randint(1, 3)
ball = -10
if y == 1:
print(T1K[3] + " comes out and grabs the ball.")
elif y == 2:
print("The ball strays too close to the goal and " + T1K[3] + " is able to grab it.")
elif y == 3:
print(T1K[3] + " takes a risk and comes out, but he is able to get the ball.")
else:
x = random.randint(1, 100)
b = random.randint(1, 2)
if b == 1:
player = random.choice(team2playersa)
k = 3
else:
player = random.choice(team2playersm)
k = 4
b = random.randint(1, 2)
if b == 1:
player2 = random.choice(team1playersm)
n = 3
h = 4
else:
player2 = random.choice(team1playersd)
n = 0
h = 3
if x < 30 * (player2[n] / player[0]):
ball = -10
y = random.randint(1, 3)
if y == 1:
print(player2[h] + " is able to fend off " + player[k] + " and the ball goes out for a goalkick.")
elif y == 2:
print(player2[h] + " and " + player[k] + " compete for the ball, and in the end, neither of them get it, and it goes out for a goalkick")
elif y == 3:
print(player2[h] + " successfully defends " + player[k] + " and the ball flies long for a goalkick.")
elif x < 50 * (player2[n] / player[0]):
ball = -5
y = random.randint(1, 3)
if y == 1:
print(player2[h] + " is able to prevent " + player[k] + " from getting the ball, and heads it out for another corner.")
elif y == 2:
print(player2[h] + " gets to the ball first, but can only head it back over the goal for another corner.")
elif y == 3:
print(player[k] + " looked dangerous for a second, but " + player2[h] + " heads the ball long for another corner.")
else:
y = random.randint(1, 4)
if y == 1:
print(player2[h] + " is not able to fend off " + player[k] + " who heads the ball at the goal.")
elif y == 2:
print(player[k] + " beats " + player2[h] + " and heads the ball towards the bottom corner.")
elif y == 3:
print(player[k] + " jumps high in the air and heads it towards the goal.")
else:
print(player[k] + " and " + player2[h] + " both jump for the ball, but in the end " + player[k] + " wins it and heads it towards the goal.")
print("")
time.sleep(5)
shot2 += 1
x = random.randint(1, 100)
if x < 7 * T1K[0]:
target2 += 1
ball = -10
y = random.randint(1, 3)
if y == 1:
print(T1K[3] + " makes a brilliant save, and is able to hold onto the ball.")
elif y == 2:
print(T1K[3] + " leaps and is just able to grab the ball with his outstretched arms.")
elif y == 3:
print(T1K[3] + " reacts quickly and is able to hug the ball to his chest.")
else:
target2 += 1
ball = -11 # team 2 scores here; 11 would credit the goal to team 1
y = random.randint(1, 3)
if y == 1:
print(T1K[3] + " lungs for the ball, but he is too slow and it goes into the back of the net.")
elif y == 2:
print(T1K[3] + " dives towards the ball, but is too slow, and the ball flies into the back of net.")
elif y == 3:
print("The shot is accurate, and " + T1K[3] + " barely misses the chance to push it wide with his fingertips.")
elif ball == -6:
x = str(input("Who do you want to take the freekick? "))
print("")
playerchosen = 0
for a in team2playersa:
if x == a[3]:
player = a
playerchosen = 1
print(a[3] + " will take the freekick.")
for a in team2playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the freekick.")
time.sleep(3)
print("")
shot2 += 1
while playerchosen == 0:
print("Please give the player's full name, with caps and spaces. It also must be an attacker or midfielder. Please add (M) or (A) after their name. ")
time.sleep(1)
print("")
x = str(input("Who do you want to take the freekick? "))
time.sleep(2)
playerchosen = 0
for a in team2playersa:
if x == a[3]:
player = a
playerchosen = 1
print(a[3] + " will take the freekick.")
time.sleep(3)
print("")
for a in team2playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the freekick.")
time.sleep(3)
print("")
x = random.randint(1, 100)
if x > 9 * player[0]:
b = random.randint(1, 3)
if b == 1:
print("The kick is poor and the ball sails over the goal.")
elif b == 2:
print("The freekick had some power to it, but the aim was off, and the ball is sent into the stands.")
elif b == 3:
print("The ball has some curl on it, but not enough, and goes past the goal for a goalkick.")
ball = 10
else:
x = random.randint(1, 100)
if x < 6 * T1K[1]:
target2 += 1
ball = 10
b = random.randint(1, 3)
if b == 1:
print("It was a decent shot, but a comfortable save for " + T1K[3] + " nonetheless.")
if b == 2:
print("The ball curls towards the top left corner, but a fantastic save by " + T1K[3] + " ends up with the ball in his hands.")
if b == 3:
print("The ball has some power on it, but " + T1K[3] + " is able to grab the ball.")
elif x < 8 * T1K[0]:
b = random.randint(1, 2)
target2 += 1
ball = -5
if b == 1:
print("The ball has spin and power, but " + T1K[3] + " is able to knock it wide for a corner.")
if b == 2:
print("The ball curves towards the top corner, but " + T1K[3] + " just gets a hand to it and knocks it wide for a corner.")
else:
ball = -11
target2 += 1
b = random.randint(1, 3)
if b == 1:
print("GOAL! The ball flies into the corner, and there was nothing " + T1K[3] + " could do to stop it.")
if b == 2:
print("He really got his foot behind it. The ball flies past " + T1K[3] + "'s fingertips into the goal.")
if b == 3:
print("An unfortunate deflection for " + team1 + " ends up with the ball in the back of the net.")
player2 = player
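# ball == -7: deep freekick for team 2, too far out to shoot; a random midfielder restarts play with a pass.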
elif ball == -7:
player2 = random.choice(team2playersm)
x = random.randint(1, 4)
if x == 1:
print(player2[4] + " takes the freekick from the middle quickly.")
elif x == 2:
print(team2 + " decide to move with speed, and " + player2[4] + " is quick to take the freekick.")
elif x == 3:
print("The freekick has little menace, but " + team2 + " are happy for the possession. " + player2[4] + " will take the freekick.")
elif x == 4:
print("The freekick is much to far away for a shot, and will be taken as a pass. It will be taken by " + player2[4])
x = random.randint(1, 100)
time.sleep(3.4)
print("")
if x < 30 / player2[0]: # the interception chance depends on the passer (player2), not a stale 'player'
ball = 2
b = random.randint(1, 2)
x = random.randint(1, 100)
if x < 100 * (T1M1[0] / team1midfieldd):
player = T1M1
elif x < 100 * ((T1M1[0] + T1M2[0]) / team1midfieldd):
player = T1M2
else:
player = T1M3
if b == 1:
print("The pass by " + player2[4] + " is horrible and is intercepted by " + player[4] + ".")
if b == 2:
print(player2[4] + "'s pass was weak and inacurate, and it was intercepted by " + player[4] + ".")
else:
x = random.randint(1, 100)
if x < 11:
x = random.randint(1, 100)
if x < 6 * player2[0]:
x = random.randint(1, 100)
if x < 100 * (T1M1[0] / team1midfieldd):
player = T1M1
elif x < 100 * ((T1M1[0] + T1M2[0]) / team1midfieldd):
player = T1M2
else:
player = T1M3
ball = 2
b = random.randint(1, 2)
if b == 1:
print(player2[4] + " attempts a long pass to right outside the box, but it is intercepted by " + player[4] + ".")
if b == 2:
print(player2[4] + " sees an opportunity for a long pass to right outside the box, but " + player[4] + " cuts it out.")
else:
x = random.randint(1, 100)
ball = -3
if x < 100 * (T2A1[0] / team2attack):
player = T2A1
elif x < 100 * ((T2A1[0] + T2A2[0]) / team2attack):
player = T2A2
else:
player = T2A3
while player2 == player:
x = random.randint(1, 100)
if x < 100 * (T2A1[0] / team2attack):
player = T2A1
elif x < 100 * ((T2A1[0] + T2A2[0]) / team2attack):
player = T2A2
else:
player = T2A3
b = random.randint(1, 2)
if b == 1:
print(player2[4] + " sees an opportunity, and curves a ball up to " + player[3] + " who is right outside the box.")
elif b == 2:
print(player2[4] + " passes it over the heads of the opponent midfielders straight to the feet of " + player[3] + " who is standing right outside the box.")
else:
player = random.choice(team2playersm)
while player2 == player:
player = random.choice(team2playersm)
x = random.randint(1, 3)
ball = -2
if x == 1:
print(player2[4] + " plays a simple ball to " + player[4] + ".")
if x == 2:
print(player2[4] + " plays a short pass to " + player[4] + ".")
elif x == 3:
print("The game resumes with " + player2[4] + " passing to " + player[4] + ".")
player2 = player
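# ball == -8: penalty to team 2; the user picks the taker, then the kick, the keeper's dive and any rebound are resolved against T1K.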
elif ball == -8:
x = str(input("Who do you want to take the penalty? "))
print("")
playerchosen = 0
for a in team2playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
print(a[3] + " will take the penalty.")
for a in team2playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the penalty.")
n = 4
print("")
shot2 += 1
time.sleep(3)
while playerchosen == 0:
print("")
print("Please give the player's full name, with caps and spaces. It also must be an attacker or midfielder. Please add (M) or (A) after their name. ")
time.sleep(1)
x = str(input("Who do you want to take the penalty? "))
print("")
playerchosen = 0
time.sleep(2)
for a in team2playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
print(a[3] + " will take the penalty.")
time.sleep(3)
print("")
for a in team2playersm:
if x == a[4]:
player = a
playerchosen = 1
n = 4
print(a[4] + " will take the penalty.")
time.sleep(3)
print("")
x = random.randint(1, 2)
if x == 1:
print(T1K[3] + " dives right.")
dive = 1
elif x == 2:
print(T1K[3] + " dives left.")
dive = 2
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 45:
print(player[n] + " shoots left.")
shoot = 2
elif x > 55:
print(player[n] + " shoots right.")
shoot = 1
else:
print(player[n] + " goes down the middle.")
shoot = 6
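# Direction encoding: right = 1, left = 2, middle = 6. Matching codes mean the keeper guessed correctly; a difference of 1 means he dived the wrong way; the middle shot falls through to the feet-save branch below.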
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 50 / player[2]:
x = random.randint(1, 3)
if x == 1:
print("The ball ricochets off the post!")
if x == 2:
print("The shot has little aim, and the ball is sent long.")
if x == 3:
print("He went for the perfect shot, but ended up completely missing the goal.")
ball = 10
if ball != 10:
target2 += 1
if shoot == dive:
x = random.randint(1, 100)
if x < (40 * player[2] / T1K[1]):
ball = -12
b = random.randint(1, 3)
if b == 1:
print(player[n] + " just sails the ball past the goalkeepers outstretched arms.")
if b == 2:
print(T1K[3] + "'s fingers are mere inches away from the ball, but it sails into the net.")
if b == 3:
print("The ball flies into the top corner, and the goalkeeper cannot save it.")
elif x < (60 * player[2] / T1K[1]): # compare against x; a bare expression here is always truthy
x = random.randint(1, 3)
if x == 1:
print(T1K[3] + " is able to just get his palms to it, and the ball rebounds back out into the attacking players.")
elif x == 2:
print(T1K[3] + " is near the ball, but isn't able to grab the ball, and it rebounds off back into the fray of players.")
elif x == 3:
print("The ball doesn't go wide enough to make it in, but " + T1K[3] + " isn't able to grab it, and it bounces off his palms.")
x = random.randint(1, 100)
print("")
time.sleep(3)
if x < 15 * (team2attack + team2midfielda + 7 * player[0]) / (team1defense):
x = random.randint(1, 100)
if x < (800 * player[0]) / (team2attack + team2midfielda + 7 * player[0]):
player = player # the taker follows up his own shot; keep his name index n unchanged
elif x < 100 * (team2attack - player[0]) / (team2attack + team2midfielda + 7 * player[0]): # scale to the 1-100 roll
player2 = random.choice(team2playersa)
while player2 == player:
player2 = random.choice(team2playersa)
player = player2
n = 3
else:
player2 = random.choice(team2playersm)
while player2 == player:
player2 = random.choice(team2playersm)
player = player2
n = 4
x = random.randint(1, 100)
if x < 70 * player[0] / T1K[1]:
ball = -11
b = random.randint(1, 2)
shot2 += 1
target2 += 1
if b == 1:
print(player[n] + " gets the ball on the rebound, and is able to put it past " + T1K[3] + " for his team.")
if b == 2:
print(player[n] + " lunges to get ther rebound and kicks it in the net.")
else:
ball = 10
b = random.randint(1, 2)
shot2 += 1
target2 += 1
if b == 1:
print(player[n] + "gets the rebound and shoots, but " + T1K[3] + " is able to barely get back and grab hold of the ball.")
if b == 2:
print(player[n] + " lunges and shoots on the rebound, but " + T1K[3] + " is just able to get back and grabs the ball.")
else:
ball = 9
x = random.randint(1, 100)
if x < (100 * T1D1[0]) / (team1defense): # 'Team1Defense' was undefined; use the lowercase aggregate
player = T1D1
elif x < (100 * (T1D1[0] + T1D2[0])) / (team1defense): # cumulative weighting, as in the midfield picks
player = T1D2
else:
player = T1D3
b = random.randint(1, 3)
if b == 1:
print(player[3] + " is able to get to the ball first.")
if b == 2:
print(player[3] + " reaches the ball and is in a position to clear it.")
if b == 3:
print(player[3] + " lunges for the ball and gains possession.")
else:
ball = 10
b = random.randint(1, 4)
if b == 1:
print(T1K[3] + " is able to grab onto the ball, negating the threat.")
elif b == 2:
print(player[n] + " hangs his head in his hands, as " + T1K[3] + "blocks his shot.")
elif b == 3:
print("The shot is weak, and " + T1K[3] + " is able to grab it")
else:
print("The ball flies into the bottom corner, but somehow, " + T1K[3] + "is able to grab it.")
elif shoot - dive == 1 or shoot - dive == -1:
ball = -11
b = random.randint(1, 5)
if b == 1:
print("The ball sails into the bottom corner uncontested.")
elif b == 2:
print("The shot was mediocre, but it goes in nonetheless.")
elif b == 3:
print("All " + T1K[3] + " can do is watch as the ball rolls into the bottom corner.")
elif b == 4:
print("The shot likely wouldn't have been blocked anyway, but the goalkeeper diving the wrong way solidifies the goal.")
else:
print("The shot is perfect, flying into the top corner.")
else:
x = random.randint(1, 100)
if x < (60 * player[2] / T1K[1]):
ball = -12
b = random.randint(1, 3)
if b == 1:
print(a[n] + " just sails the ball past the goalkeepers outstretched legs, with nothing he can do to stop it.")
if b == 2:
print(T1K[3] + "'s feet are mere inches away from the ball, but it sails into the net, as his forward momentum prevents him from blocking it.")
if b == 3:
print("The ball flies into the goal behind him, and the goalkeeper cannot save it.")
elif x < (70 * player[2] / T1K[1]): # compare against x; a bare expression here is always truthy
x = random.randint(1, 3)
if x == 1:
print(T1K[3] + " is able to just get his feet to it, and the ball rebounds back out into the attacking players.")
elif x == 2:
print(T1K[3] + "'s feet are near the ball, and it rebounds off back into the fray of players.")
elif x == 3:
print("The ball doesn't go high enough to make it in, but " + T1K[3] + " isn't able to knock it far with his feat, and it bounces off into the fray of players.")
x = random.randint(1, 100)
print("")
time.sleep(3)
if x < 15 * (team2attack + team2midfielda + 7 * player[0]) / (team1defense):
x = random.randint(1, 100)
if x < (800 * player[0]) / (team2attack + team2midfielda + 7 * player[0]):
player = player # the taker follows up his own shot; keep his name index n unchanged
elif x < 100 * (team2attack - player[0]) / (team2attack + team2midfielda + 7 * player[0]): # scale to the 1-100 roll
player2 = random.choice(team2playersa)
while player2 == player:
player2 = random.choice(team2playersa)
player = player2
n = 3
else:
player2 = random.choice(team2playersm)
while player2 == player:
player2 = random.choice(team2playersm)
player = player2
n = 4
x = random.randint(1, 100)
if x < 70 * player[0] / T1K[1]:
ball = -11
b = random.randint(1, 2)
if b == 1:
print(player[n] + " gets the ball on the rebound, and is able to put it past " + T1K[3] + " for his team.")
if b == 2:
print(player[n] + " lunges to get ther rebound and kicks it in the net.")
shot2 += 1
target2 += 1
else:
ball = 10
b = random.randint(1, 2)
if b == 1:
print(player[n] + "gets the rebound and shoots, but " + T1K[3] + " is able to barely get back and grab hold of the ball.")
if b == 2:
print(player[n] + " lunges and shoots on the rebound, but " + T1K[3] + " is just able to get back and grabs the ball.")
shot2 += 1
target2 += 1
else:
ball = 9
x = random.randint(1, 100) # 'randint' alone is undefined; qualify with the module
if x < (100 * T1D1[0]) / (team1defense): # 'Team1Defense' was undefined; use the lowercase aggregate
player = T1D1
elif x < (100 * (T1D1[0] + T1D2[0])) / (team1defense): # cumulative weighting, as in the midfield picks
player = T1D2
else:
player = T1D3
b = random.randint(1, 3)
if b == 1:
print(player[3] + " is able to get to the ball first.")
if b == 2:
print(player[3] + " reaches the ball and is in a position to clear it.")
if b == 3:
print(player[3] + " lunges for the ball and gains possession.")
else:
ball = 10
b = random.randint(1, 4)
if b == 1:
print(T1K[3] + " is able to grab onto the ball, negating the threat.")
elif b == 2:
print(player[n] + " hangs his head in his hands, as " + T1K[3] + "blocks his shot.")
elif b == 3:
print("The shot is weak, and " + T1K[3] + " is able to grab it")
else:
print("The ball flies into the bottom corner, but somehow, " + T1K[3] + "is able to grab it.")
player2 = player
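# ball == -9: a team 2 defender looks for a pass into midfield, which team 1's midfield may intercept.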
elif ball == -9:
x = random.randint(1, 100)
if x < 8 * (team1midfieldd) / (player2[0]):
ball = 2
player = random.choice(team1playersm)
b = random.randint(1, 3)
if b == 1:
print(player2[3] + " tried to pass to midfield but " + player[4] + " intercepts the pass.")
elif b == 2:
print(player[4] + " cuts out the pass towards midfield of " + player2[3] + ".")
else:
print(player2[3] + "'s pass is poor, and it gets cut out by " + player[4] + ".")
else:
player = random.choice(team2playersm)
b = random.randint(1, 4)
ball = -2
if b == 1:
print(player2[3] + " passes to " + player[4])
elif b == 2:
print(player2[3] + " passes into midfield, where " + player[4] + " waits.")
elif b == 3:
print(player2[3] + " hits the ball towards " + player[4])
else:
print(player2[3] + " surveys his options and decides to pass to " + player[4])
player2 = player
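# ball == -10: goalkick / keeper distribution for team 2; T2K's pass can be cut out by team 1's attackers or midfielders.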
elif ball == -10:
x = random.randint(1, 100)
if x < (8 * team1attack / T2K[0]):
b = random.randint(1, 3)
ball = 3
player = random.choice(team1playersa)
# TODO: add accurate chances of players intercepting; add different outcomes of a messed-up pass
if b == 1:
print(T2K[3] + " completely messes up the pass to his back line and passes to " + player[3] + " right outside the box.")
if b == 2:
print(T2K[3] + "'s pass is in accurate, and " + player[3] + " cuts it out right outside the box.")
if b == 3:
print(player[3] + "sees the intended pass and is able to get to it quickly, gaining possession of the ball right outside the box.")
elif x < 18 * (team1attack / T2K[0]):
ball = 2
player = random.choice(team1playersm)
b = random.randint(1, 3)
if b == 1:
print(T2K[3] + " kicks it to the middle, and " + player[4] + " intercepts the pass.")
elif b == 2:
print("The pass to the middle by " + T2K[3] + " is cut out by " + player[4])
elif b == 3:
print(player[4] + " sees the pass to the middle by the keeper and cuts it out.")
elif x < 40 * (team1attack / T2K[0]):
ball = -9
b = random.randint(1, 3)
player = random.choice(team2playersd)
if b == 1:
print(T2K[3] + " passes to " + player[3])
elif b == 2:
print(player[3] + " receives the ball from " + T2K[3])
elif b == 3:
print(T2K[3] + " plays the ball short to " + player[3])
else:
ball = -2
b = random.randint(1, 3)
player = random.choice(team2playersm)
if b == 1:
print(T2K[3] + " lobs it into the middle to " + player[4])
elif b == 2:
print(player[4] + " receives a long pass from " + T2K[3])
elif b == 3:
print(T2K[3] + " kicks it to " + player[4])
player2 = player
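# ball == -11 / 11: a goal has been scored; update the score, display it, and restart play with a kickoff (ball = 1 or -1).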
if ball == -11:
time.sleep(2.5)
print("")
print("GOALLLLLLLLLLLLLLLL!")
time.sleep(1)
print("")
team2score += 1
print(team1 + " - " + team2)
print("")
print(" " + str(team1score) + " - " + str(team2score) + " ")
ball = 1
if ball == 11:
time.sleep(2.5)
print("")
print("GOALLLLLLLLLLLLLLLL!")
time.sleep(1)
print("")
team1score += 1
print(team1 + " - " + team2)
print("")
print(" " + str(team1score) + " - " + str(team2score) + " ")
ball = -1
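# The half-time (and full-time) whistle only blows once play reaches one of the neutral states checked below; otherwise the deadline is pushed back two ticks.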
if us == half:
ball3 = abs(ball)
done = 0
if ball3 == 2 or ball3 == 7:
done = 1
elif ball3 == 9 or ball3 == 10:
done = 1
elif ball3 == 1:
done = 1
if done == 1:
time.sleep(3)
print("")
print("Half Time")
time.sleep(3)
print(" " + str(team1score) + " - " + str(team2score))
time.sleep(3)
print("")
x = math.floor((100 / 45) * possession1)
y = 100 - x
print("Possession: " + str(x) + " - " + str(y))
print("Shots: " + str(shot1) + " - " + str(shot2))
print("Shots on target: " + str(target1) + " - " + str(target2))
else:
half += 2
if us == full:
ball3 = abs(ball)
done = 0
if ball3 == 2 or ball3 == 7:
done = 1
elif ball3 == 9 or ball3 == 10:
done = 1
elif ball3 == 1:
done = 1
if done == 0:
full += 2
print("")
print("Game Over")
time.sleep(3)
print(" " + str(team1score) + " - " + str(team2score))
time.sleep(3)
print("")
x = math.floor((100 / 90) * possession1) # full time spans 90 ticks, so scale by 100/90 rather than the half-time 100/45
y = 100 - x
print("Possession: " + str(x) + " - " + str(y))
print("Shots: " + str(shot1) + " - " + str(shot2))
print("Shots on target: " + str(target1) + " - " + str(target2))
time.sleep(6)
print("")
if team1score - team2score > 2:
destroyed1 = [team2 + " left in shambles after this defeat.", "Players on " + team2 + " return to the locker room with tears in their eyes after this performance.", "The stadium fills with mocking chants as " + team2 + " players leave the field.",
"Grins are all that can be found on players of " + team1 + " as they return home.", "What a day for " + team2 + ". Simply nothing has gone their way today.",
"Players of " + team2 + " are shocked, as they began the match as favorites, but actually lost it " + str(team1score) + " - " + str(team2score)]
print(random.choice(destroyed1))
elif team1score - team2score < -2:
destroyed2 = [team1 + " left in shambles after this defeat.", "Players on " + team1 + " return to the locker room with tears in their eyes after this performance.", "The stadium fills with mocking chants as " + team1 + " players leave the field.",
"Grins are all that can be found on players of " + team2 + " as they return home.", "What a day for " + team1 + ". Simply nothing has gone their way today.",
"Players of " + team1 + " are shocked, as they began the match as favorites, but actually lost it " + str(team1score) + " - " + str(team2score)]
print(random.choice(destroyed2))
elif team1score - team2score > 0:
won1 = ["It was generally strong show from " + team1 + " who at times looked unconfident, but pulled through in the end",
"While it was a fairly even match for most of the game, it was " + team1 + " who took the opportunities they were given and converted them to goals.",
" The scoreline of the game was relatively close, but it was " + team1 + " who had the advantage in techinique and persistance.",
" Many fans concerned by the weakness shown by " + team1 + " at times, but mostly, they were fairly convinving"]
print(random.choice(won1))
elif team1score - team2score < 0:
won2 = ["It was generally strong show from " + team2 + " who at times looked unconfident, but pulled through in the end",
"While it was a fairly even match for most of the game, it was " + team2 + " who took the opportunities they were given and converted them to goals.",
" The scoreline of the game was relatively close, but it was " + team2 + " who had the advantage in techinique and persistance.",
" Many fans concerned by the weakness shown by " + team2 + " at times, but mostly, they were fairly convinving"]
print(random.choice(won2))
elif team1score == team2score:
tie = ["While each side had chances, the game ultimately ended in a draw, with neither team pulling ahead", "To decide this match, the teams head to a penalty shootout.",
"This was a important match for both sides, but neither team was able to get the win they wanted.", "Fans of both teams unconvinced after this showing by each side.", "Neither team is up. Time for penalties!",
"The tension reaches a max as the players prepare for penalties.", "Anxious faces all around now, as the players await penalties."]
print(random.choice(tie))
# penalties
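# Shootout bookkeeping: five kicks per side, continuing into sudden death; the loop ends once one side's lead exceeds the kicks the other side has left.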
team1goals = 0
team2goals = 0
team1shots = 0
team2shots = 0
h = 0
time.sleep(3)
print("")
while (team2goals - team1goals) < (6 - team1shots) and (team1goals - team2goals) < (6 - team2shots): # a side clinches once its lead exceeds the other side's remaining kicks
h += 1
if h == 1:
order = 'first'
if h == 2:
order = 'second'
if h == 3:
order = 'third'
if h == 4:
order = 'fourth'
if h == 5:
order = 'fifth'
if h == 6:
order = 'sixth'
if h == 7:
order = 'seventh'
if h == 8:
order = 'eighth'
if h == 9:
order = 'ninth'
if h == 10:
order = 'tenth'
if h == 11:
order = 'eleventh'
if h == 12:
order = 'twelfth'
team1shots += 1
team2shots += 1 # one kick per team per round; '+= 2' double-counted team 2's attempts
x = str(input("Who do you want to take the " + str(order) + " penalty " + team1 + "? "))
print("")
playerchosen = 0
for a in team1playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
time.sleep(3)
print(a[3] + " will take the penalty.")
print("")
for a in team1playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the penalty.")
n = 4
time.sleep(3)
print("")
shot1 += 1
while playerchosen == 0:
time.sleep(3)
print("")
print("Please give the player's full name, with caps and spaces. It also must be an attacker or midfielder. Please add (M) or (A) after their name. ")
time.sleep(1)
x = str(input("Who do you want to take the penalty? "))
print("")
playerchosen = 0
time.sleep(2)
for a in team1playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
print(a[3] + " will take the penalty.")
print("")
for a in team1playersm:
if x == a[4]:
player = a
playerchosen = 1
n = 4
print(a[4] + " will take the penalty.")
print("")
time.sleep(3)
x = random.randint(1, 2)
if x == 1:
print(T2K[3] + " dives right.")
dive = 1
elif x == 2:
print(T2K[3] + " dives left.")
dive = 2
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 45:
print(player[n] + " shoots left.")
shoot = 2
elif x > 55:
print(player[n] + " shoots right.")
shoot = 1
else:
print(player[n] + " goes down the middle.")
shoot = 6
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 50 / player[2]: # the chosen taker is 'player'; 'a' is a stale loop variable
x = random.randint(1, 3)
if x == 1:
print("The ball ricochets off the post!")
if x == 2:
print("The shot has little aim, and the ball is sent long.")
if x == 3:
print("He went for the perfect shot, but ended up completely missing the goal.")
ball = -10
else:
if shoot == dive:
x = random.randint(1, 100)
if x < (40 * player[2] / T2K[1]): # the chosen taker is 'player'; 'a' is a stale loop variable
ball = 11
b = random.randint(1, 3)
if b == 1:
print(player[n] + " just sails the ball past the goalkeepers outstretched arms.")
if b == 2:
print(T2K[3] + "'s fingers are mere inches away from the ball, but it sails into the net.")
if b == 3:
print("The ball flies into the top corner, and the goalkeeper cannot save it.")
team1goals += 1
target1 += 1
elif x < (60 * player[2] / T2K[1]): # compare against x and use the chosen taker's rating
x = random.randint(1, 3)
if x == 1:
print(T2K[3] + " is able to just get his palms to it, and the ball bounces away from the goal.")
elif x == 2:
print(T2K[3] + " is near the ball and just isn't able to grab the ball, but in the end it doesn't matter, and the ball rolls away from the goal.")
elif x == 3:
print("The ball doesn't go wide enough to make past the keeper, and while " + T2K[3] + " isn't able to grab it, and it bounces off his palms.")
else:
ball = -10
b = random.randint(1, 4)
if b == 1:
print(T2K[3] + " is able to grab onto the ball, negating the threat.")
elif b == 2:
print(player[n] + " hangs his head in his hands, as " + T2K[3] + "blocks his shot.")
elif b == 3:
print("The shot is weak, and " + T2K[3] + " is able to grab it")
else:
print("The ball flies into the bottom corner, but somehow, " + T2K[3] + "is able to grab it.")
elif shoot - dive == 1 or shoot - dive == -1:
ball = 11
b = random.randint(1, 5)
if b == 1:
print("The ball sails into the bottom corner uncontested.")
elif b == 2:
print("The shot was mediocre, but it goes in nonetheless.")
elif b == 3:
print("All " + T2K[3] + " can do is watch as the ball rolls into the bottom corner.")
elif b == 4:
print("The shot likely wouldn't have been blocked anyway, but the goalkeeper diving the wrong way solidifies the goal.")
else:
print("The shot is perfect, flying into the top corner.")
team1goals += 1
else:
x = random.randint(1, 100)
target1 += 1
if x < (60 * player[2] / T2K[1]): # the chosen taker is 'player'; 'a' is a stale loop variable
ball = 11
team1goals += 1
b = random.randint(1, 3)
if b == 1:
print(a[n] + " just sails the ball past the goalkeepers outstretched legs, with nothing he can do to stop it.")
if b == 2:
print(T2K[3] + "'s feet are mere inches away from the ball, but it sails into the net, as his forward momentum prevents him from blocking it.")
if b == 3:
print("The ball flies into the goal behind him, and the goalkeeper cannot save it.")
elif x < (70 * player[2] / T2K[1]): # compare against x and use the chosen taker's rating
x = random.randint(1, 3)
if x == 1:
print(T2K[3] + " is able to just get his palms to it, and the ball bounces away from the goal.")
elif x == 2:
print(T2K[3] + " is near the ball and just isn't able to grab the ball, but in the end it doesn't matter, and the ball rolls away from the goal.")
elif x == 3:
print("The ball doesn't go wide enough to make past the keeper, and while " + T2K[3] + " isn't able to grab it, and it bounces off his palms.")
if (team2goals - team1goals) < (6 - team1shots) and (team1goals - team2goals) < (6 - team2shots):
time.sleep(3)
print("")
x = str(input("Who do you want to take the " + str(order) + " penalty " + team2 + "? "))
print("")
playerchosen = 0
for a in team2playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
print(a[3] + " will take the penalty.")
for a in team2playersm:
if x == a[4]:
player = a
playerchosen = 1
print(a[4] + " will take the penalty.")
n = 4
print("")
shot2 += 1
time.sleep(3)
while playerchosen == 0:
print("")
print("Please give the player's full name, with caps and spaces. It also must be an attacker or midfielder. Please add (M) or (A) after their name. ")
time.sleep(1)
x = str(input("Who do you want to take the penalty? "))
print("")
playerchosen = 0
time.sleep(2)
for a in team2playersa:
if x == a[3]:
player = a
playerchosen = 1
n = 3
print(a[3] + " will take the penalty.")
time.sleep(3)
print("")
for a in team2playersm:
if x == a[4]:
player = a
playerchosen = 1
n = 4
print(a[4] + " will take the penalty.")
time.sleep(3)
print("")
x = random.randint(1, 2)
if x == 1:
print(T1K[3] + " dives right.")
dive = 1
elif x == 2:
print(T1K[3] + " dives left.")
dive = 2
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 45:
print(player[n] + " shoots left.")
shoot = 2
elif x > 55:
print(player[n] + " shoots right.")
shoot = 1
else:
print(player[n] + " goes down the middle.")
shoot = 6
time.sleep(3)
print("")
x = random.randint(1, 100)
if x < 50 / player[2]:
x = random.randint(1, 3)
if x == 1:
print("The ball ricochets off the post!")
if x == 2:
print("The shot has little aim, and the ball is sent long.")
if x == 3:
print("He went for the perfect shot, but ended up completely missing the goal.")
ball = 10
else:
target2 += 1
if shoot == dive:
x = random.randint(1, 100)
if x < (40 * player[2] / T1K[1]):
ball = -12
b = random.randint(1, 3)
if b == 1:
print(player[n] + " just sails the ball past the goalkeepers outstretched arms.")
if b == 2:
print(T1K[3] + "'s fingers are mere inches away from the ball, but it sails into the net.")
if b == 3:
print("The ball flies into the top corner, and the goalkeeper cannot save it.")
team2goals += 1
elif x < (60 * player[2] / T1K[1]): # compare against x; a bare expression here is always truthy
x = random.randint(1, 3)
if x == 1:
print(T2K[3] + " is able to just get his palms to it, and the ball bounces away from the goal.")
elif x == 2:
print(T2K[3] + " is near the ball and just isn't able to grab the ball, but in the end it doesn't matter, and the ball rolls away from the goal.")
elif x == 3:
print("The ball doesn't go wide enough to make past the keeper, and while " + T2K[3] + " isn't able to grab it, and it bounces off his palms.")
else:
ball = 10
b = random.randint(1, 4)
if b == 1:
print(T1K[3] + " is able to grab onto the ball, negating the threat.")
elif b == 2:
print(player[n] + " hangs his head in his hands, as " + T1K[3] + "blocks his shot.")
elif b == 3:
print("The shot is weak, and " + T1K[3] + " is able to grab it")
else:
print("The ball flies into the bottom corner, but somehow, " + T1K[3] + "is able to grab it.")
elif shoot - dive == 1 or shoot - dive == -1:
ball = -11
b = random.randint(1, 5)
if b == 1:
print("The ball sails into the bottom corner uncontested.")
elif b == 2:
print("The shot was mediocre, but it goes in nonetheless.")
elif b == 3:
print("All " + T1K[3] + " can do is watch as the ball rolls into the bottom corner.")
elif b == 4:
print("The shot likely wouldn't have been blocked anyway, but the goalkeeper diving the wrong way solidifies the goal.")
else:
print("The shot is perfect, flying into the top corner.")
team2goals += 1
else:
x = random.randint(1, 100)
if x < (60 * player[2] / T1K[1]):
ball = -12
b = random.randint(1, 3)
if b == 1:
print(a[n] + " just sails the ball past the goalkeepers outstretched legs, with nothing he can do to stop it.")
if b == 2:
print(T1K[3] + "'s feet are mere inches away from the ball, but it sails into the net, as his forward momentum prevents him from blocking it.")
if b == 3:
print("The ball flies into the goal behind him, and the goalkeeper cannot save it.")
team2goals += 1
else:
x = random.randint(1, 3)
if x == 1:
print(T2K[3] + " is able to just get his palms to it, and the ball bounces away from the goal.")
elif x == 2:
print(T2K[3] + " is near the ball and just isn't able to grab the ball, but in the end it doesn't matter, and the ball rolls away from the goal.")
elif x == 3:
print("The ball doesn't go wide enough to make past the keeper, and while " + T2K[3] + " isn't able to grab it, and it bounces off his palms.")
print("")
time.sleep(3)
print("Game Over")
print(str(team1goals) + " - " + str(team2goals))
print("")
time.sleep(3)
[quality signals: avg_line_length 48.069854 | max_line_length 249 | alphanum_fraction 0.442545 | num_words 15,394 | num_chars 128,683 | num_lines 2,676 | remaining numeric signal columns omitted]
hexsha: 7da702a66a47e061aabf11687b9cb9978b0c21a1 | size: 22,242 | ext: py | lang: Python
max_stars: reviewboard/webapi/tests/urls.py @ mnoorenberghe/reviewboard (b8ba9d662c250cb5ec704a50f619adbf3be8cbf0) | licenses: ["MIT"] | count: null
max_issues: reviewboard/webapi/tests/urls.py @ mnoorenberghe/reviewboard (b8ba9d662c250cb5ec704a50f619adbf3be8cbf0) | licenses: ["MIT"] | count: null
max_forks: reviewboard/webapi/tests/urls.py @ mnoorenberghe/reviewboard (b8ba9d662c250cb5ec704a50f619adbf3be8cbf0) | licenses: ["MIT"] | count: 1 | events: 2021-11-23T15:25:44.000Z to 2021-11-23T15:25:44.000Z
from __future__ import unicode_literals
from reviewboard.hostingsvcs.models import HostingServiceAccount
from reviewboard.hostingsvcs.service import HostingService
from reviewboard.reviews.models import ReviewRequest
from reviewboard.scmtools.models import Repository
from reviewboard.site.urlresolvers import local_site_reverse
from reviewboard.webapi.resources import resources
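# URL-building helpers for the webapi test suite; each wraps the matching resource's get_list_url()/get_item_url().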
def _normalize_id(value, allowed_cls, id_field='pk', ischecker=isinstance):
if ischecker(value, allowed_cls):
return getattr(value, id_field)
elif isinstance(value, int):
return value
else:
raise ValueError('Expected int or %r, but got %r instead'
% (allowed_cls, value))
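# Example (hypothetical values): _normalize_id(review_request, ReviewRequest, id_field='display_id')
# returns review_request.display_id, while _normalize_id(42, ReviewRequest) returns 42 unchanged.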
#
# APITokenResource
#
def get_api_token_list_url(user, local_site_name=None):
return resources.api_token.get_list_url(
local_site_name=local_site_name,
username=user.username)
def get_api_token_item_url(token, local_site_name=None):
return resources.api_token.get_item_url(
local_site_name=local_site_name,
username=token.user.username,
api_token_id=token.pk)
#
# ChangeResource
#
def get_change_list_url(review_request, local_site_name=None):
return resources.change.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
def get_change_item_url(changedesc, local_site_name=None):
return resources.change.get_item_url(
local_site_name=local_site_name,
review_request_id=changedesc.review_request.get().display_id,
change_id=changedesc.pk)
#
# DefaultReviewerResource
#
def get_default_reviewer_list_url(local_site_name=None):
return resources.default_reviewer.get_list_url(
local_site_name=local_site_name)
def get_default_reviewer_item_url(default_reviewer_id, local_site_name=None):
return resources.default_reviewer.get_item_url(
local_site_name=local_site_name,
default_reviewer_id=default_reviewer_id)
#
# DiffResource
#
def get_diff_list_url(review_request, local_site_name=None):
return resources.diff.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
def get_diff_item_url(review_request, diff_revision, local_site_name=None):
return resources.diff.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diff_revision)
#
# DiffFileAttachmentResource
#
def get_diff_file_attachment_list_url(repository, local_site_name=None):
return resources.diff_file_attachment.get_list_url(
local_site_name=local_site_name,
repository_id=repository.pk)
def get_diff_file_attachment_item_url(attachment, repository,
local_site_name=None):
return resources.diff_file_attachment.get_item_url(
local_site_name=local_site_name,
repository_id=repository.pk,
file_attachment_id=attachment.pk)
#
# DraftDiffResource
#
def get_draft_diff_list_url(review_request, local_site_name=None):
return resources.draft_diff.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
def get_draft_diff_item_url(review_request, diff_revision,
local_site_name=None):
return resources.draft_diff.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diff_revision)
#
# DraftFileAttachmentResource
#
def get_draft_file_attachment_list_url(review_request, local_site_name=None):
return resources.draft_file_attachment.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
def get_draft_file_attachment_item_url(review_request, file_attachment_id,
local_site_name=None):
return resources.draft_file_attachment.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
file_attachment_id=file_attachment_id)
#
# DraftFileDiffResource
#
def get_draft_filediff_list_url(diffset, review_request, local_site_name=None):
return resources.draft_filediff.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diffset.revision)
def get_draft_filediff_item_url(filediff, review_request,
local_site_name=None):
return resources.draft_filediff.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=filediff.diffset.revision,
filediff_id=filediff.pk)
#
# DraftOriginalFileResource
#
def get_draft_original_file_url(review_request, diffset, filediff,
local_site_name=None):
return resources.draft_original_file.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diffset.revision,
filediff_id=filediff.pk)
#
# DraftPatchedFileResource
#
def get_draft_patched_file_url(review_request, diffset, filediff,
local_site_name=None):
return resources.draft_patched_file.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diffset.revision,
filediff_id=filediff.pk)
#
# FileAttachmentResource
#
def get_file_attachment_list_url(review_request, local_site_name=None):
return resources.file_attachment.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
def get_file_attachment_item_url(file_attachment, local_site_name=None):
return resources.file_attachment.get_item_url(
local_site_name=local_site_name,
file_attachment_id=file_attachment.id,
review_request_id=file_attachment.review_request.get().display_id)
#
# FileAttachmentCommentResource
#
def get_file_attachment_comment_list_url(file_attachment,
local_site_name=None):
return resources.file_attachment_comment.get_list_url(
local_site_name=local_site_name,
file_attachment_id=file_attachment.pk,
review_request_id=file_attachment.review_request.get().display_id)
def get_file_attachment_comment_item_url(file_attachment, comment_id,
local_site_name=None):
return resources.file_attachment_comment.get_item_url(
local_site_name=local_site_name,
file_attachment_id=file_attachment.pk,
review_request_id=file_attachment.review_request.get().display_id,
comment_id=comment_id)
#
# FileDiffResource
#
def get_filediff_list_url(diffset, review_request, local_site_name=None):
return resources.filediff.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diffset.revision)
def get_filediff_item_url(filediff, review_request, local_site_name=None):
return resources.filediff.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=filediff.diffset.revision,
filediff_id=filediff.pk)
#
# FileDiffCommentResource
#
def get_filediff_comment_list_url(filediff, local_site_name=None):
diffset = filediff.diffset
review_request = diffset.history.review_request.get()
return resources.filediff_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=filediff.diffset.revision,
filediff_id=filediff.pk)
def get_filediff_comment_item_url(filediff, comment_id, local_site_name=None):
diffset = filediff.diffset
review_request = diffset.history.review_request.get()
return resources.filediff_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=filediff.diffset.revision,
filediff_id=filediff.pk,
comment_id=comment_id)
#
# HostingServiceResource
#
def get_hosting_service_list_url(local_site_name=None):
return resources.hosting_service.get_list_url(
local_site_name=local_site_name)
def get_hosting_service_item_url(hosting_service_or_id, local_site_name=None):
hosting_service_id = _normalize_id(hosting_service_or_id,
HostingService,
id_field='id',
ischecker=issubclass)
return resources.hosting_service.get_item_url(
local_site_name=local_site_name,
hosting_service_id=hosting_service_id)
#
# HostingServiceAccountResource
#
def get_hosting_service_account_list_url(local_site_name=None):
return resources.hosting_service_account.get_list_url(
local_site_name=local_site_name)
def get_hosting_service_account_item_url(account_or_id, local_site_name=None):
account_id = _normalize_id(account_or_id, HostingServiceAccount)
return resources.hosting_service_account.get_item_url(
local_site_name=local_site_name,
account_id=account_id)
#
# OriginalFileResource
#
def get_original_file_url(review_request, diffset, filediff,
local_site_name=None):
return resources.original_file.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diffset.revision,
filediff_id=filediff.pk)
#
# PatchedFileResource
#
def get_patched_file_url(review_request, diffset, filediff,
local_site_name=None):
return resources.patched_file.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
diff_revision=diffset.revision,
filediff_id=filediff.pk)
#
# RemoteRepositoryResource
#
def get_remote_repository_list_url(account, local_site_name=None):
return resources.remote_repository.get_list_url(
local_site_name=local_site_name,
account_id=account.pk)
def get_remote_repository_item_url(remote_repository, local_site_name=None):
return resources.remote_repository.get_item_url(
local_site_name=local_site_name,
account_id=remote_repository.hosting_service_account.pk,
repository_id=remote_repository.id)
#
# RepositoryResource
#
def get_repository_list_url(local_site_name=None):
return resources.repository.get_list_url(
local_site_name=local_site_name)
def get_repository_item_url(repository_or_id, local_site_name=None):
repository_id = _normalize_id(repository_or_id, Repository)
return resources.repository.get_item_url(
local_site_name=local_site_name,
repository_id=repository_id)
#
# RepositoryBranchesResource
#
def get_repository_branches_url(repository, local_site_name=None):
return resources.repository_branches.get_list_url(
local_site_name=local_site_name,
repository_id=repository.pk)
#
# RepositoryCommitsResource
#
def get_repository_commits_url(repository, local_site_name=None):
return resources.repository_commits.get_list_url(
local_site_name=local_site_name,
repository_id=repository.pk)
#
# RepositoryInfoResource
#
def get_repository_info_url(repository, local_site_name=None):
return resources.repository_info.get_list_url(
local_site_name=local_site_name,
repository_id=repository.pk)
#
# ReviewResource
#
def get_review_list_url(review_request, local_site_name=None):
return resources.review.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
def get_review_item_url(review_request, review_id, local_site_name=None):
return resources.review.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
review_id=review_id)
#
# ReviewDiffCommentResource
#
def get_review_diff_comment_list_url(review, local_site_name=None):
return resources.review_diff_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk)
def get_review_diff_comment_item_url(review, comment_id, local_site_name=None):
return resources.review_diff_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk,
comment_id=comment_id)
#
# FileAttachmentCommentResource
#
def get_review_file_attachment_comment_list_url(review, local_site_name=None):
return resources.review_file_attachment_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk)
def get_review_file_attachment_comment_item_url(review, comment_id,
local_site_name=None):
return resources.review_file_attachment_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk,
comment_id=comment_id)
#
# ReviewGroupResource
#
def get_review_group_list_url(local_site_name=None):
return resources.review_group.get_list_url(
local_site_name=local_site_name)
def get_review_group_item_url(group_name, local_site_name=None):
return resources.review_group.get_item_url(
local_site_name=local_site_name,
group_name=group_name)
#
# ReviewGroupUserResource
#
def get_review_group_user_list_url(group_name, local_site_name=None):
return resources.user.get_list_url(
local_site_name=local_site_name,
group_name=group_name)
def get_review_group_user_item_url(group_name, username, local_site_name=None):
return resources.user.get_item_url(
local_site_name=local_site_name,
group_name=group_name,
username=username)
#
# ReviewReplyResource
#
def get_review_reply_list_url(review, local_site_name=None):
return resources.review_reply.get_list_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk)
def get_review_reply_item_url(review, reply_id, local_site_name=None):
return resources.review_reply.get_item_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk,
reply_id=reply_id)
#
# ReviewReplyDiffCommentResource
#
def get_review_reply_diff_comment_list_url(reply, local_site_name=None):
return resources.review_reply_diff_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=reply.review_request.display_id,
review_id=reply.base_reply_to_id,
reply_id=reply.pk)
def get_review_reply_diff_comment_item_url(reply, comment_id,
local_site_name=None):
return resources.review_reply_diff_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=reply.review_request.display_id,
review_id=reply.base_reply_to_id,
reply_id=reply.pk,
comment_id=comment_id)
#
# ReviewReplyFileAttachmentCommentResource
#
def get_review_reply_file_attachment_comment_list_url(reply,
local_site_name=None):
return resources.review_reply_file_attachment_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=reply.review_request.display_id,
review_id=reply.base_reply_to_id,
reply_id=reply.pk)
def get_review_reply_file_attachment_comment_item_url(reply, comment_id,
local_site_name=None):
return resources.review_reply_file_attachment_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=reply.review_request.display_id,
review_id=reply.base_reply_to_id,
reply_id=reply.pk,
comment_id=comment_id)
#
# ReviewReplyScreenshotCommentResource
#
def get_review_reply_screenshot_comment_list_url(reply, local_site_name=None):
return resources.review_reply_screenshot_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=reply.review_request.display_id,
review_id=reply.base_reply_to_id,
reply_id=reply.pk)
def get_review_reply_screenshot_comment_item_url(reply, comment_id,
local_site_name=None):
return resources.review_reply_screenshot_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=reply.review_request.display_id,
review_id=reply.base_reply_to_id,
reply_id=reply.pk,
comment_id=comment_id)
#
# ReviewRequestResource
#
def get_review_request_list_url(local_site_name=None):
return resources.review_request.get_list_url(
local_site_name=local_site_name)
def get_review_request_item_url(review_request_id, local_site_name=None):
return resources.review_request.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request_id)
#
# ReviewRequestDraftResource
#
def get_review_request_draft_url(review_request, local_site_name=None):
return resources.review_request_draft.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
#
# ReviewScreenshotCommentResource
#
def get_review_screenshot_comment_list_url(review, local_site_name=None):
return resources.review_screenshot_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk)
def get_review_screenshot_comment_item_url(review, comment_id,
local_site_name=None):
return resources.review_screenshot_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=review.review_request.display_id,
review_id=review.pk,
comment_id=comment_id)
#
# RootResource
#
def get_root_url(local_site_name=None):
return local_site_reverse('root-resource',
local_site_name=local_site_name)
#
# ScreenshotResource
#
def get_screenshot_list_url(review_request_or_id, local_site_name=None):
review_request_id = _normalize_id(review_request_or_id, ReviewRequest,
id_field='display_id')
return resources.screenshot.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request_id)
def get_screenshot_item_url(screenshot, local_site_name=None):
return resources.screenshot.get_item_url(
local_site_name=local_site_name,
screenshot_id=screenshot.pk,
review_request_id=screenshot.review_request.get().display_id)
#
# ScreenshotCommentResource
#
def get_screenshot_comment_list_url(screenshot, local_site_name=None):
return resources.screenshot_comment.get_list_url(
local_site_name=local_site_name,
review_request_id=screenshot.review_request.get().display_id,
screenshot_id=screenshot.pk)
def get_screenshot_comment_item_url(screenshot, comment_id,
local_site_name=None):
return resources.screenshot_comment.get_item_url(
local_site_name=local_site_name,
review_request_id=screenshot.review_request.get().display_id,
screenshot_id=screenshot.pk,
comment_id=comment_id)
#
# ScreenshotDraftResource
#
def get_screenshot_draft_list_url(review_request, local_site_name=None):
return resources.draft_screenshot.get_list_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id)
def get_screenshot_draft_item_url(review_request, screenshot_id,
local_site_name=None):
return resources.draft_screenshot.get_item_url(
local_site_name=local_site_name,
review_request_id=review_request.display_id,
screenshot_id=screenshot_id)
#
# ServerInfoResource
#
def get_server_info_url(local_site_name=None):
return resources.server_info.get_item_url(local_site_name=local_site_name)
#
# SessionResource
#
def get_session_url(local_site_name=None):
return resources.session.get_list_url(local_site_name=local_site_name)
#
# UserResource
#
def get_user_list_url(local_site_name=None):
return resources.user.get_list_url(
local_site_name=local_site_name)
def get_user_item_url(username, local_site_name=None):
return resources.user.get_item_url(
local_site_name=local_site_name,
username=username)
#
# ValidateDiffResource
#
def get_validate_diff_url(local_site_name=None):
return resources.validate_diff.get_item_url(
local_site_name=local_site_name)
#
# WatchedReviewGroupResource
#
def get_watched_review_group_list_url(username, local_site_name=None):
return resources.watched_review_group.get_list_url(
local_site_name=local_site_name,
username=username)
def get_watched_review_group_item_url(username, object_id,
local_site_name=None):
return resources.watched_review_group.get_item_url(
local_site_name=local_site_name,
username=username,
watched_obj_id=object_id)
#
# WatchedReviewRequestResource
#
def get_watched_review_request_list_url(username, local_site_name=None):
return resources.watched_review_request.get_list_url(
local_site_name=local_site_name,
username=username)
def get_watched_review_request_item_url(username, object_id,
local_site_name=None):
return resources.watched_review_request.get_item_url(
local_site_name=local_site_name,
username=username,
watched_obj_id=object_id)
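Most of the helpers above follow one convention: accept either a model instance or a raw ID, normalize it through the resource registry, and thread `local_site_name` into the reverse lookup. A minimal sketch of that convention in use; the `review_request` object is an assumed fixture, not something defined in this file:

```python
# Hypothetical illustration: _normalize_id lets callers pass either the
# ReviewRequest model or its display_id and get the same URL back.
url = get_screenshot_list_url(review_request, local_site_name='team-a')
same_url = get_screenshot_list_url(review_request.display_id,
                                   local_site_name='team-a')
assert url == same_url
```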
| 31.194951
| 79
| 0.751956
| 2,905
| 22,242
| 5.248881
| 0.04716
| 0.139297
| 0.199502
| 0.09234
| 0.784759
| 0.756034
| 0.732555
| 0.721013
| 0.709601
| 0.64474
| 0
| 0
| 0.183347
| 22,242
| 712
| 80
| 31.238764
| 0.839463
| 0.047388
| 0
| 0.51699
| 0
| 0
| 0.003088
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.191748
| false
| 0
| 0.01699
| 0.174757
| 0.402913
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
7dd75a71ab41e1dab4be3836db02ea65d26395c1
| 334
|
py
|
Python
|
causalml/dataset/__init__.py
|
mohi7solanki/causalml
|
73e41837a8edb7220de1e2d102157000b2ccd476
|
[
"Apache-2.0"
] | null | null | null |
causalml/dataset/__init__.py
|
mohi7solanki/causalml
|
73e41837a8edb7220de1e2d102157000b2ccd476
|
[
"Apache-2.0"
] | null | null | null |
causalml/dataset/__init__.py
|
mohi7solanki/causalml
|
73e41837a8edb7220de1e2d102157000b2ccd476
|
[
"Apache-2.0"
] | null | null | null |
from .regression import synthetic_data
from .regression import simulate_nuisance_and_easy_treatment
from .regression import simulate_randomized_trial
from .regression import simulate_easy_propensity_difficult_baseline
from .regression import simulate_unrelated_treatment_control
from .classification import make_uplift_classification
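These re-exports make the simulation entry points importable directly from `causalml.dataset`. A hedged usage sketch; the signatures follow the causalml documentation around this snapshot and may differ in later releases:

```python
from causalml.dataset import synthetic_data, make_uplift_classification

# Regression-style synthetic data: outcome y, features X, treatment flag,
# true treatment effect tau, baseline b, and propensity e.
y, X, treatment, tau, b, e = synthetic_data(mode=1, n=1000, p=5, sigma=1.0)

# Classification-style uplift data: a DataFrame plus feature column names.
df, feature_names = make_uplift_classification()
```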
| 47.714286
| 67
| 0.91018
| 40
| 334
| 7.2
| 0.5
| 0.243056
| 0.347222
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071856
| 334
| 6
| 68
| 55.666667
| 0.929032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c494a56f838c1a23232ff18be130143c2e0439b0
| 68
|
py
|
Python
|
inac8hr/gui/__init__.py
|
th-bunratta/8hr.insomniac
|
5173500a1ad7197096d513b38258aa65b035fcf3
|
[
"BSD-3-Clause"
] | null | null | null |
inac8hr/gui/__init__.py
|
th-bunratta/8hr.insomniac
|
5173500a1ad7197096d513b38258aa65b035fcf3
|
[
"BSD-3-Clause"
] | null | null | null |
inac8hr/gui/__init__.py
|
th-bunratta/8hr.insomniac
|
5173500a1ad7197096d513b38258aa65b035fcf3
|
[
"BSD-3-Clause"
] | null | null | null |
from inac8hr.gui.basics import *
from inac8hr.gui.controls import *
| 22.666667
| 34
| 0.794118
| 10
| 68
| 5.4
| 0.6
| 0.407407
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.117647
| 68
| 2
| 35
| 34
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c4977ee003a1282e2b45d760d2a02e8f5a28c9db
| 135
|
py
|
Python
|
examples/probabilistic/ant_on_grid/run.py
|
Noxsense/mCRL2
|
dd2fcdd6eb8b15af2729633041c2dbbd2216ad24
|
[
"BSL-1.0"
] | 61
|
2018-05-24T13:14:05.000Z
|
2022-03-29T11:35:03.000Z
|
examples/probabilistic/ant_on_grid/run.py
|
Noxsense/mCRL2
|
dd2fcdd6eb8b15af2729633041c2dbbd2216ad24
|
[
"BSL-1.0"
] | 229
|
2018-05-28T08:31:09.000Z
|
2022-03-21T11:02:41.000Z
|
examples/probabilistic/ant_on_grid/run.py
|
Noxsense/mCRL2
|
dd2fcdd6eb8b15af2729633041c2dbbd2216ad24
|
[
"BSL-1.0"
] | 28
|
2018-04-11T14:09:39.000Z
|
2022-02-25T15:57:39.000Z
|
import os
os.system('mcrl22lps -v ant_on_grid.mcrl2 ant_on_grid.lps')
os.system('lps2lts -vrjittyc ant_on_grid.lps ant_on_grid.aut')
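`os.system` discards each tool's exit status, so a failed `mcrl22lps` run would silently feed a stale `.lps` file into `lps2lts`. A sketch of the same two-step pipeline with explicit error checking; the tool names and flags are copied verbatim from the script above:

```python
import subprocess

# check=True raises CalledProcessError as soon as either step fails,
# instead of silently continuing the way os.system does.
subprocess.run(['mcrl22lps', '-v', 'ant_on_grid.mcrl2', 'ant_on_grid.lps'],
               check=True)
subprocess.run(['lps2lts', '-vrjittyc', 'ant_on_grid.lps', 'ant_on_grid.aut'],
               check=True)
```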
| 22.5
| 62
| 0.792593
| 26
| 135
| 3.807692
| 0.5
| 0.20202
| 0.363636
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03252
| 0.088889
| 135
| 5
| 63
| 27
| 0.772358
| 0
| 0
| 0
| 0
| 0
| 0.708955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
c49ecfe3f5e40b8630b042b5298e25357e801306
| 1,278
|
py
|
Python
|
mediaplatform/migrations/0023_add_remaining_at_indexes.py
|
jbrownrs/issue-376-GDS-link
|
e8cce1b79f46b98a7d24b2da5eca48430fd904a3
|
[
"MIT"
] | 5
|
2019-01-07T17:22:34.000Z
|
2020-10-08T15:03:12.000Z
|
mediaplatform/migrations/0023_add_remaining_at_indexes.py
|
jbrownrs/issue-376-GDS-link
|
e8cce1b79f46b98a7d24b2da5eca48430fd904a3
|
[
"MIT"
] | 203
|
2017-12-14T09:51:56.000Z
|
2018-08-28T14:04:08.000Z
|
mediaplatform/migrations/0023_add_remaining_at_indexes.py
|
jbrownrs/issue-376-GDS-link
|
e8cce1b79f46b98a7d24b2da5eca48430fd904a3
|
[
"MIT"
] | 5
|
2018-10-22T11:36:01.000Z
|
2020-07-20T05:47:49.000Z
|
# Generated by Django 2.1.2 on 2018-10-22 12:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mediaplatform', '0022_move_channel_institution_to_billing_account'),
]
operations = [
migrations.AddIndex(
model_name='channel',
index=models.Index(fields=['created_at'], name='mediaplatfo_created_9c6424_idx'),
),
migrations.AddIndex(
model_name='channel',
index=models.Index(fields=['deleted_at'], name='mediaplatfo_deleted_043765_idx'),
),
migrations.AddIndex(
model_name='mediaitem',
index=models.Index(fields=['created_at'], name='mediaplatfo_created_fbeb5b_idx'),
),
migrations.AddIndex(
model_name='mediaitem',
index=models.Index(fields=['deleted_at'], name='mediaplatfo_deleted_98a707_idx'),
),
migrations.AddIndex(
model_name='playlist',
index=models.Index(fields=['created_at'], name='mediaplatfo_created_de3ebd_idx'),
),
migrations.AddIndex(
model_name='playlist',
index=models.Index(fields=['deleted_at'], name='mediaplatfo_deleted_7cde4f_idx'),
),
]
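Migrations like this one are what `makemigrations` emits when `Meta.indexes` entries are added to existing models. A hedged sketch of the model-side declaration that would generate the `AddIndex` operations above; the field definitions are illustrative, not taken from the app:

```python
from django.db import models

class Channel(models.Model):
    created_at = models.DateTimeField(auto_now_add=True)
    deleted_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        # Each entry here becomes one migrations.AddIndex operation.
        indexes = [
            models.Index(fields=['created_at']),
            models.Index(fields=['deleted_at']),
        ]
```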
| 33.631579
| 93
| 0.622848
| 128
| 1,278
| 5.9375
| 0.359375
| 0.142105
| 0.181579
| 0.213158
| 0.714474
| 0.710526
| 0.710526
| 0.710526
| 0.710526
| 0.318421
| 0
| 0.040881
| 0.253521
| 1,278
| 37
| 94
| 34.540541
| 0.755765
| 0.035211
| 0
| 0.580645
| 1
| 0
| 0.283509
| 0.185215
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c4b56d982db2fbc5151bbe8b924739e0da4c5c65
| 2,925
|
py
|
Python
|
BestStore/Products/migrations/0007_auto_20190604_1100.py
|
bucc-sanyam/BestStore
|
047b475522c4f449eaf1bb474fff0cd9e8af36c0
|
[
"MIT"
] | 1
|
2019-11-08T04:52:26.000Z
|
2019-11-08T04:52:26.000Z
|
BestStore/Products/migrations/0007_auto_20190604_1100.py
|
bucc-sanyam/BestStore
|
047b475522c4f449eaf1bb474fff0cd9e8af36c0
|
[
"MIT"
] | 12
|
2019-06-08T17:48:01.000Z
|
2022-03-11T23:49:17.000Z
|
BestStore/Products/migrations/0007_auto_20190604_1100.py
|
bucc-sanyam/BestStore
|
047b475522c4f449eaf1bb474fff0cd9e8af36c0
|
[
"MIT"
] | 8
|
2019-05-27T08:08:00.000Z
|
2019-06-10T07:09:41.000Z
|
# Generated by Django 2.2.1 on 2019-06-04 11:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Products', '0006_wishlist'),
]
operations = [
migrations.AddField(
model_name='category',
name='date_created',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='category',
name='date_modified',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='category',
name='is_active',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='newsletter',
name='date_created',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='newsletter',
name='is_subscribed',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='product',
name='date_modified',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='product',
name='is_active',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='productimages',
name='date_created',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='productimages',
name='date_modified',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='subcategory',
name='date_created',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='subcategory',
name='date_modified',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='subcategory',
name='is_active',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='tags',
name='date_created',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='tags',
name='date_modified',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='wishlist',
name='date_created',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='wishlist',
name='date_modified',
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
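Worth flagging: the fields above pair `auto_now=True` with `date_created` and `auto_now_add=True` with `date_modified`, which is inverted relative to the usual Django convention (`auto_now_add` stamps creation time once; `auto_now` refreshes on every save). A sketch of the conventional declaration, assuming the inversion was unintended:

```python
from django.db import models

class Category(models.Model):
    # Conventional pairing: set once at creation vs. updated on each save.
    date_created = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True, null=True)
```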
| 31.117021
| 69
| 0.556239
| 265
| 2,925
| 5.94717
| 0.177358
| 0.182741
| 0.233503
| 0.274112
| 0.90165
| 0.90165
| 0.90165
| 0.845178
| 0.806472
| 0.767767
| 0
| 0.009729
| 0.332308
| 2,925
| 93
| 70
| 31.451613
| 0.797235
| 0.015385
| 0
| 0.908046
| 1
| 0
| 0.122307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011494
| 0
| 0.045977
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c4b824b07741d7f399c2a34b413ce28165544670
| 54,581
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/router_peer.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/router_peer.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/router_peer.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['RouterPeerArgs', 'RouterPeer']
@pulumi.input_type
class RouterPeerArgs:
def __init__(__self__, *,
interface: pulumi.Input[str],
peer_asn: pulumi.Input[int],
peer_ip_address: pulumi.Input[str],
router: pulumi.Input[str],
advertise_mode: Optional[pulumi.Input[str]] = None,
advertised_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
advertised_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]]] = None,
advertised_route_priority: Optional[pulumi.Input[int]] = None,
bfd: Optional[pulumi.Input['RouterPeerBfdArgs']] = None,
enable: Optional[pulumi.Input[bool]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a RouterPeer resource.
:param pulumi.Input[str] interface: Name of the interface the BGP peer is associated with.
:param pulumi.Input[int] peer_asn: Peer BGP Autonomous System Number (ASN).
Each BGP interface may use a different value.
:param pulumi.Input[str] peer_ip_address: IP address of the BGP interface outside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] router: The name of the Cloud Router in which this BgpPeer will be configured.
:param pulumi.Input[str] advertise_mode: User-specified flag to indicate which mode to use for advertisement.
Valid values of this enum field are: `DEFAULT`, `CUSTOM`
Default value is `DEFAULT`.
Possible values are `DEFAULT` and `CUSTOM`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] advertised_groups: User-specified list of prefix groups to advertise in custom
mode, which can take one of the following options:
* `ALL_SUBNETS`: Advertises all available subnets, including peer VPC subnets.
* `ALL_VPC_SUBNETS`: Advertises the router's own VPC subnets.
* `ALL_PEER_VPC_SUBNETS`: Advertises peer subnets of the router's VPC network.
:param pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]] advertised_ip_ranges: User-specified list of individual IP ranges to advertise in
custom mode. This field can only be populated if advertiseMode
is `CUSTOM` and is advertised to all peers of the router. These IP
ranges will be advertised in addition to any specified groups.
Leave this field blank to advertise no custom IP ranges.
Structure is documented below.
:param pulumi.Input[int] advertised_route_priority: The priority of routes advertised to this BGP peer.
Where there is more than one matching route of maximum
length, the routes with the lowest priority value win.
:param pulumi.Input['RouterPeerBfdArgs'] bfd: BFD configuration for the BGP peering.
Structure is documented below.
:param pulumi.Input[bool] enable: The status of the BGP peer connection. If set to false, any active session
with the peer is terminated and all associated routing information is removed.
If set to true, the peer connection can be established with routing information.
The default is true.
:param pulumi.Input[str] ip_address: IP address of the interface inside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] name: Name of this BGP peer. The name must be 1-63 characters long,
and comply with RFC1035. Specifically, the name must be 1-63 characters
long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which
means the first character must be a lowercase letter, and all
following characters must be a dash, lowercase letter, or digit,
except the last character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: Region where the router and BgpPeer reside.
If it is not provided, the provider region is used.
"""
pulumi.set(__self__, "interface", interface)
pulumi.set(__self__, "peer_asn", peer_asn)
pulumi.set(__self__, "peer_ip_address", peer_ip_address)
pulumi.set(__self__, "router", router)
if advertise_mode is not None:
pulumi.set(__self__, "advertise_mode", advertise_mode)
if advertised_groups is not None:
pulumi.set(__self__, "advertised_groups", advertised_groups)
if advertised_ip_ranges is not None:
pulumi.set(__self__, "advertised_ip_ranges", advertised_ip_ranges)
if advertised_route_priority is not None:
pulumi.set(__self__, "advertised_route_priority", advertised_route_priority)
if bfd is not None:
pulumi.set(__self__, "bfd", bfd)
if enable is not None:
pulumi.set(__self__, "enable", enable)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if region is not None:
pulumi.set(__self__, "region", region)
@property
@pulumi.getter
def interface(self) -> pulumi.Input[str]:
"""
Name of the interface the BGP peer is associated with.
"""
return pulumi.get(self, "interface")
@interface.setter
def interface(self, value: pulumi.Input[str]):
pulumi.set(self, "interface", value)
@property
@pulumi.getter(name="peerAsn")
def peer_asn(self) -> pulumi.Input[int]:
"""
Peer BGP Autonomous System Number (ASN).
Each BGP interface may use a different value.
"""
return pulumi.get(self, "peer_asn")
@peer_asn.setter
def peer_asn(self, value: pulumi.Input[int]):
pulumi.set(self, "peer_asn", value)
@property
@pulumi.getter(name="peerIpAddress")
def peer_ip_address(self) -> pulumi.Input[str]:
"""
IP address of the BGP interface outside Google Cloud Platform.
Only IPv4 is supported.
"""
return pulumi.get(self, "peer_ip_address")
@peer_ip_address.setter
def peer_ip_address(self, value: pulumi.Input[str]):
pulumi.set(self, "peer_ip_address", value)
@property
@pulumi.getter
def router(self) -> pulumi.Input[str]:
"""
The name of the Cloud Router in which this BgpPeer will be configured.
"""
return pulumi.get(self, "router")
@router.setter
def router(self, value: pulumi.Input[str]):
pulumi.set(self, "router", value)
@property
@pulumi.getter(name="advertiseMode")
def advertise_mode(self) -> Optional[pulumi.Input[str]]:
"""
User-specified flag to indicate which mode to use for advertisement.
Valid values of this enum field are: `DEFAULT`, `CUSTOM`
Default value is `DEFAULT`.
Possible values are `DEFAULT` and `CUSTOM`.
"""
return pulumi.get(self, "advertise_mode")
@advertise_mode.setter
def advertise_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "advertise_mode", value)
@property
@pulumi.getter(name="advertisedGroups")
def advertised_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
User-specified list of prefix groups to advertise in custom
mode, which can take one of the following options:
* `ALL_SUBNETS`: Advertises all available subnets, including peer VPC subnets.
* `ALL_VPC_SUBNETS`: Advertises the router's own VPC subnets.
* `ALL_PEER_VPC_SUBNETS`: Advertises peer subnets of the router's VPC network.
"""
return pulumi.get(self, "advertised_groups")
@advertised_groups.setter
def advertised_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "advertised_groups", value)
@property
@pulumi.getter(name="advertisedIpRanges")
def advertised_ip_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]]]:
"""
User-specified list of individual IP ranges to advertise in
custom mode. This field can only be populated if advertiseMode
is `CUSTOM` and is advertised to all peers of the router. These IP
ranges will be advertised in addition to any specified groups.
Leave this field blank to advertise no custom IP ranges.
Structure is documented below.
"""
return pulumi.get(self, "advertised_ip_ranges")
@advertised_ip_ranges.setter
def advertised_ip_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]]]):
pulumi.set(self, "advertised_ip_ranges", value)
@property
@pulumi.getter(name="advertisedRoutePriority")
def advertised_route_priority(self) -> Optional[pulumi.Input[int]]:
"""
The priority of routes advertised to this BGP peer.
Where there is more than one matching route of maximum
length, the routes with the lowest priority value win.
"""
return pulumi.get(self, "advertised_route_priority")
@advertised_route_priority.setter
def advertised_route_priority(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "advertised_route_priority", value)
@property
@pulumi.getter
def bfd(self) -> Optional[pulumi.Input['RouterPeerBfdArgs']]:
"""
BFD configuration for the BGP peering.
Structure is documented below.
"""
return pulumi.get(self, "bfd")
@bfd.setter
def bfd(self, value: Optional[pulumi.Input['RouterPeerBfdArgs']]):
pulumi.set(self, "bfd", value)
@property
@pulumi.getter
def enable(self) -> Optional[pulumi.Input[bool]]:
"""
The status of the BGP peer connection. If set to false, any active session
with the peer is terminated and all associated routing information is removed.
If set to true, the peer connection can be established with routing information.
The default is true.
"""
return pulumi.get(self, "enable")
@enable.setter
def enable(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
IP address of the interface inside Google Cloud Platform.
Only IPv4 is supported.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of this BGP peer. The name must be 1-63 characters long,
and comply with RFC1035. Specifically, the name must be 1-63 characters
long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which
means the first character must be a lowercase letter, and all
following characters must be a dash, lowercase letter, or digit,
except the last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
Region where the router and BgpPeer reside.
If it is not provided, the provider region is used.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@pulumi.input_type
class _RouterPeerState:
def __init__(__self__, *,
advertise_mode: Optional[pulumi.Input[str]] = None,
advertised_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
advertised_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]]] = None,
advertised_route_priority: Optional[pulumi.Input[int]] = None,
bfd: Optional[pulumi.Input['RouterPeerBfdArgs']] = None,
enable: Optional[pulumi.Input[bool]] = None,
interface: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
management_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
peer_asn: Optional[pulumi.Input[int]] = None,
peer_ip_address: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
router: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering RouterPeer resources.
:param pulumi.Input[str] advertise_mode: User-specified flag to indicate which mode to use for advertisement.
Valid values of this enum field are: `DEFAULT`, `CUSTOM`
Default value is `DEFAULT`.
Possible values are `DEFAULT` and `CUSTOM`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] advertised_groups: User-specified list of prefix groups to advertise in custom
mode, which can take one of the following options:
* `ALL_SUBNETS`: Advertises all available subnets, including peer VPC subnets.
* `ALL_VPC_SUBNETS`: Advertises the router's own VPC subnets.
* `ALL_PEER_VPC_SUBNETS`: Advertises peer subnets of the router's VPC network.
:param pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]] advertised_ip_ranges: User-specified list of individual IP ranges to advertise in
custom mode. This field can only be populated if advertiseMode
is `CUSTOM` and is advertised to all peers of the router. These IP
ranges will be advertised in addition to any specified groups.
Leave this field blank to advertise no custom IP ranges.
Structure is documented below.
:param pulumi.Input[int] advertised_route_priority: The priority of routes advertised to this BGP peer.
Where there is more than one matching route of maximum
length, the routes with the lowest priority value win.
:param pulumi.Input['RouterPeerBfdArgs'] bfd: BFD configuration for the BGP peering.
Structure is documented below.
:param pulumi.Input[bool] enable: The status of the BGP peer connection. If set to false, any active session
with the peer is terminated and all associated routing information is removed.
If set to true, the peer connection can be established with routing information.
The default is true.
:param pulumi.Input[str] interface: Name of the interface the BGP peer is associated with.
:param pulumi.Input[str] ip_address: IP address of the interface inside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] management_type: The resource that configures and manages this BGP peer. * 'MANAGED_BY_USER' is the default value and can be managed by
you or other users * 'MANAGED_BY_ATTACHMENT' is a BGP peer that is configured and managed by Cloud Interconnect,
specifically by an InterconnectAttachment of type PARTNER. Google automatically creates, updates, and deletes this type
of BGP peer when the PARTNER InterconnectAttachment is created, updated, or deleted.
:param pulumi.Input[str] name: Name of this BGP peer. The name must be 1-63 characters long,
and comply with RFC1035. Specifically, the name must be 1-63 characters
long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which
means the first character must be a lowercase letter, and all
following characters must be a dash, lowercase letter, or digit,
except the last character, which cannot be a dash.
:param pulumi.Input[int] peer_asn: Peer BGP Autonomous System Number (ASN).
Each BGP interface may use a different value.
:param pulumi.Input[str] peer_ip_address: IP address of the BGP interface outside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: Region where the router and BgpPeer reside.
If it is not provided, the provider region is used.
:param pulumi.Input[str] router: The name of the Cloud Router in which this BgpPeer will be configured.
"""
if advertise_mode is not None:
pulumi.set(__self__, "advertise_mode", advertise_mode)
if advertised_groups is not None:
pulumi.set(__self__, "advertised_groups", advertised_groups)
if advertised_ip_ranges is not None:
pulumi.set(__self__, "advertised_ip_ranges", advertised_ip_ranges)
if advertised_route_priority is not None:
pulumi.set(__self__, "advertised_route_priority", advertised_route_priority)
if bfd is not None:
pulumi.set(__self__, "bfd", bfd)
if enable is not None:
pulumi.set(__self__, "enable", enable)
if interface is not None:
pulumi.set(__self__, "interface", interface)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if management_type is not None:
pulumi.set(__self__, "management_type", management_type)
if name is not None:
pulumi.set(__self__, "name", name)
if peer_asn is not None:
pulumi.set(__self__, "peer_asn", peer_asn)
if peer_ip_address is not None:
pulumi.set(__self__, "peer_ip_address", peer_ip_address)
if project is not None:
pulumi.set(__self__, "project", project)
if region is not None:
pulumi.set(__self__, "region", region)
if router is not None:
pulumi.set(__self__, "router", router)
@property
@pulumi.getter(name="advertiseMode")
def advertise_mode(self) -> Optional[pulumi.Input[str]]:
"""
User-specified flag to indicate which mode to use for advertisement.
Valid values of this enum field are: `DEFAULT`, `CUSTOM`
Default value is `DEFAULT`.
Possible values are `DEFAULT` and `CUSTOM`.
"""
return pulumi.get(self, "advertise_mode")
@advertise_mode.setter
def advertise_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "advertise_mode", value)
@property
@pulumi.getter(name="advertisedGroups")
def advertised_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
User-specified list of prefix groups to advertise in custom
mode, which can take one of the following options:
* `ALL_SUBNETS`: Advertises all available subnets, including peer VPC subnets.
* `ALL_VPC_SUBNETS`: Advertises the router's own VPC subnets.
* `ALL_PEER_VPC_SUBNETS`: Advertises peer subnets of the router's VPC network.
"""
return pulumi.get(self, "advertised_groups")
@advertised_groups.setter
def advertised_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "advertised_groups", value)
@property
@pulumi.getter(name="advertisedIpRanges")
def advertised_ip_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]]]:
"""
User-specified list of individual IP ranges to advertise in
custom mode. This field can only be populated if advertiseMode
is `CUSTOM` and is advertised to all peers of the router. These IP
ranges will be advertised in addition to any specified groups.
Leave this field blank to advertise no custom IP ranges.
Structure is documented below.
"""
return pulumi.get(self, "advertised_ip_ranges")
@advertised_ip_ranges.setter
def advertised_ip_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RouterPeerAdvertisedIpRangeArgs']]]]):
pulumi.set(self, "advertised_ip_ranges", value)
@property
@pulumi.getter(name="advertisedRoutePriority")
def advertised_route_priority(self) -> Optional[pulumi.Input[int]]:
"""
The priority of routes advertised to this BGP peer.
Where there is more than one matching route of maximum
length, the routes with the lowest priority value win.
"""
return pulumi.get(self, "advertised_route_priority")
@advertised_route_priority.setter
def advertised_route_priority(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "advertised_route_priority", value)
@property
@pulumi.getter
def bfd(self) -> Optional[pulumi.Input['RouterPeerBfdArgs']]:
"""
BFD configuration for the BGP peering.
Structure is documented below.
"""
return pulumi.get(self, "bfd")
@bfd.setter
def bfd(self, value: Optional[pulumi.Input['RouterPeerBfdArgs']]):
pulumi.set(self, "bfd", value)
@property
@pulumi.getter
def enable(self) -> Optional[pulumi.Input[bool]]:
"""
The status of the BGP peer connection. If set to false, any active session
with the peer is terminated and all associated routing information is removed.
If set to true, the peer connection can be established with routing information.
The default is true.
"""
return pulumi.get(self, "enable")
@enable.setter
def enable(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable", value)
@property
@pulumi.getter
def interface(self) -> Optional[pulumi.Input[str]]:
"""
Name of the interface the BGP peer is associated with.
"""
return pulumi.get(self, "interface")
@interface.setter
def interface(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "interface", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
IP address of the interface inside Google Cloud Platform.
Only IPv4 is supported.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter(name="managementType")
def management_type(self) -> Optional[pulumi.Input[str]]:
"""
The resource that configures and manages this BGP peer. * 'MANAGED_BY_USER' is the default value and can be managed by
you or other users * 'MANAGED_BY_ATTACHMENT' is a BGP peer that is configured and managed by Cloud Interconnect,
specifically by an InterconnectAttachment of type PARTNER. Google automatically creates, updates, and deletes this type
of BGP peer when the PARTNER InterconnectAttachment is created, updated, or deleted.
"""
return pulumi.get(self, "management_type")
@management_type.setter
def management_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "management_type", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of this BGP peer. The name must be 1-63 characters long,
and comply with RFC1035. Specifically, the name must be 1-63 characters
long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which
means the first character must be a lowercase letter, and all
following characters must be a dash, lowercase letter, or digit,
except the last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="peerAsn")
def peer_asn(self) -> Optional[pulumi.Input[int]]:
"""
Peer BGP Autonomous System Number (ASN).
Each BGP interface may use a different value.
"""
return pulumi.get(self, "peer_asn")
@peer_asn.setter
def peer_asn(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "peer_asn", value)
@property
@pulumi.getter(name="peerIpAddress")
def peer_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
IP address of the BGP interface outside Google Cloud Platform.
Only IPv4 is supported.
"""
return pulumi.get(self, "peer_ip_address")
@peer_ip_address.setter
def peer_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_ip_address", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
Region where the router and BgpPeer reside.
If it is not provided, the provider region is used.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter
def router(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Cloud Router in which this BgpPeer will be configured.
"""
return pulumi.get(self, "router")
@router.setter
def router(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "router", value)
class RouterPeer(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
advertise_mode: Optional[pulumi.Input[str]] = None,
advertised_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
advertised_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouterPeerAdvertisedIpRangeArgs']]]]] = None,
advertised_route_priority: Optional[pulumi.Input[int]] = None,
bfd: Optional[pulumi.Input[pulumi.InputType['RouterPeerBfdArgs']]] = None,
enable: Optional[pulumi.Input[bool]] = None,
interface: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
peer_asn: Optional[pulumi.Input[int]] = None,
peer_ip_address: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
router: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
BGP information that must be configured into the routing stack to
establish BGP peering. This information must specify the peer ASN
and either the interface name, IP address, or peer IP address.
Please refer to RFC4273.
To get more information about RouterBgpPeer, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/routers)
* How-to Guides
* [Google Cloud Router](https://cloud.google.com/router/docs/)
## Example Usage
### Router Peer Basic
```python
import pulumi
import pulumi_gcp as gcp
peer = gcp.compute.RouterPeer("peer",
advertised_route_priority=100,
interface="interface-1",
peer_asn=65513,
peer_ip_address="169.254.1.2",
region="us-central1",
router="my-router")
```
### Router Peer Disabled
```python
import pulumi
import pulumi_gcp as gcp
peer = gcp.compute.RouterPeer("peer",
advertised_route_priority=100,
enable=False,
interface="interface-1",
peer_asn=65513,
peer_ip_address="169.254.1.2",
region="us-central1",
router="my-router")
```
### Router Peer Bfd
```python
import pulumi
import pulumi_gcp as gcp
peer = gcp.compute.RouterPeer("peer",
advertised_route_priority=100,
bfd=gcp.compute.RouterPeerBfdArgs(
min_receive_interval=1000,
min_transmit_interval=1000,
multiplier=5,
session_initialization_mode="ACTIVE",
),
interface="interface-1",
peer_asn=65513,
peer_ip_address="169.254.1.2",
region="us-central1",
router="my-router")
```
## Import
RouterBgpPeer can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default projects/{{project}}/regions/{{region}}/routers/{{router}}/{{name}}
```
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default {{project}}/{{region}}/{{router}}/{{name}}
```
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default {{region}}/{{router}}/{{name}}
```
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default {{router}}/{{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] advertise_mode: User-specified flag to indicate which mode to use for advertisement.
Valid values of this enum field are: `DEFAULT`, `CUSTOM`
Default value is `DEFAULT`.
Possible values are `DEFAULT` and `CUSTOM`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] advertised_groups: User-specified list of prefix groups to advertise in custom
mode, which can take one of the following options:
* `ALL_SUBNETS`: Advertises all available subnets, including peer VPC subnets.
* `ALL_VPC_SUBNETS`: Advertises the router's own VPC subnets.
* `ALL_PEER_VPC_SUBNETS`: Advertises peer subnets of the router's VPC network.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouterPeerAdvertisedIpRangeArgs']]]] advertised_ip_ranges: User-specified list of individual IP ranges to advertise in
custom mode. This field can only be populated if advertiseMode
is `CUSTOM` and is advertised to all peers of the router. These IP
ranges will be advertised in addition to any specified groups.
Leave this field blank to advertise no custom IP ranges.
Structure is documented below.
:param pulumi.Input[int] advertised_route_priority: The priority of routes advertised to this BGP peer.
Where there is more than one matching route of maximum
length, the routes with the lowest priority value win.
:param pulumi.Input[pulumi.InputType['RouterPeerBfdArgs']] bfd: BFD configuration for the BGP peering.
Structure is documented below.
:param pulumi.Input[bool] enable: The status of the BGP peer connection. If set to false, any active session
with the peer is terminated and all associated routing information is removed.
If set to true, the peer connection can be established with routing information.
The default is true.
:param pulumi.Input[str] interface: Name of the interface the BGP peer is associated with.
:param pulumi.Input[str] ip_address: IP address of the interface inside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] name: Name of this BGP peer. The name must be 1-63 characters long,
and comply with RFC1035. Specifically, the name must be 1-63 characters
long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which
means the first character must be a lowercase letter, and all
following characters must be a dash, lowercase letter, or digit,
except the last character, which cannot be a dash.
:param pulumi.Input[int] peer_asn: Peer BGP Autonomous System Number (ASN).
Each BGP interface may use a different value.
:param pulumi.Input[str] peer_ip_address: IP address of the BGP interface outside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: Region where the router and BgpPeer reside.
If it is not provided, the provider region is used.
:param pulumi.Input[str] router: The name of the Cloud Router in which this BgpPeer will be configured.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RouterPeerArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
BGP information that must be configured into the routing stack to
establish BGP peering. This information must specify the peer ASN
and either the interface name, IP address, or peer IP address.
Please refer to RFC4273.
To get more information about RouterBgpPeer, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/routers)
* How-to Guides
* [Google Cloud Router](https://cloud.google.com/router/docs/)
## Example Usage
### Router Peer Basic
```python
import pulumi
import pulumi_gcp as gcp
peer = gcp.compute.RouterPeer("peer",
advertised_route_priority=100,
interface="interface-1",
peer_asn=65513,
peer_ip_address="169.254.1.2",
region="us-central1",
router="my-router")
```
### Router Peer Disabled
```python
import pulumi
import pulumi_gcp as gcp
peer = gcp.compute.RouterPeer("peer",
advertised_route_priority=100,
enable=False,
interface="interface-1",
peer_asn=65513,
peer_ip_address="169.254.1.2",
region="us-central1",
router="my-router")
```
### Router Peer Bfd
```python
import pulumi
import pulumi_gcp as gcp
peer = gcp.compute.RouterPeer("peer",
advertised_route_priority=100,
bfd=gcp.compute.RouterPeerBfdArgs(
min_receive_interval=1000,
min_transmit_interval=1000,
multiplier=5,
session_initialization_mode="ACTIVE",
),
interface="interface-1",
peer_asn=65513,
peer_ip_address="169.254.1.2",
region="us-central1",
router="my-router")
```
## Import
RouterBgpPeer can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default projects/{{project}}/regions/{{region}}/routers/{{router}}/{{name}}
```
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default {{project}}/{{region}}/{{router}}/{{name}}
```
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default {{region}}/{{router}}/{{name}}
```
```sh
$ pulumi import gcp:compute/routerPeer:RouterPeer default {{router}}/{{name}}
```
:param str resource_name: The name of the resource.
:param RouterPeerArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RouterPeerArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
advertise_mode: Optional[pulumi.Input[str]] = None,
advertised_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
advertised_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouterPeerAdvertisedIpRangeArgs']]]]] = None,
advertised_route_priority: Optional[pulumi.Input[int]] = None,
bfd: Optional[pulumi.Input[pulumi.InputType['RouterPeerBfdArgs']]] = None,
enable: Optional[pulumi.Input[bool]] = None,
interface: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
peer_asn: Optional[pulumi.Input[int]] = None,
peer_ip_address: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
router: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RouterPeerArgs.__new__(RouterPeerArgs)
__props__.__dict__["advertise_mode"] = advertise_mode
__props__.__dict__["advertised_groups"] = advertised_groups
__props__.__dict__["advertised_ip_ranges"] = advertised_ip_ranges
__props__.__dict__["advertised_route_priority"] = advertised_route_priority
__props__.__dict__["bfd"] = bfd
__props__.__dict__["enable"] = enable
if interface is None and not opts.urn:
raise TypeError("Missing required property 'interface'")
__props__.__dict__["interface"] = interface
__props__.__dict__["ip_address"] = ip_address
__props__.__dict__["name"] = name
if peer_asn is None and not opts.urn:
raise TypeError("Missing required property 'peer_asn'")
__props__.__dict__["peer_asn"] = peer_asn
if peer_ip_address is None and not opts.urn:
raise TypeError("Missing required property 'peer_ip_address'")
__props__.__dict__["peer_ip_address"] = peer_ip_address
__props__.__dict__["project"] = project
__props__.__dict__["region"] = region
if router is None and not opts.urn:
raise TypeError("Missing required property 'router'")
__props__.__dict__["router"] = router
__props__.__dict__["management_type"] = None
super(RouterPeer, __self__).__init__(
'gcp:compute/routerPeer:RouterPeer',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
advertise_mode: Optional[pulumi.Input[str]] = None,
advertised_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
advertised_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouterPeerAdvertisedIpRangeArgs']]]]] = None,
advertised_route_priority: Optional[pulumi.Input[int]] = None,
bfd: Optional[pulumi.Input[pulumi.InputType['RouterPeerBfdArgs']]] = None,
enable: Optional[pulumi.Input[bool]] = None,
interface: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
management_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
peer_asn: Optional[pulumi.Input[int]] = None,
peer_ip_address: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
router: Optional[pulumi.Input[str]] = None) -> 'RouterPeer':
"""
Get an existing RouterPeer resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] advertise_mode: User-specified flag to indicate which mode to use for advertisement.
Valid values of this enum field are: `DEFAULT`, `CUSTOM`
Default value is `DEFAULT`.
Possible values are `DEFAULT` and `CUSTOM`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] advertised_groups: User-specified list of prefix groups to advertise in custom
mode, which can take one of the following options:
* `ALL_SUBNETS`: Advertises all available subnets, including peer VPC subnets.
* `ALL_VPC_SUBNETS`: Advertises the router's own VPC subnets.
* `ALL_PEER_VPC_SUBNETS`: Advertises peer subnets of the router's VPC network.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouterPeerAdvertisedIpRangeArgs']]]] advertised_ip_ranges: User-specified list of individual IP ranges to advertise in
custom mode. This field can only be populated if advertiseMode
is `CUSTOM` and is advertised to all peers of the router. These IP
ranges will be advertised in addition to any specified groups.
Leave this field blank to advertise no custom IP ranges.
Structure is documented below.
:param pulumi.Input[int] advertised_route_priority: The priority of routes advertised to this BGP peer.
Where there is more than one matching route of maximum
length, the routes with the lowest priority value win.
:param pulumi.Input[pulumi.InputType['RouterPeerBfdArgs']] bfd: BFD configuration for the BGP peering.
Structure is documented below.
:param pulumi.Input[bool] enable: The status of the BGP peer connection. If set to false, any active session
with the peer is terminated and all associated routing information is removed.
If set to true, the peer connection can be established with routing information.
The default is true.
:param pulumi.Input[str] interface: Name of the interface the BGP peer is associated with.
:param pulumi.Input[str] ip_address: IP address of the interface inside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] management_type: The resource that configures and manages this BGP peer. * 'MANAGED_BY_USER' is the default value and can be managed by
you or other users * 'MANAGED_BY_ATTACHMENT' is a BGP peer that is configured and managed by Cloud Interconnect,
specifically by an InterconnectAttachment of type PARTNER. Google automatically creates, updates, and deletes this type
of BGP peer when the PARTNER InterconnectAttachment is created, updated, or deleted.
:param pulumi.Input[str] name: Name of this BGP peer. The name must be 1-63 characters long,
and comply with RFC1035. Specifically, the name must be 1-63 characters
long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which
means the first character must be a lowercase letter, and all
following characters must be a dash, lowercase letter, or digit,
except the last character, which cannot be a dash.
:param pulumi.Input[int] peer_asn: Peer BGP Autonomous System Number (ASN).
Each BGP interface may use a different value.
:param pulumi.Input[str] peer_ip_address: IP address of the BGP interface outside Google Cloud Platform.
Only IPv4 is supported.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: Region where the router and BgpPeer reside.
If it is not provided, the provider region is used.
:param pulumi.Input[str] router: The name of the Cloud Router in which this BgpPeer will be configured.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RouterPeerState.__new__(_RouterPeerState)
__props__.__dict__["advertise_mode"] = advertise_mode
__props__.__dict__["advertised_groups"] = advertised_groups
__props__.__dict__["advertised_ip_ranges"] = advertised_ip_ranges
__props__.__dict__["advertised_route_priority"] = advertised_route_priority
__props__.__dict__["bfd"] = bfd
__props__.__dict__["enable"] = enable
__props__.__dict__["interface"] = interface
__props__.__dict__["ip_address"] = ip_address
__props__.__dict__["management_type"] = management_type
__props__.__dict__["name"] = name
__props__.__dict__["peer_asn"] = peer_asn
__props__.__dict__["peer_ip_address"] = peer_ip_address
__props__.__dict__["project"] = project
__props__.__dict__["region"] = region
__props__.__dict__["router"] = router
return RouterPeer(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="advertiseMode")
def advertise_mode(self) -> pulumi.Output[Optional[str]]:
"""
User-specified flag to indicate which mode to use for advertisement.
Valid values of this enum field are: `DEFAULT`, `CUSTOM`
Default value is `DEFAULT`.
Possible values are `DEFAULT` and `CUSTOM`.
"""
return pulumi.get(self, "advertise_mode")
@property
@pulumi.getter(name="advertisedGroups")
def advertised_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
User-specified list of prefix groups to advertise in custom
mode, which can take one of the following options:
* `ALL_SUBNETS`: Advertises all available subnets, including peer VPC subnets.
* `ALL_VPC_SUBNETS`: Advertises the router's own VPC subnets.
* `ALL_PEER_VPC_SUBNETS`: Advertises peer subnets of the router's VPC network.
"""
return pulumi.get(self, "advertised_groups")
@property
@pulumi.getter(name="advertisedIpRanges")
def advertised_ip_ranges(self) -> pulumi.Output[Optional[Sequence['outputs.RouterPeerAdvertisedIpRange']]]:
"""
User-specified list of individual IP ranges to advertise in
custom mode. This field can only be populated if advertiseMode
is `CUSTOM` and is advertised to all peers of the router. These IP
ranges will be advertised in addition to any specified groups.
Leave this field blank to advertise no custom IP ranges.
Structure is documented below.
"""
return pulumi.get(self, "advertised_ip_ranges")
@property
@pulumi.getter(name="advertisedRoutePriority")
def advertised_route_priority(self) -> pulumi.Output[Optional[int]]:
"""
The priority of routes advertised to this BGP peer.
Where there is more than one matching route of maximum
length, the routes with the lowest priority value win.
"""
return pulumi.get(self, "advertised_route_priority")
@property
@pulumi.getter
def bfd(self) -> pulumi.Output['outputs.RouterPeerBfd']:
"""
BFD configuration for the BGP peering.
Structure is documented below.
"""
return pulumi.get(self, "bfd")
@property
@pulumi.getter
def enable(self) -> pulumi.Output[Optional[bool]]:
"""
The status of the BGP peer connection. If set to false, any active session
with the peer is terminated and all associated routing information is removed.
If set to true, the peer connection can be established with routing information.
The default is true.
"""
return pulumi.get(self, "enable")
@property
@pulumi.getter
def interface(self) -> pulumi.Output[str]:
"""
Name of the interface the BGP peer is associated with.
"""
return pulumi.get(self, "interface")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> pulumi.Output[str]:
"""
IP address of the interface inside Google Cloud Platform.
Only IPv4 is supported.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="managementType")
def management_type(self) -> pulumi.Output[str]:
"""
The resource that configures and manages this BGP peer. * 'MANAGED_BY_USER' is the default value and can be managed by
you or other users * 'MANAGED_BY_ATTACHMENT' is a BGP peer that is configured and managed by Cloud Interconnect,
specifically by an InterconnectAttachment of type PARTNER. Google automatically creates, updates, and deletes this type
of BGP peer when the PARTNER InterconnectAttachment is created, updated, or deleted.
"""
return pulumi.get(self, "management_type")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of this BGP peer. The name must be 1-63 characters long,
and comply with RFC1035. Specifically, the name must be 1-63 characters
long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which
means the first character must be a lowercase letter, and all
following characters must be a dash, lowercase letter, or digit,
except the last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="peerAsn")
def peer_asn(self) -> pulumi.Output[int]:
"""
Peer BGP Autonomous System Number (ASN).
Each BGP interface may use a different value.
"""
return pulumi.get(self, "peer_asn")
@property
@pulumi.getter(name="peerIpAddress")
def peer_ip_address(self) -> pulumi.Output[str]:
"""
IP address of the BGP interface outside Google Cloud Platform.
Only IPv4 is supported.
"""
return pulumi.get(self, "peer_ip_address")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
Region where the router and BgpPeer reside.
If it is not provided, the provider region is used.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter
def router(self) -> pulumi.Output[str]:
"""
The name of the Cloud Router in which this BgpPeer will be configured.
"""
return pulumi.get(self, "router")
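Beyond creating new peers, the static `get()` defined above rehydrates an existing RouterPeer from a provider ID. A hedged sketch using one of the ID formats listed in the import section; the project, region, router, and peer names are placeholders:

```python
import pulumi
import pulumi_gcp as gcp

# Assumes the ID format mirrors the documented import formats.
existing = gcp.compute.RouterPeer.get(
    'existing-peer',
    id='my-project/us-central1/my-router/my-peer')

# Outputs on the rehydrated resource can then be exported or referenced.
pulumi.export('peer_ip', existing.peer_ip_address)
```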
| 47.012059
| 186
| 0.644638
| 6,608
| 54,581
| 5.182657
| 0.050091
| 0.070021
| 0.051099
| 0.04304
| 0.942769
| 0.929775
| 0.919847
| 0.911087
| 0.907963
| 0.893743
| 0
| 0.005527
| 0.267401
| 54,581
| 1,160
| 187
| 47.052586
| 0.850948
| 0.483648
| 0
| 0.796296
| 1
| 0
| 0.110323
| 0.027801
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164609
| false
| 0.002058
| 0.014403
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c4ca7509a48558892f613c5e94013e1666c72d4c
| 5,393
|
py
|
Python
|
Problems/2/remove_dups/test_remove_dups.py
|
weezybusy/Cracking-the-Coding-Interview
|
22b8f62c97781ea5aa388434d75ad5abde42c85a
|
[
"MIT"
] | null | null | null |
Problems/2/remove_dups/test_remove_dups.py
|
weezybusy/Cracking-the-Coding-Interview
|
22b8f62c97781ea5aa388434d75ad5abde42c85a
|
[
"MIT"
] | null | null | null |
Problems/2/remove_dups/test_remove_dups.py
|
weezybusy/Cracking-the-Coding-Interview
|
22b8f62c97781ea5aa388434d75ad5abde42c85a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import unittest
from singly_linked_list import Node, SinglyLinkedList
from remove_dups import remove_dups, remove_dups_alt
class TestRemoveDups(unittest.TestCase):
def test_remove_dups(self):
# Check removing dups from the empty list.
lst = SinglyLinkedList()
remove_dups(lst)
self.assertEqual(lst.size, 0)
# Check removing dups from the list with one element in it.
lst = SinglyLinkedList()
lst.insert_tail(Node(1))
remove_dups(lst)
self.assertEqual(lst.size, 1)
self.assertEqual(lst.head.data, 1)
# Check removing dups from the list with a repeated element in it.
lst = SinglyLinkedList()
original_data = [1, 1]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups(lst)
self.assertEqual(lst.size, 1)
self.assertEqual(lst.head.data, 1)
lst = SinglyLinkedList()
original_data = [1, 1, 1, 1, 1]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups(lst)
self.assertEqual(lst.size, 1)
self.assertEqual(lst.head.data, 1)
# Check removing dups from the list with mixed data.
lst = SinglyLinkedList()
original_data = [1, 5, 3, 5, 4, 8, 1, 12, 33, 5]
expected_data = [1, 5, 3, 4, 8, 12, 33]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups(lst)
self.assertEqual(lst.size, len(expected_data))
node = lst.head
for i in range(len(expected_data)):
self.assertEqual(node.data, expected_data[i])
node = node.next
lst = SinglyLinkedList()
original_data = [38, 49, 51, 1080, 12, -48, -48, 1080]
expected_data = [38, 49, 51, 1080, 12, -48]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups(lst)
self.assertEqual(lst.size, len(expected_data))
node = lst.head
for i in range(len(expected_data)):
self.assertEqual(node.data, expected_data[i])
node = node.next
# Check removing dups from the list with no repeated data.
lst = SinglyLinkedList()
original_data = [1, 2, 3, 4, 5]
expected_data = [1, 2, 3, 4, 5]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups(lst)
self.assertEqual(lst.size, len(expected_data))
node = lst.head
for i in range(len(expected_data)):
self.assertEqual(node.data, expected_data[i])
node = node.next
def test_remove_dups_alt(self):
# Check removing dups from the empty list.
lst = SinglyLinkedList()
remove_dups_alt(lst)
self.assertEqual(lst.size, 0)
# Check removing dups from the list with one element in it.
lst = SinglyLinkedList()
lst.insert_tail(Node(1))
remove_dups_alt(lst)
self.assertEqual(lst.size, 1)
self.assertEqual(lst.head.data, 1)
# Check removing dups from the list with a repeated element in it.
lst = SinglyLinkedList()
original_data = [1, 1]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups_alt(lst)
self.assertEqual(lst.size, 1)
self.assertEqual(lst.head.data, 1)
lst = SinglyLinkedList()
original_data = [1, 1, 1, 1, 1]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups_alt(lst)
self.assertEqual(lst.size, 1)
self.assertEqual(lst.head.data, 1)
# Check removing dups from the list with mixed data.
lst = SinglyLinkedList()
original_data = [1, 5, 3, 5, 4, 8, 1, 12, 33, 5]
expected_data = [1, 5, 3, 4, 8, 12, 33]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups_alt(lst)
self.assertEqual(lst.size, len(expected_data))
node = lst.head
for i in range(len(expected_data)):
self.assertEqual(node.data, expected_data[i])
node = node.next
lst = SinglyLinkedList()
original_data = [38, 49, 51, 1080, 12, -48, -48, 1080]
expected_data = [38, 49, 51, 1080, 12, -48]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups_alt(lst)
self.assertEqual(lst.size, len(expected_data))
node = lst.head
for i in range(len(expected_data)):
self.assertEqual(node.data, expected_data[i])
node = node.next
# Check removing dups from the list with no repeated data.
lst = SinglyLinkedList()
original_data = [1, 2, 3, 4, 5]
expected_data = [1, 2, 3, 4, 5]
for i in range(len(original_data)):
lst.insert_tail(Node(original_data[i]))
remove_dups_alt(lst)
self.assertEqual(lst.size, len(expected_data))
node = lst.head
for i in range(len(expected_data)):
self.assertEqual(node.data, expected_data[i])
node = node.next
if __name__ == '__main__':
unittest.main()
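# The tests above import remove_dups/remove_dups_alt from a sibling module that is
# not shown here. A minimal sketch of one implementation that would satisfy them,
# assuming (as the tests do) that Node exposes .data/.next and SinglyLinkedList
# exposes .head and a .size counter:
def remove_dups_sketch(lst):
    """Remove duplicate values from a singly linked list in one pass."""
    seen = set()
    prev = None
    node = lst.head
    while node is not None:
        if node.data in seen:
            prev.next = node.next  # unlink the duplicate node
            lst.size -= 1
        else:
            seen.add(node.data)
            prev = node
        node = node.next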
| 35.480263 | 72 | 0.600779 | 731 | 5,393 | 4.287278 | 0.090287 | 0.114869 | 0.114869 | 0.056158 | 0.932036 | 0.932036 | 0.932036 | 0.932036 | 0.931078 | 0.931078 | 0 | 0.042015 | 0.289449 | 5,393 | 151 | 73 | 35.715232 | 0.775835 | 0.103838 | 0 | 0.932203 | 0 | 0 | 0.001659 | 0 | 0 | 0 | 0 | 0 | 0.220339 | 1 | 0.016949 | false | 0 | 0.025424 | 0 | 0.050847 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c4ff1757af8232023d000ff23a9cf8abbfba18cf | 100 | py | Python | answers/test_answer_01.py | milliams/python_testing | b19b839744634007ea0b1f5051a37480146d3284 | ["MIT"] | 3 | 2018-09-20T06:56:12.000Z | 2020-12-30T18:51:32.000Z | answers/test_answer_01.py | milliams/python_testing | b19b839744634007ea0b1f5051a37480146d3284 | ["MIT"] | null | null | null | answers/test_answer_01.py | milliams/python_testing | b19b839744634007ea0b1f5051a37480146d3284 | ["MIT"] | 1 | 2018-09-19T17:57:48.000Z | 2018-09-19T17:57:48.000Z |
from my_lib import add_elements
def test_add():
assert add_elements([1, 2], [3, 4]) == [4, 6]
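# test_add imports add_elements from my_lib, which is not shown. A minimal sketch
# of a function that would make this test pass (an assumption, not the course's
# reference answer):
def add_elements(a, b):
    """Element-wise sum of two equal-length sequences."""
    return [x + y for x, y in zip(a, b)]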
| 16.666667 | 49 | 0.63 | 18 | 100 | 3.277778 | 0.777778 | 0.372881 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 0.2 | 100 | 5 | 50 | 20 | 0.6625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1efc6bc401817ec3eb4d9d26ec8aad1e7c05b623 | 3,324 | py | Python | mayan/apps/document_states/tests/test_tasks.py | CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons | 0e4e919fd2e1ded6711354a0330135283e87f8c7 | ["Apache-2.0"] | 2 | 2021-09-12T19:41:19.000Z | 2021-09-12T19:41:20.000Z | mayan/apps/document_states/tests/test_tasks.py | CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons | 0e4e919fd2e1ded6711354a0330135283e87f8c7 | ["Apache-2.0"] | 37 | 2021-09-13T01:00:12.000Z | 2021-10-02T03:54:30.000Z | mayan/apps/document_states/tests/test_tasks.py | CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons | 0e4e919fd2e1ded6711354a0330135283e87f8c7 | ["Apache-2.0"] | 1 | 2021-09-22T13:17:30.000Z | 2021-09-22T13:17:30.000Z |
from mayan.apps.documents.models.document_models import Document
from mayan.apps.documents.tests.base import GenericDocumentTestCase
from .mixins.workflow_template_mixins import (
WorkflowTaskTestCaseMixin, WorkflowTemplateTestMixin
)
class WorkflowTaskTestCase(
WorkflowTaskTestCaseMixin, WorkflowTemplateTestMixin, GenericDocumentTestCase
):
auto_upload_test_document = False
def setUp(self):
super().setUp()
self._create_test_document_stub()
self._create_test_workflow_template(add_test_document_type=True)
self._create_test_workflow_template_state()
def test_task_launch_all_workflows(self):
workflow_instance_count = self.test_document.workflows.count()
self._execute_task_launch_all_workflows()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count + 1
)
def test_trashed_document_task_launch_all_workflows(self):
workflow_instance_count = self.test_document.workflows.count()
self.test_document.delete()
self._execute_task_launch_all_workflows()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count
)
def test_task_launch_workflow(self):
workflow_instance_count = self.test_document.workflows.count()
self._execute_task_launch_workflow()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count + 1
)
def test_trashed_document_task_launch_workflow(self):
workflow_instance_count = self.test_document.workflows.count()
self.test_document.delete()
self._execute_task_launch_workflow()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count
)
def test_task_launch_workflow_for(self):
workflow_instance_count = self.test_document.workflows.count()
self._execute_task_launch_workflow_for()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count + 1
)
def test_trashed_document_task_launch_workflow_for(self):
workflow_instance_count = self.test_document.workflows.count()
self.test_document.delete()
with self.assertRaises(expected_exception=Document.DoesNotExist):
self._execute_task_launch_workflow_for()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count
)
def test_task_launch_all_workflow_for(self):
workflow_instance_count = self.test_document.workflows.count()
self._execute_task_launch_all_workflow_for()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count + 1
)
def test_trashed_document_task_launch_all_workflow_for(self):
workflow_instance_count = self.test_document.workflows.count()
self.test_document.delete()
with self.assertRaises(expected_exception=Document.DoesNotExist):
self._execute_task_launch_all_workflow_for()
self.assertEqual(
self.test_document.workflows.count(), workflow_instance_count
)
| 32.588235 | 82 | 0.70367 | 351 | 3,324 | 6.219373 | 0.136752 | 0.126432 | 0.146587 | 0.183234 | 0.814475 | 0.783784 | 0.783784 | 0.783784 | 0.783784 | 0.783784 | 0 | 0.001553 | 0.22503 | 3,324 | 101 | 83 | 32.910891 | 0.845885 | 0 | 0 | 0.558824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147059 | 1 | 0.132353 | false | 0 | 0.044118 | 0 | 0.205882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6f861610c4e941763db03b1d517167012c99ba28 | 5,365 | py | Python | tests/virtual_time/test_async.py | wbap/BriCA1 | 02fea1be62fa16cce28a7ac7fdb8351e04227bec | ["Apache-2.0"] | 17 | 2015-09-21T07:25:05.000Z | 2022-01-01T00:42:44.000Z | tests/virtual_time/test_async.py | wbap/BriCA1 | 02fea1be62fa16cce28a7ac7fdb8351e04227bec | ["Apache-2.0"] | null | null | null | tests/virtual_time/test_async.py | wbap/BriCA1 | 02fea1be62fa16cce28a7ac7fdb8351e04227bec | ["Apache-2.0"] | 7 | 2016-01-06T16:01:49.000Z | 2018-09-25T12:12:15.000Z |
import sys, os
sys.path.append(os.getcwd())
import numpy as np
import brica1
def test_autostep():
agent = brica1.Agent()
scheduler = brica1.VirtualTimeScheduler(agent)
zero = np.zeros(3, dtype=np.short)
data = np.array([1, 2, 3], dtype=np.short)
CompA = brica1.ConstantComponent()
CompB = brica1.PipeComponent()
CompC = brica1.NullComponent()
CompA.offset = 200
CompB.offset = 0
CompC.offset = 100
CompA.interval = 500
CompB.interval = 500
CompC.interval = 500
ModA = brica1.Module()
ModA.add_component('CompA', CompA)
ModA.add_component('CompB', CompB)
ModA.add_component('CompC', CompC)
CompA.set_state('out', data)
CompA.make_out_port('out', 3)
CompB.make_in_port('in', 3)
brica1.connect((CompA, 'out'), (CompB, 'in'))
CompB.make_out_port('out', 3)
CompB.set_map('in', 'out')
CompC.make_in_port('in', 3)
brica1.connect((CompB, 'out'), (CompC, 'in'))
agent.add_submodule('ModA', ModA)
scheduler.update()
a_out = zero
b_in = zero
b_out = zero
c_in = zero
assert (CompA.get_state('out') == data).all()
assert CompA.get_state('out') is not data
while True:
assert (CompA.get_out_port('out').buffer == a_out).all()
assert (CompB.get_in_port('in').buffer == b_in ).all()
assert (CompB.get_out_port('out').buffer == b_out).all()
assert (CompC.get_in_port('in').buffer == c_in ).all()
time = scheduler.step()
if time > 1600:
break
if time == 700:
a_out = data
if time == 1000:
b_in = data
if time == 1500:
b_out = data
if time == 1600:
c_in = data
def test_interval():
agent = brica1.Agent()
scheduler = brica1.VirtualTimeScheduler(agent)
zero = np.zeros(3, dtype=np.short)
data = np.array([1, 2, 3], dtype=np.short)
CompA = brica1.ConstantComponent()
CompB = brica1.PipeComponent()
CompC = brica1.NullComponent()
CompA.offset = 200
CompB.offset = 0
CompC.offset = 100
CompA.interval = 500
CompB.interval = 500
CompC.interval = 500
ModA = brica1.Module()
ModA.add_component('CompA', CompA)
ModA.add_component('CompB', CompB)
ModA.add_component('CompC', CompC)
CompA.set_state('out', data)
CompA.make_out_port('out', 3)
CompB.make_in_port('in', 3)
brica1.connect((CompA, 'out'), (CompB, 'in'))
CompB.make_out_port('out', 3)
CompB.set_map('in', 'out')
CompC.make_in_port('in', 3)
brica1.connect((CompB, 'out'), (CompC, 'in'))
agent.add_submodule('ModA', ModA)
scheduler.update()
a_out = zero
b_in = zero
b_out = zero
c_in = zero
assert (CompA.get_state('out') == data).all()
assert CompA.get_state('out') is not data
interval = 100
steps = 0
while True:
assert (CompA.get_out_port('out').buffer == a_out).all()
assert (CompB.get_in_port('in').buffer == b_in ).all()
assert (CompB.get_out_port('out').buffer == b_out).all()
assert (CompC.get_in_port('in').buffer == c_in ).all()
assert steps * interval == scheduler.current_time
time = scheduler.step(interval)
steps += 1
if time > 1600:
break
if time == 700:
a_out = data
if time == 1000:
b_in = data
if time == 1500:
b_out = data
if time == 1600:
c_in = data
def test_sleep():
agent = brica1.Agent()
scheduler = brica1.VirtualTimeScheduler(agent)
zero = np.zeros(3, dtype=np.short)
data = np.array([1, 2, 3], dtype=np.short)
CompA = brica1.ConstantComponent()
CompB = brica1.PipeComponent()
CompC = brica1.NullComponent()
CompA.offset = 100
CompB.offset = 200
CompC.offset = 300
CompA.interval = 100
CompB.interval = 100
CompC.interval = 100
CompA.sleep = 300
CompB.sleep = 300
CompC.sleep = 300
ModA = brica1.Module()
ModA.add_component('CompA', CompA)
ModA.add_component('CompB', CompB)
ModA.add_component('CompC', CompC)
CompA.set_state('out', data)
CompA.make_out_port('out', 3)
CompB.make_in_port('in', 3)
brica1.connect((CompA, 'out'), (CompB, 'in'))
CompB.make_out_port('out', 3)
CompB.set_map('in', 'out')
CompC.make_in_port('in', 3)
brica1.connect((CompB, 'out'), (CompC, 'in'))
agent.add_submodule('ModA', ModA)
scheduler.update()
a_out = zero
b_in = zero
b_out = zero
c_in = zero
assert (CompA.get_state('out') == data).all()
assert CompA.get_state('out') is not data
interval = 100
steps = 0
while True:
assert (CompA.get_out_port('out').buffer == a_out).all()
assert (CompB.get_in_port('in').buffer == b_in ).all()
assert (CompB.get_out_port('out').buffer == b_out).all()
assert (CompC.get_in_port('in').buffer == c_in ).all()
assert steps * interval == scheduler.current_time
time = scheduler.step(interval)
steps += 1
if time > 300:
break
if time == 200:
a_out = data
if time == 200:
b_in = data
if time == 300:
b_out = data
if time == 300:
c_in = data
| 24.497717 | 64 | 0.58397 | 721 | 5,365 | 4.188627 | 0.102635 | 0.029801 | 0.039735 | 0.025828 | 0.915563 | 0.901987 | 0.901987 | 0.901987 | 0.901987 | 0.901987 | 0 | 0.043835 | 0.27288 | 5,365 | 218 | 65 | 24.610092 | 0.730326 | 0 | 0 | 0.890909 | 0 | 0 | 0.035228 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 1 | 0.018182 | false | 0 | 0.018182 | 0 | 0.036364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6f8af90bab5964b9f00510b5d0abe379894619fd | 2,324 | py | Python | tests/testCaseTests.py | Scony/python-junit | 0424e11746f83b5dfbf4e27667bf252c8db30d50 | ["MIT"] | 6 | 2017-01-25T11:12:54.000Z | 2018-11-11T13:44:04.000Z | tests/testCaseTests.py | Scony/python-junit | 0424e11746f83b5dfbf4e27667bf252c8db30d50 | ["MIT"] | null | null | null | tests/testCaseTests.py | Scony/python-junit | 0424e11746f83b5dfbf4e27667bf252c8db30d50 | ["MIT"] | 2 | 2017-04-02T15:19:38.000Z | 2019-03-19T21:01:54.000Z |
import unittest
import junit
class TestTestCase(unittest.TestCase):
def testParametersSetting1(self):
tc = junit.TestCase(
classname='aaa',
name='bbb',
)
self.assertEqual(tc.params['time'], None)
self.assertEqual(tc.params['classname'], 'aaa')
self.assertEqual(tc.params['name'], 'bbb')
self.assertEqual(tc.params['status'], None)
self.assertEqual(tc.params['skipped'], None)
self.assertEqual(tc.params['failure'], None)
self.assertEqual(tc.params['error'], None)
self.assertEqual(tc.params['systemOut'], None)
self.assertEqual(tc.params['systemErr'], None)
def testParametersSetting2(self):
tc = junit.TestCase(
time=0.1,
classname='aaa',
name='bbb',
status='ccc',
skipped='ddd',
failure='eee',
error='fff',
systemOut='ggg',
systemErr='hhh',
)
self.assertEqual(tc.params['time'], 0.1)
self.assertEqual(tc.params['classname'], 'aaa')
self.assertEqual(tc.params['name'], 'bbb')
self.assertEqual(tc.params['status'], 'ccc')
self.assertEqual(tc.params['skipped'], 'ddd')
self.assertEqual(tc.params['failure'], 'eee')
self.assertEqual(tc.params['error'], 'fff')
self.assertEqual(tc.params['systemOut'], 'ggg')
self.assertEqual(tc.params['systemErr'], 'hhh')
def testParametersSetting3(self):
tc = junit.TestCase(
time='0.1',
classname='aaa',
name='bbb',
status='ccc',
skipped='ddd',
failure='eee',
error='fff',
systemOut='ggg',
systemErr='hhh',
)
self.assertEqual(tc.params['time'], '0.1')
self.assertEqual(tc.params['classname'], 'aaa')
self.assertEqual(tc.params['name'], 'bbb')
self.assertEqual(tc.params['status'], 'ccc')
self.assertEqual(tc.params['skipped'], 'ddd')
self.assertEqual(tc.params['failure'], 'eee')
self.assertEqual(tc.params['error'], 'fff')
self.assertEqual(tc.params['systemOut'], 'ggg')
self.assertEqual(tc.params['systemErr'], 'hhh')
if __name__ == '__main__':
unittest.main()
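# The tests above only construct junit.TestCase and read tc.params. A minimal
# sketch of a class that would satisfy them (an assumption about python-junit's
# internals, not its actual implementation):
class TestCaseSketch(object):
    _FIELDS = ('time', 'classname', 'name', 'status', 'skipped',
               'failure', 'error', 'systemOut', 'systemErr')
    def __init__(self, **kwargs):
        # Any field not passed defaults to None, matching testParametersSetting1.
        self.params = {field: kwargs.get(field) for field in self._FIELDS}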
| 33.681159 | 55 | 0.55852 | 237 | 2,324 | 5.443038 | 0.151899 | 0.313953 | 0.355814 | 0.481395 | 0.852713 | 0.713178 | 0.689922 | 0.689922 | 0.689922 | 0.689922 | 0 | 0.006494 | 0.271084 | 2,324 | 68 | 56 | 34.176471 | 0.755018 | 0 | 0 | 0.639344 | 0 | 0 | 0.129948 | 0 | 0 | 0 | 0 | 0 | 0.442623 | 1 | 0.04918 | false | 0 | 0.032787 | 0 | 0.098361 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6ff989b389642c73571b5cb2a9fe3909dcf6e07f | 6,545 | py | Python | loldib/getratings/models/NA/na_xerath/na_xerath_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null | loldib/getratings/models/NA/na_xerath/na_xerath_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null | loldib/getratings/models/NA/na_xerath/na_xerath_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Xerath_Jng_Aatrox(Ratings):
pass
class NA_Xerath_Jng_Ahri(Ratings):
pass
class NA_Xerath_Jng_Akali(Ratings):
pass
class NA_Xerath_Jng_Alistar(Ratings):
pass
class NA_Xerath_Jng_Amumu(Ratings):
pass
class NA_Xerath_Jng_Anivia(Ratings):
pass
class NA_Xerath_Jng_Annie(Ratings):
pass
class NA_Xerath_Jng_Ashe(Ratings):
pass
class NA_Xerath_Jng_AurelionSol(Ratings):
pass
class NA_Xerath_Jng_Azir(Ratings):
pass
class NA_Xerath_Jng_Bard(Ratings):
pass
class NA_Xerath_Jng_Blitzcrank(Ratings):
pass
class NA_Xerath_Jng_Brand(Ratings):
pass
class NA_Xerath_Jng_Braum(Ratings):
pass
class NA_Xerath_Jng_Caitlyn(Ratings):
pass
class NA_Xerath_Jng_Camille(Ratings):
pass
class NA_Xerath_Jng_Cassiopeia(Ratings):
pass
class NA_Xerath_Jng_Chogath(Ratings):
pass
class NA_Xerath_Jng_Corki(Ratings):
pass
class NA_Xerath_Jng_Darius(Ratings):
pass
class NA_Xerath_Jng_Diana(Ratings):
pass
class NA_Xerath_Jng_Draven(Ratings):
pass
class NA_Xerath_Jng_DrMundo(Ratings):
pass
class NA_Xerath_Jng_Ekko(Ratings):
pass
class NA_Xerath_Jng_Elise(Ratings):
pass
class NA_Xerath_Jng_Evelynn(Ratings):
pass
class NA_Xerath_Jng_Ezreal(Ratings):
pass
class NA_Xerath_Jng_Fiddlesticks(Ratings):
pass
class NA_Xerath_Jng_Fiora(Ratings):
pass
class NA_Xerath_Jng_Fizz(Ratings):
pass
class NA_Xerath_Jng_Galio(Ratings):
pass
class NA_Xerath_Jng_Gangplank(Ratings):
pass
class NA_Xerath_Jng_Garen(Ratings):
pass
class NA_Xerath_Jng_Gnar(Ratings):
pass
class NA_Xerath_Jng_Gragas(Ratings):
pass
class NA_Xerath_Jng_Graves(Ratings):
pass
class NA_Xerath_Jng_Hecarim(Ratings):
pass
class NA_Xerath_Jng_Heimerdinger(Ratings):
pass
class NA_Xerath_Jng_Illaoi(Ratings):
pass
class NA_Xerath_Jng_Irelia(Ratings):
pass
class NA_Xerath_Jng_Ivern(Ratings):
pass
class NA_Xerath_Jng_Janna(Ratings):
pass
class NA_Xerath_Jng_JarvanIV(Ratings):
pass
class NA_Xerath_Jng_Jax(Ratings):
pass
class NA_Xerath_Jng_Jayce(Ratings):
pass
class NA_Xerath_Jng_Jhin(Ratings):
pass
class NA_Xerath_Jng_Jinx(Ratings):
pass
class NA_Xerath_Jng_Kalista(Ratings):
pass
class NA_Xerath_Jng_Karma(Ratings):
pass
class NA_Xerath_Jng_Karthus(Ratings):
pass
class NA_Xerath_Jng_Kassadin(Ratings):
pass
class NA_Xerath_Jng_Katarina(Ratings):
pass
class NA_Xerath_Jng_Kayle(Ratings):
pass
class NA_Xerath_Jng_Kayn(Ratings):
pass
class NA_Xerath_Jng_Kennen(Ratings):
pass
class NA_Xerath_Jng_Khazix(Ratings):
pass
class NA_Xerath_Jng_Kindred(Ratings):
pass
class NA_Xerath_Jng_Kled(Ratings):
pass
class NA_Xerath_Jng_KogMaw(Ratings):
pass
class NA_Xerath_Jng_Leblanc(Ratings):
pass
class NA_Xerath_Jng_LeeSin(Ratings):
pass
class NA_Xerath_Jng_Leona(Ratings):
pass
class NA_Xerath_Jng_Lissandra(Ratings):
pass
class NA_Xerath_Jng_Lucian(Ratings):
pass
class NA_Xerath_Jng_Lulu(Ratings):
pass
class NA_Xerath_Jng_Lux(Ratings):
pass
class NA_Xerath_Jng_Malphite(Ratings):
pass
class NA_Xerath_Jng_Malzahar(Ratings):
pass
class NA_Xerath_Jng_Maokai(Ratings):
pass
class NA_Xerath_Jng_MasterYi(Ratings):
pass
class NA_Xerath_Jng_MissFortune(Ratings):
pass
class NA_Xerath_Jng_MonkeyKing(Ratings):
pass
class NA_Xerath_Jng_Mordekaiser(Ratings):
pass
class NA_Xerath_Jng_Morgana(Ratings):
pass
class NA_Xerath_Jng_Nami(Ratings):
pass
class NA_Xerath_Jng_Nasus(Ratings):
pass
class NA_Xerath_Jng_Nautilus(Ratings):
pass
class NA_Xerath_Jng_Nidalee(Ratings):
pass
class NA_Xerath_Jng_Nocturne(Ratings):
pass
class NA_Xerath_Jng_Nunu(Ratings):
pass
class NA_Xerath_Jng_Olaf(Ratings):
pass
class NA_Xerath_Jng_Orianna(Ratings):
pass
class NA_Xerath_Jng_Ornn(Ratings):
pass
class NA_Xerath_Jng_Pantheon(Ratings):
pass
class NA_Xerath_Jng_Poppy(Ratings):
pass
class NA_Xerath_Jng_Quinn(Ratings):
pass
class NA_Xerath_Jng_Rakan(Ratings):
pass
class NA_Xerath_Jng_Rammus(Ratings):
pass
class NA_Xerath_Jng_RekSai(Ratings):
pass
class NA_Xerath_Jng_Renekton(Ratings):
pass
class NA_Xerath_Jng_Rengar(Ratings):
pass
class NA_Xerath_Jng_Riven(Ratings):
pass
class NA_Xerath_Jng_Rumble(Ratings):
pass
class NA_Xerath_Jng_Ryze(Ratings):
pass
class NA_Xerath_Jng_Sejuani(Ratings):
pass
class NA_Xerath_Jng_Shaco(Ratings):
pass
class NA_Xerath_Jng_Shen(Ratings):
pass
class NA_Xerath_Jng_Shyvana(Ratings):
pass
class NA_Xerath_Jng_Singed(Ratings):
pass
class NA_Xerath_Jng_Sion(Ratings):
pass
class NA_Xerath_Jng_Sivir(Ratings):
pass
class NA_Xerath_Jng_Skarner(Ratings):
pass
class NA_Xerath_Jng_Sona(Ratings):
pass
class NA_Xerath_Jng_Soraka(Ratings):
pass
class NA_Xerath_Jng_Swain(Ratings):
pass
class NA_Xerath_Jng_Syndra(Ratings):
pass
class NA_Xerath_Jng_TahmKench(Ratings):
pass
class NA_Xerath_Jng_Taliyah(Ratings):
pass
class NA_Xerath_Jng_Talon(Ratings):
pass
class NA_Xerath_Jng_Taric(Ratings):
pass
class NA_Xerath_Jng_Teemo(Ratings):
pass
class NA_Xerath_Jng_Thresh(Ratings):
pass
class NA_Xerath_Jng_Tristana(Ratings):
pass
class NA_Xerath_Jng_Trundle(Ratings):
pass
class NA_Xerath_Jng_Tryndamere(Ratings):
pass
class NA_Xerath_Jng_TwistedFate(Ratings):
pass
class NA_Xerath_Jng_Twitch(Ratings):
pass
class NA_Xerath_Jng_Udyr(Ratings):
pass
class NA_Xerath_Jng_Urgot(Ratings):
pass
class NA_Xerath_Jng_Varus(Ratings):
pass
class NA_Xerath_Jng_Vayne(Ratings):
pass
class NA_Xerath_Jng_Veigar(Ratings):
pass
class NA_Xerath_Jng_Velkoz(Ratings):
pass
class NA_Xerath_Jng_Vi(Ratings):
pass
class NA_Xerath_Jng_Viktor(Ratings):
pass
class NA_Xerath_Jng_Vladimir(Ratings):
pass
class NA_Xerath_Jng_Volibear(Ratings):
pass
class NA_Xerath_Jng_Warwick(Ratings):
pass
class NA_Xerath_Jng_Xayah(Ratings):
pass
class NA_Xerath_Jng_Xerath(Ratings):
pass
class NA_Xerath_Jng_XinZhao(Ratings):
pass
class NA_Xerath_Jng_Yasuo(Ratings):
pass
class NA_Xerath_Jng_Yorick(Ratings):
pass
class NA_Xerath_Jng_Zac(Ratings):
pass
class NA_Xerath_Jng_Zed(Ratings):
pass
class NA_Xerath_Jng_Ziggs(Ratings):
pass
class NA_Xerath_Jng_Zilean(Ratings):
pass
class NA_Xerath_Jng_Zyra(Ratings):
pass
| 15.695444 | 46 | 0.766692 | 972 | 6,545 | 4.736626 | 0.151235 | 0.209818 | 0.389661 | 0.479583 | 0.803432 | 0.803432 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169748 | 6,545 | 416 | 47 | 15.733173 | 0.847258 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
d23b57de3345b633956c7140156932bb999090e1 | 228 | py | Python | rexpro/connection.py | atomos/rexpro-python | 1a5c29e0655236bd14c8a1bda19073c05fb67064 | ["Apache-2.0"] | 5 | 2015-04-23T17:38:38.000Z | 2021-07-13T01:45:04.000Z | rexpro/connection.py | atomos/rexpro-python | 1a5c29e0655236bd14c8a1bda19073c05fb67064 | ["Apache-2.0"] | 7 | 2015-02-27T17:48:01.000Z | 2016-07-01T12:53:46.000Z | rexpro/connection.py | atomos/rexpro-python | 1a5c29e0655236bd14c8a1bda19073c05fb67064 | ["Apache-2.0"] | 5 | 2015-02-09T16:55:00.000Z | 2016-09-09T09:53:13.000Z |
from rexpro.connectors.sync import RexProSyncSocket as RexProSocket
from rexpro.connectors.sync import RexProSyncConnection as RexProConnection
from rexpro.connectors.sync import RexProSyncConnectionPool as RexProConnectionPool
| 57 | 83 | 0.894737 | 24 | 228 | 8.5 | 0.5 | 0.147059 | 0.294118 | 0.352941 | 0.441176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078947 | 228 | 3 | 84 | 76 | 0.971429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d27b80bb12d669a7fbd334ea18d67064f7784ea8 | 2,191 | py | Python | Applied Math/KhanAcademy/plotting.py | darkeclipz/jupyter-notebooks | 5de784244ad9db12cfacbbec3053b11f10456d7e | ["Unlicense"] | 1 | 2018-08-28T12:16:12.000Z | 2018-08-28T12:16:12.000Z | Applied Math/KhanAcademy/plotting.py | darkeclipz/jupyter-notebooks | 5de784244ad9db12cfacbbec3053b11f10456d7e | ["Unlicense"] | null | null | null | Applied Math/KhanAcademy/plotting.py | darkeclipz/jupyter-notebooks | 5de784244ad9db12cfacbbec3053b11f10456d7e | ["Unlicense"] | null | null | null |
import plotly
plotly.offline.init_notebook_mode()
def plot_line_fixed(x,y, title_x='', title_y='', d=[0,1]):
trace = plotly.graph_objs.Scatter(
x = x,
y = y
)
layout = dict(
xaxis = dict(title=title_x,domain=d),
yaxis = dict(title=title_y,scaleanchor='x',scaleratio=1,domain=d),
)
data = [trace]
fig = dict(data=data, layout=layout)
plotly.offline.iplot(fig, filename='line')
def plot_2d_fixed(x,y,z=[], title_x='', title_y='',d=[0,1]):
trace = plotly.graph_objs.Scatter(
x = x,
y = y,
mode = 'markers+text',
name = 'Name',
text = z,
marker = dict(size=16,line=dict(width=2,color='rgba(0,0,0,.4)'), color='rgba(100,100,100,0.3)'),
textposition='top'
)
layout = plotly.graph_objs.Layout(
hovermode='closest',
xaxis=dict(
title=title_x,
domain=d
),
yaxis=dict(
title=title_y,
scaleanchor='x',
scaleratio=1,
domain=d
),
showlegend = False
)
data = [trace]
fig = dict(data=data, layout=layout)
plotly.offline.iplot(fig, filename='line')
def plot_line(x,y, title_x='', title_y=''):
trace = plotly.graph_objs.Scatter(
x = x,
y = y
)
layout = dict(
xaxis = dict(title=title_x),
yaxis = dict(title=title_y),
)
data = [trace]
fig = dict(data=data, layout=layout)
plotly.offline.iplot(fig, filename='line')
def plot_2d(x,y,z=[], title_x='', title_y=''):
trace = plotly.graph_objs.Scatter(
x = x,
y = y,
mode = 'markers+text',
name = 'Name',
text = z,
marker = dict(size=16,line=dict(width=2,color='rgba(0,0,0,.4)'), color='rgba(100,100,100,0.3)'),
textposition='top'
)
layout = plotly.graph_objs.Layout(
hovermode='closest',
xaxis=dict(
title=title_x,
),
yaxis=dict(
title=title_y,
),
showlegend = False
)
data = [trace]
fig = dict(data=data, layout=layout)
plotly.offline.iplot(fig, filename='line')
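# A minimal usage sketch for the helpers above, assuming a Jupyter notebook where
# plotly's offline mode has been initialized (as done at the top of this module):
xs = list(range(10))
ys = [x * x for x in xs]
plot_line(xs, ys, title_x='x', title_y='x squared')
plot_2d(xs, ys, z=[str(x) for x in xs], title_x='x', title_y='x squared')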
| 27.049383 | 104 | 0.532634 | 283 | 2,191 | 4.017668 | 0.183746 | 0.014072 | 0.098505 | 0.042216 | 0.941953 | 0.941953 | 0.938434 | 0.933157 | 0.933157 | 0.933157 | 0 | 0.029043 | 0.308535 | 2,191 | 80 | 105 | 27.3875 | 0.721452 | 0 | 0 | 0.736842 | 0 | 0 | 0.063898 | 0.019169 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.013158 | 0 | 0.065789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d27c3c16f0f91c23b2700e4eba782679c8743373 | 1,557 | py | Python | IMTool/temp.py | naong2/MayaPython | 011c5665d30c27eb42e718564bca6b954718c2ea | ["MIT"] | null | null | null | IMTool/temp.py | naong2/MayaPython | 011c5665d30c27eb42e718564bca6b954718c2ea | ["MIT"] | null | null | null | IMTool/temp.py | naong2/MayaPython | 011c5665d30c27eb42e718564bca6b954718c2ea | ["MIT"] | 1 | 2022-03-23T09:25:27.000Z | 2022-03-23T09:25:27.000Z |
import maya.cmds as cmds
from IMTool.IMUtility.BlendShapeList import *
JBC_BSList = [u'BS_node.mouthFrownLeft', u'BS_node.mouthDimpleRight', u'BS_node.mouthDimpleLeft', u'BS_node.mouthClose', u'BS_node.jawRight', u'BS_node.jawOpen', u'BS_node.jawLeft', u'BS_node.jawForward', u'BS_node.mouthFunnel', u'BS_node.mouthFrownRight', u'BS_node.eyeWideRight', u'BS_node.cheekRaiserLeft', u'BS_node.cheekPuffRight', u'BS_node.cheekPuffLeft', u'BS_node.browOuterUpRight', u'BS_node.browOuterUpLeft', u'BS_node.browInnerUpRight', u'BS_node.browInnerUpLeft', u'BS_node.browDownRight', u'BS_node.browDownLeft', u'BS_node.noseSneerRight', u'BS_node.noseSneerLeft', u'BS_node.mouthUpperUpRight', u'BS_node.mouthUpperUpLeft', u'BS_node.mouthStretchRight', u'BS_node.mouthStretchLeft', u'BS_node.eyeLookInLeft', u'BS_node.eyeLookDownRight', u'BS_node.eyeLookDownLeft', u'BS_node.eyeBlinkRight', u'BS_node.eyeBlinkLeft', u'BS_node.cheekSquintRight', u'BS_node.cheekSquintLeft', u'BS_node.cheekRaiserRight', u'BS_node.eyeWideLeft', u'BS_node.eyeSquintRight', u'BS_node.eyeSquintLeft', u'BS_node.eyeLookUpRight', u'BS_node.eyeLookUpLeft', u'BS_node.eyeLookOutRight', u'BS_node.eyeLookOutLeft', u'BS_node.eyeLookInRight', u'BS_node.mouthPucker', u'BS_node.mouthLowerDownRight', u'BS_node.mouthPressRight', u'BS_node.mouthPressLeft', u'BS_node.mouthRollLower', u'BS_node.mouthRight', u'BS_node.mouthRollUpper', u'BS_node.mouthSmileLeft', u'BS_node.mouthShrugUpper', u'BS_node.mouthShrugLower', u'BS_node.mouthSmileRight', u'BS_node.mouthLowerDownLeft', u'BS_node.mouthLeft']
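# A minimal sketch of how such an attribute list is typically consumed in Maya
# (setAttr/objExists are standard maya.cmds calls; zeroing every weight is an
# illustrative assumption, not part of the original tool):
for attr in JBC_BSList:
    if cmds.objExists(attr):
        cmds.setAttr(attr, 0.0)  # reset this blendshape weight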
| 222.428571 | 1,482 | 0.811175 | 232 | 1,557 | 5.202586 | 0.293103 | 0.136703 | 0.318973 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043674 | 1,557 | 6 | 1,483 | 259.5 | 0.810611 | 0 | 0 | 0 | 0 | 0 | 0.767352 | 0.616967 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
964ad3058c811712fbc4b0ec06b5ade1cd8dc342 | 11,844 | py | Python | SimModel_Python_API/simmodel_swig/Release/SimTimeSeriesSchedule_Week_Daily.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | ["MIT"] | 3 | 2016-05-30T15:12:16.000Z | 2022-03-22T08:11:13.000Z | SimModel_Python_API/simmodel_swig/Release/SimTimeSeriesSchedule_Week_Daily.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | ["MIT"] | 21 | 2016-06-13T11:33:45.000Z | 2017-05-23T09:46:52.000Z | SimModel_Python_API/simmodel_swig/Release/SimTimeSeriesSchedule_Week_Daily.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | ["MIT"] | null | null | null |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_SimTimeSeriesSchedule_Week_Daily', [dirname(__file__)])
except ImportError:
import _SimTimeSeriesSchedule_Week_Daily
return _SimTimeSeriesSchedule_Week_Daily
if fp is not None:
try:
_mod = imp.load_module('_SimTimeSeriesSchedule_Week_Daily', fp, pathname, description)
finally:
fp.close()
return _mod
_SimTimeSeriesSchedule_Week_Daily = swig_import_helper()
del swig_import_helper
else:
import _SimTimeSeriesSchedule_Week_Daily
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
try:
import weakref
weakref_proxy = weakref.proxy
except:
weakref_proxy = lambda x: x
import base
import SimTimeSeriesSchedule_Calendar_Configuration
class SimTimeSeriesSchedule_Week(SimTimeSeriesSchedule_Calendar_Configuration.SimTimeSeriesSchedule):
__swig_setmethods__ = {}
for _s in [SimTimeSeriesSchedule_Calendar_Configuration.SimTimeSeriesSchedule]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SimTimeSeriesSchedule_Week, name, value)
__swig_getmethods__ = {}
for _s in [SimTimeSeriesSchedule_Calendar_Configuration.SimTimeSeriesSchedule]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SimTimeSeriesSchedule_Week, name)
__repr__ = _swig_repr
def SimTimeSeriesSched_Name(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_SimTimeSeriesSched_Name(self, *args)
def SimTimeSeriesSched_TimeSeriesSchedType(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_SimTimeSeriesSched_TimeSeriesSchedType(self, *args)
def T24SchedDayAllDayRef(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_T24SchedDayAllDayRef(self, *args)
def T24SchedDayWkdaysRef(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_T24SchedDayWkdaysRef(self, *args)
def T24SchedDayWkEndsRef(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_T24SchedDayWkEndsRef(self, *args)
def __init__(self, *args):
this = _SimTimeSeriesSchedule_Week_Daily.new_SimTimeSeriesSchedule_Week(*args)
try:
self.this.append(this)
except:
self.this = this
def _clone(self, f=0, c=None):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week__clone(self, f, c)
__swig_destroy__ = _SimTimeSeriesSchedule_Week_Daily.delete_SimTimeSeriesSchedule_Week
__del__ = lambda self: None
SimTimeSeriesSchedule_Week_swigregister = _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_swigregister
SimTimeSeriesSchedule_Week_swigregister(SimTimeSeriesSchedule_Week)
class SimTimeSeriesSchedule_Week_Daily(SimTimeSeriesSchedule_Week):
__swig_setmethods__ = {}
for _s in [SimTimeSeriesSchedule_Week]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SimTimeSeriesSchedule_Week_Daily, name, value)
__swig_getmethods__ = {}
for _s in [SimTimeSeriesSchedule_Week]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SimTimeSeriesSchedule_Week_Daily, name)
__repr__ = _swig_repr
def SimTimeSeriesSched_SundaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_SundaySched_DayName(self, *args)
def SimTimeSeriesSched_MondaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_MondaySched_DayName(self, *args)
def SimTimeSeriesSched_TuesdaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_TuesdaySched_DayName(self, *args)
def SimTimeSeriesSched_WedSched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_WedSched_DayName(self, *args)
def SimTimeSeriesSched_ThursdaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_ThursdaySched_DayName(self, *args)
def SimTimeSeriesSched_FridaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_FridaySched_DayName(self, *args)
def SimTimeSeriesSched_SaturdaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_SaturdaySched_DayName(self, *args)
def SimTimeSeriesSched_HolidaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_HolidaySched_DayName(self, *args)
def SimTimeSeriesSched_SummerDesignDaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_SummerDesignDaySched_DayName(self, *args)
def SimTimeSeriesSched_WinterDesignDaySched_DayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_WinterDesignDaySched_DayName(self, *args)
def SimTimeSeriesSched_CustomDay_1_2_SchedDayName(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_SimTimeSeriesSched_CustomDay_1_2_SchedDayName(self, *args)
def __init__(self, *args):
this = _SimTimeSeriesSchedule_Week_Daily.new_SimTimeSeriesSchedule_Week_Daily(*args)
try:
self.this.append(this)
except:
self.this = this
def _clone(self, f=0, c=None):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily__clone(self, f, c)
__swig_destroy__ = _SimTimeSeriesSchedule_Week_Daily.delete_SimTimeSeriesSchedule_Week_Daily
__del__ = lambda self: None
SimTimeSeriesSchedule_Week_Daily_swigregister = _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_swigregister
SimTimeSeriesSchedule_Week_Daily_swigregister(SimTimeSeriesSchedule_Week_Daily)
class SimTimeSeriesSchedule_Week_Daily_sequence(base.sequence_common):
__swig_setmethods__ = {}
for _s in [base.sequence_common]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SimTimeSeriesSchedule_Week_Daily_sequence, name, value)
__swig_getmethods__ = {}
for _s in [base.sequence_common]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SimTimeSeriesSchedule_Week_Daily_sequence, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _SimTimeSeriesSchedule_Week_Daily.new_SimTimeSeriesSchedule_Week_Daily_sequence(*args)
try:
self.this.append(this)
except:
self.this = this
def assign(self, n, x):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_assign(self, n, x)
def begin(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_begin(self, *args)
def end(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_end(self, *args)
def rbegin(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_rbegin(self, *args)
def rend(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_rend(self, *args)
def at(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_at(self, *args)
def front(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_front(self, *args)
def back(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_back(self, *args)
def push_back(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_push_back(self, *args)
def pop_back(self):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_pop_back(self)
def detach_back(self, pop=True):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_detach_back(self, pop)
def insert(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_insert(self, *args)
def erase(self, *args):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_erase(self, *args)
def detach(self, position, r, erase=True):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_detach(self, position, r, erase)
def swap(self, x):
return _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_swap(self, x)
__swig_destroy__ = _SimTimeSeriesSchedule_Week_Daily.delete_SimTimeSeriesSchedule_Week_Daily_sequence
__del__ = lambda self: None
SimTimeSeriesSchedule_Week_Daily_sequence_swigregister = _SimTimeSeriesSchedule_Week_Daily.SimTimeSeriesSchedule_Week_Daily_sequence_swigregister
SimTimeSeriesSchedule_Week_Daily_sequence_swigregister(SimTimeSeriesSchedule_Week_Daily_sequence)
# This file is compatible with both classic and new-style classes.
| 43.866667 | 145 | 0.772121 | 1,241 | 11,844 | 6.809831 | 0.131346 | 0.328364 | 0.330138 | 0.223287 | 0.75707 | 0.657555 | 0.618625 | 0.567033 | 0.48089 | 0.407762 | 0 | 0.003209 | 0.15797 | 11,844 | 269 | 146 | 44.02974 | 0.844179 | 0.024823 | 0 | 0.320197 | 1 | 0 | 0.022704 | 0.005719 | 0 | 0 | 0 | 0 | 0 | 1 | 0.206897 | false | 0.009852 | 0.059113 | 0.172414 | 0.605911 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
967d5d46510002207ce85c4ef1e4124928dbc8e3 | 138 | py | Python | torchsample/transforms/__init__.py | wavepak/torchsample | 6b366d65cd0ae97f48b4825d04a36b45aff4699a | ["MIT"] | 1,934 | 2017-03-02T02:25:41.000Z | 2022-03-29T16:15:19.000Z | torchsample/transforms/__init__.py | wavepak/torchsample | 6b366d65cd0ae97f48b4825d04a36b45aff4699a | ["MIT"] | 96 | 2017-03-05T19:09:06.000Z | 2022-02-04T21:37:46.000Z | torchsample/transforms/__init__.py | wavepak/torchsample | 6b366d65cd0ae97f48b4825d04a36b45aff4699a | ["MIT"] | 337 | 2017-03-02T05:55:43.000Z | 2022-03-15T02:01:45.000Z |
from __future__ import absolute_import
from .affine_transforms import *
from .image_transforms import *
from .tensor_transforms import *
| 23 | 38 | 0.833333 | 17 | 138 | 6.294118 | 0.470588 | 0.280374 | 0.373832 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.123188 | 138 | 6 | 39 | 23 | 0.884298 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
73816c83d248c030855ac678a75e5156bf0beb3a | 38,589 | py | Python | framework/graphs/html_templates.py | 2spmohanty/Performance | 5a86a56f40bad1e12654fe1bb737affb4e0edd26 | ["Apache-2.0"] | null | null | null | framework/graphs/html_templates.py | 2spmohanty/Performance | 5a86a56f40bad1e12654fe1bb737affb4e0edd26 | ["Apache-2.0"] | null | null | null | framework/graphs/html_templates.py | 2spmohanty/Performance | 5a86a56f40bad1e12654fe1bb737affb4e0edd26 | ["Apache-2.0"] | null | null | null |
# Memory Stats ####
vm_mem="""
<!doctype html>
<html lang="en">
<head>
<style>
#chartdiv {
width: 100%%;
height: 900px;
}
</style>
<script src="https://www.amcharts.com/lib/4/core.js"></script>
<script src="https://www.amcharts.com/lib/4/charts.js"></script>
<script src="https://www.amcharts.com/lib/4/themes/animated.js"></script>
<script type="text/javascript" src="%(datafile)s"></script>
<script type="text/javascript" src="creategraph.js"></script>
<script type="text/javascript">
function draw() {
//console.log(data)
am4core.ready(function () {
// Themes begin
var titlearray = []
var indx = 1
am4core.useTheme(am4themes_animated);
// Themes end
var chart = am4core.create("chartdiv", am4charts.XYChart);
chart.colors.list = [
am4core.color("#CD5C5C"),am4core.color("#FF9900"),am4core.color("#0066CC"),am4core.color("#CCCC00"),
am4core.color("#669933"),am4core.color("#CCCCFF"),am4core.color("#CC9900"),am4core.color("#999966"),
am4core.color("#CC99CC"),am4core.color("#FF9999"),am4core.color("#CC6600"),am4core.color("#CC66CC"),
am4core.color("#0066FF"),am4core.color("#FF6699"),am4core.color("#CC3300"),am4core.color("#CC33CC"),
am4core.color("#99CC66"),am4core.color("#FF3399"),am4core.color("#66CCCC"),am4core.color("#669999"),
am4core.color("#CC0000"),am4core.color("#660000"),am4core.color("#3366CC"),am4core.color("#6600CC"),
am4core.color("#663300"),am4core.color("#993300"),am4core.color("#666600"),am4core.color("#666699"),
am4core.color("#6666FF"),am4core.color("#669900"),am4core.color("#003333")
];
colorindx = 0
var scrollbarX = new am4charts.XYChartScrollbar();
var interfaceColors = new am4core.InterfaceColorSet();
chart.data = data;
// the following line makes value axes to be arranged vertically.
chart.leftAxesContainer.layout = "vertical";
var dateAxis = chart.xAxes.push(new am4charts.DateAxis());
dateAxis.renderer.grid.template.location = 0;
dateAxis.renderer.ticks.template.length = 8;
dateAxis.renderer.ticks.template.strokeOpacity = 0.1;
dateAxis.renderer.grid.template.disabled = true;
dateAxis.renderer.ticks.template.disabled = false;
dateAxis.renderer.ticks.template.strokeOpacity = 0.2;
var valueAxis = chart.yAxes.push(new am4charts.ValueAxis());
valueAxis.tooltip.disabled = false;
valueAxis.title.text = "Progress %%";
valueAxis.zIndex = 1;
valueAxis.renderer.baseGrid.disabled = true;
// Set up axis
valueAxis.renderer.inside = true;
valueAxis.height = am4core.percent(60);
valueAxis.renderer.labels.template.verticalCenter = "bottom";
valueAxis.renderer.labels.template.padding(2, 2, 2, 2);
//valueAxis.renderer.maxLabelPosition = 0.95;
valueAxis.renderer.fontSize = "0.8em"
// uncomment these lines to fill plot area of this axis with some color
valueAxis.renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
valueAxis.renderer.gridContainer.background.fillOpacity = 0.05;
//Iterate on all VMs and Plot
vmarray.forEach(element => {
var series0 = chart.series.push(new am4charts.LineSeries());
series0.dataFields.dateX = "date";
series0.dataFields.valueY = [element];
series0.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx +1 ;
//series.tooltipText = "{valueY.value}";
var bullet = series0.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value}"
series0.name = [element];
scrollbarX.series.push(series0);
});
titlearray.push({ "Axes": valueAxis, "Name": "VM Operation Data" })
//Render Memory
//console.log(memarray)
memarray.forEach(element => {
var axisName = 'valueAxis' + indx
indx = indx + 1;
//console.log(axisName)
window[axisName] = chart.yAxes.push(new am4charts.ValueAxis());
window[axisName].tooltip.disabled = true;
// this makes gap between panels
window[axisName].marginTop = 30;
window[axisName].renderer.baseGrid.disabled = true;
window[axisName].renderer.inside = true;
window[axisName].height = am4core.percent(30);
window[axisName].zIndex = 10
window[axisName].renderer.labels.template.verticalCenter = "bottom";
window[axisName].renderer.labels.template.padding(2, 2, 2, 2);
//valueAxis2.renderer.maxLabelPosition = 0.95;
window[axisName].renderer.fontSize = "0.8em"
window[axisName].title.text = "%%";
// uncomment these lines to fill plot area of this axis with some color
window[axisName].renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
window[axisName].renderer.gridContainer.background.fillOpacity = 0.05;
var memkey = element + "memusage" ;
var series1 = chart.series.push(new am4charts.LineSeries());
series1.dataFields.dateX = "date";
series1.dataFields.valueY = [memkey];
series1.yAxis = window[axisName];
series1.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx +1 ;
//series.tooltipText = "{valueY.value}";
var bullet = series1.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value}"
series1.name = [element] + "- Memory %%";
series1.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx +1 ;
scrollbarX.series.push(series1);
titlearray.push({ "Axes": window[axisName], "Name": [element] + " : Memory Usage %%" } );
});
chart.legend = new am4charts.Legend();
// Set Title for Subplots
title_indx = 1
titlearray.forEach(element => {
//var title_axis = element["Axes"];
var title_name = element["Name"];
console.log(element["Axes"].title.text)
var title2 = "title" + title_indx
title_indx = title_indx + 1;
window[title2] = element["Axes"].renderer.gridContainer.createChild(am4core.Label);
window[title2].text = "[bold]" + title_name;
//window[title2].fill = series.fill;
window[title2].isMeasured = false;
window[title2].y = 15;
window[title2].x = am4core.percent(50);
window[title2].align = "center";
window[title2].textAlign = "middle";
}
);
chart.cursor = new am4charts.XYCursor();
chart.cursor.xAxis = dateAxis;
scrollbarX.marginBottom = 20;
chart.scrollbarX = scrollbarX;
// Enable export
chart.exporting.menu = new am4core.ExportMenu();
chart.exporting.menu.align = "left";
chart.exporting.menu.verticalAlign = "top";
}); // end am4core.ready()
}
</script>
</head>
<body onload="load();draw();">
<div id="chartdiv"> </div>
<div id="details"> Source : %(srchost)s</div>
<div id="details"> Destination : %(desthost)s</div>
</body>
</html>
"""
####### Datastore Stats ######
vm_ds="""
<!doctype html>
<html lang="en">
<head>
<style>
#chartdiv {
width: 100%%;
height: 900px;
}
</style>
<script src="https://www.amcharts.com/lib/4/core.js"></script>
<script src="https://www.amcharts.com/lib/4/charts.js"></script>
<script src="https://www.amcharts.com/lib/4/themes/animated.js"></script>
<script type="text/javascript" src="%(datafile)s"></script>
<script type="text/javascript" src="creategraph.js"></script>
<script type="text/javascript">
function draw() {
//console.log(data)
am4core.ready(function () {
// Themes begin
var titlearray = []
var indx = 1
am4core.useTheme(am4themes_animated);
// Themes end
var chart = am4core.create("chartdiv", am4charts.XYChart);
chart.colors.list = [
am4core.color("#CD5C5C"),am4core.color("#FF9900"),am4core.color("#0066CC"),am4core.color("#CCCC00"),
am4core.color("#669933"),am4core.color("#CCCCFF"),am4core.color("#CC9900"),am4core.color("#999966"),
am4core.color("#CC99CC"),am4core.color("#FF9999"),am4core.color("#CC6600"),am4core.color("#CC66CC"),
am4core.color("#0066FF"),am4core.color("#FF6699"),am4core.color("#CC3300"),am4core.color("#CC33CC"),
am4core.color("#99CC66"),am4core.color("#FF3399"),am4core.color("#66CCCC"),am4core.color("#669999"),
am4core.color("#CC0000"),am4core.color("#660000"),am4core.color("#3366CC"),am4core.color("#6600CC"),
am4core.color("#663300"),am4core.color("#993300"),am4core.color("#666600"),am4core.color("#666699"),
am4core.color("#6666FF"),am4core.color("#669900"),am4core.color("#003333")
];
colorindx = 0
var scrollbarX = new am4charts.XYChartScrollbar();
var interfaceColors = new am4core.InterfaceColorSet();
chart.data = data;
// the following line makes value axes to be arranged vertically.
chart.leftAxesContainer.layout = "vertical";
var dateAxis = chart.xAxes.push(new am4charts.DateAxis());
dateAxis.renderer.grid.template.location = 0;
dateAxis.renderer.ticks.template.length = 8;
dateAxis.renderer.ticks.template.strokeOpacity = 0.1;
dateAxis.renderer.grid.template.disabled = true;
dateAxis.renderer.ticks.template.disabled = false;
dateAxis.renderer.ticks.template.strokeOpacity = 0.2;
var valueAxis = chart.yAxes.push(new am4charts.ValueAxis());
valueAxis.tooltip.disabled = false;
valueAxis.title.text = "Progress %%";
valueAxis.zIndex = 1;
valueAxis.renderer.baseGrid.disabled = true;
// Set up axis
valueAxis.renderer.inside = true;
valueAxis.height = am4core.percent(60);
valueAxis.renderer.labels.template.verticalCenter = "bottom";
valueAxis.renderer.labels.template.padding(2, 2, 2, 2);
//valueAxis.renderer.maxLabelPosition = 0.95;
valueAxis.renderer.fontSize = "0.8em"
// uncomment these lines to fill plot area of this axis with some color
valueAxis.renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
valueAxis.renderer.gridContainer.background.fillOpacity = 0.05;
//Iterate on all VMs and Plot
vmarray.forEach(element => {
var series0 = chart.series.push(new am4charts.LineSeries());
series0.dataFields.dateX = "date";
series0.dataFields.valueY = [element];
series0.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx +1 ;
//series.tooltipText = "{valueY.value}";
var bullet = series0.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value}"
series0.name = [element];
scrollbarX.series.push(series0);
});
titlearray.push({ "Axes": valueAxis, "Name": "VM-Data" })
//Render Datastore
dsarray.forEach(element => {
//console.log(element)
var axisName = 'valueAxis' + indx
indx = indx + 1;
window[axisName] = chart.yAxes.push(new am4charts.ValueAxis());
window[axisName].tooltip.disabled = true;
// this makes gap between panels
window[axisName].marginTop = 30;
window[axisName].renderer.baseGrid.disabled = true;
window[axisName].renderer.inside = true;
window[axisName].height = am4core.percent(30);
window[axisName].zIndex = 10
window[axisName].renderer.labels.template.verticalCenter = "bottom";
window[axisName].renderer.labels.template.padding(2, 2, 2, 2);
//valueAxis2.renderer.maxLabelPosition = 0.95;
window[axisName].renderer.fontSize = "0.8em"
window[axisName].title.text = "ms";
// uncomment these lines to fill plot area of this axis with some color
window[axisName].renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
window[axisName].renderer.gridContainer.background.fillOpacity = 0.05;
var rKey = element + "ReadLatency"
var wKey = element + "WriteLatency"
var series2 = chart.series.push(new am4charts.LineSeries());
series2.yAxis = window[axisName];
series2.dataFields.dateX = "date";
series2.dataFields.valueY = [rKey];
series2.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx +1 ;
var bullet = series2.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value}"
series2.name = [element] + "-ReadLatency";
scrollbarX.series.push(series2);
var series3 = chart.series.push(new am4charts.LineSeries());
series3.yAxis = window[axisName];
series3.dataFields.dateX = "date";
series3.dataFields.valueY = [wKey];
series3.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx +1 ;
var bullet = series3.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value}"
series3.name = [element] + "-WriteLatency";
scrollbarX.series.push(series3);
titlearray.push({ "Axes": window[axisName], "Name": testdata.datastore[element][0].Host + ", " +[element] + " : " + "DataStore Latency" })
});
// Render one I/O panel per datastore
dsarray.forEach(element => {
var axisName = 'valueAxis' + indx;
indx = indx + 1;
window[axisName] = chart.yAxes.push(new am4charts.ValueAxis());
window[axisName].tooltip.disabled = true;
// leave a gap between panels
window[axisName].marginTop = 30;
window[axisName].renderer.baseGrid.disabled = true;
window[axisName].renderer.inside = true;
window[axisName].height = am4core.percent(30);
window[axisName].zIndex = 10;
window[axisName].renderer.labels.template.verticalCenter = "bottom";
window[axisName].renderer.labels.template.padding(2, 2, 2, 2);
window[axisName].renderer.fontSize = "0.8em";
window[axisName].title.text = "KBps";
// fill the plot area of this axis with a faint background color
window[axisName].renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
window[axisName].renderer.gridContainer.background.fillOpacity = 0.05;
var readKey = element + "Read";
var writeKey = element + "Write";
var series2 = chart.series.push(new am4charts.LineSeries());
series2.yAxis = window[axisName];
series2.dataFields.dateX = "date";
series2.dataFields.valueY = readKey;
series2.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx + 1;
var bullet = series2.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value}";
series2.name = element + "-Read";
scrollbarX.series.push(series2);
var series3 = chart.series.push(new am4charts.LineSeries());
series3.yAxis = window[axisName];
series3.dataFields.dateX = "date";
series3.dataFields.valueY = writeKey;
series3.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx + 1;
var bullet2 = series3.bullets.push(new am4charts.CircleBullet());
bullet2.tooltipText = "{valueY.value}";
series3.name = element + "-Write";
scrollbarX.series.push(series3);
titlearray.push({ "Axes": window[axisName], "Name": testdata.datastore[element][0].Host + ", " + element + " : DataStore I/O" });
});
chart.legend = new am4charts.Legend();
// Set a title for each subplot
var title_indx = 1;
titlearray.forEach(element => {
var title_name = element["Name"];
var title2 = "title" + title_indx;
title_indx = title_indx + 1;
window[title2] = element["Axes"].renderer.gridContainer.createChild(am4core.Label);
window[title2].text = "[bold]" + title_name;
window[title2].isMeasured = false;
window[title2].y = 15;
window[title2].x = am4core.percent(50);
window[title2].align = "center";
window[title2].textAlign = "middle";
});
chart.cursor = new am4charts.XYCursor();
chart.cursor.xAxis = dateAxis;
scrollbarX.marginBottom = 20;
chart.scrollbarX = scrollbarX;
// Enable export
chart.exporting.menu = new am4core.ExportMenu();
chart.exporting.menu.align = "left";
chart.exporting.menu.verticalAlign = "top";
}); // end am4core.ready()
}
</script>
</head>
<body onload="load();draw();">
<div id="chartdiv"> </div>
<div id="details"> Source : %(srchost)s</div>
<div id="details"> Destination : %(desthost)s</div>
</body>
</html>
"""
######## Only VM #######
vm_only="""
<!doctype html>
<html lang="en">
<head>
<style>
#chartdiv {
width: 100%%;
height: 900px;
}
</style>
<script src="https://www.amcharts.com/lib/4/core.js"></script>
<script src="https://www.amcharts.com/lib/4/charts.js"></script>
<script src="https://www.amcharts.com/lib/4/themes/animated.js"></script>
<script type="text/javascript" src="%(datafile)s"></script>
<script type="text/javascript" src="creategraph.js"></script>
<script type="text/javascript">
function draw() {
//console.log(data)
am4core.ready(function () {
// Themes begin
var titlearray = []
var indx = 1
am4core.useTheme(am4themes_animated);
// Themes end
var chart = am4core.create("chartdiv", am4charts.XYChart);
chart.colors.list = [
am4core.color("#3498DB"),
am4core.color("#CD5C5C"), am4core.color("#FF9900"), am4core.color("#0066CC"), am4core.color("#CCCC00"),
am4core.color("#669933"), am4core.color("#CCCCFF"), am4core.color("#CC9900"), am4core.color("#999966"),
am4core.color("#CC99CC"), am4core.color("#FF9999"), am4core.color("#CC6600"), am4core.color("#CC66CC"),
am4core.color("#0066FF"), am4core.color("#FF6699"), am4core.color("#CC3300"), am4core.color("#CC33CC"),
am4core.color("#99CC66"), am4core.color("#FF3399"), am4core.color("#66CCCC"), am4core.color("#669999"),
am4core.color("#CC0000"), am4core.color("#660000"), am4core.color("#3366CC"), am4core.color("#6600CC"),
am4core.color("#663300"), am4core.color("#993300"), am4core.color("#666600"), am4core.color("#666699"),
am4core.color("#6666FF"), am4core.color("#669900"), am4core.color("#003333")
];
var colorindx = 0;
var scrollbarX = new am4charts.XYChartScrollbar();
var scrollbarY = new am4charts.XYChartScrollbar();
var interfaceColors = new am4core.InterfaceColorSet();
chart.data = data;
// the following line arranges the value axes vertically
chart.leftAxesContainer.layout = "vertical";
var dateAxis = chart.xAxes.push(new am4charts.DateAxis());
dateAxis.renderer.grid.template.location = 0;
dateAxis.renderer.ticks.template.length = 8;
dateAxis.renderer.grid.template.disabled = true;
dateAxis.renderer.ticks.template.disabled = false;
dateAxis.renderer.ticks.template.strokeOpacity = 0.2;
var valueAxis = chart.yAxes.push(new am4charts.ValueAxis());
valueAxis.tooltip.disabled = false;
valueAxis.title.text = "Progress %%";
valueAxis.zIndex = 1;
valueAxis.renderer.baseGrid.disabled = true;
// Set up axis
valueAxis.renderer.inside = true;
valueAxis.height = am4core.percent(60);
valueAxis.renderer.labels.template.verticalCenter = "bottom";
valueAxis.renderer.labels.template.padding(2, 2, 2, 2);
valueAxis.renderer.fontSize = "0.8em";
// fill the plot area of this axis with a faint background color
valueAxis.renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
valueAxis.renderer.gridContainer.background.fillOpacity = 0.05;
// Iterate over all VMs and plot each as a line series
vmarray.forEach(element => {
var series0 = chart.series.push(new am4charts.LineSeries());
series0.dataFields.dateX = "date";
series0.dataFields.valueY = element;
series0.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx + 1;
var bullet = series0.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value} : " + element;
series0.name = element;
scrollbarX.series.push(series0);
scrollbarY.series.push(series0);
});
titlearray.push({ "Axes": valueAxis, "Name": "VMs Operation Data Progress / Time" });
chart.legend = new am4charts.Legend();
// Set a title for each subplot
var title_indx = 1;
titlearray.forEach(element => {
var title_name = element["Name"];
var title2 = "title" + title_indx;
title_indx = title_indx + 1;
window[title2] = element["Axes"].renderer.gridContainer.createChild(am4core.Label);
window[title2].text = "[bold]" + title_name;
window[title2].fill = chart.colors.getIndex(0);
window[title2].isMeasured = false;
window[title2].y = 15;
window[title2].x = am4core.percent(40);
window[title2].align = "center";
window[title2].textAlign = "middle";
});
chart.cursor = new am4charts.XYCursor();
chart.cursor.xAxis = dateAxis;
scrollbarX.marginBottom = 20;
chart.scrollbarX = scrollbarX;
chart.scrollbarY = new am4core.Scrollbar();
// Enable export
chart.exporting.menu = new am4core.ExportMenu();
chart.exporting.menu.align = "left";
chart.exporting.menu.verticalAlign = "top";
}); // end am4core.ready()
}
</script>
</head>
<body onload="load();draw();">
<div id="chartdiv"> </div>
<div id="details"> Source : %(srchost)s</div>
<div id="details"> Destination : %(desthost)s</div>
</body>
</html>
"""
########### NIC Stats ######
vm_nic="""
<!doctype html>
<html lang="en">
<head>
<style>
#chartdiv {
width: 100%%;
height: 900px;
}
</style>
<script src="https://www.amcharts.com/lib/4/core.js"></script>
<script src="https://www.amcharts.com/lib/4/charts.js"></script>
<script src="https://www.amcharts.com/lib/4/themes/animated.js"></script>
<script type="text/javascript" src="%(datafile)s"></script>
<script type="text/javascript" src="creategraph.js"></script>
<script type="text/javascript">
function draw() {
//console.log(data)
am4core.ready(function () {
// Themes begin
var titlearray = []
var indx = 1
am4core.useTheme(am4themes_animated);
// Themes end
var chart = am4core.create("chartdiv", am4charts.XYChart);
chart.colors.list = [
am4core.color("#CD5C5C"),am4core.color("#FF9900"),am4core.color("#0066CC"),am4core.color("#CCCC00"),
am4core.color("#669933"),am4core.color("#CCCCFF"),am4core.color("#CC9900"),am4core.color("#999966"),
am4core.color("#CC99CC"),am4core.color("#FF9999"),am4core.color("#CC6600"),am4core.color("#CC66CC"),
am4core.color("#0066FF"),am4core.color("#FF6699"),am4core.color("#CC3300"),am4core.color("#CC33CC"),
am4core.color("#99CC66"),am4core.color("#FF3399"),am4core.color("#66CCCC"),am4core.color("#669999"),
am4core.color("#CC0000"),am4core.color("#660000"),am4core.color("#3366CC"),am4core.color("#6600CC"),
am4core.color("#663300"),am4core.color("#993300"),am4core.color("#666600"),am4core.color("#666699"),
am4core.color("#6666FF"),am4core.color("#669900"),am4core.color("#003333")
];
var colorindx = 0;
var scrollbarX = new am4charts.XYChartScrollbar();
var interfaceColors = new am4core.InterfaceColorSet();
chart.data = data;
// the following line arranges the value axes vertically
chart.leftAxesContainer.layout = "vertical";
var dateAxis = chart.xAxes.push(new am4charts.DateAxis());
dateAxis.renderer.grid.template.location = 0;
dateAxis.renderer.ticks.template.length = 8;
dateAxis.renderer.grid.template.disabled = true;
dateAxis.renderer.ticks.template.disabled = false;
dateAxis.renderer.ticks.template.strokeOpacity = 0.2;
var valueAxis = chart.yAxes.push(new am4charts.ValueAxis());
valueAxis.tooltip.disabled = false;
valueAxis.title.text = "Progress %%";
valueAxis.zIndex = 1;
valueAxis.renderer.baseGrid.disabled = true;
// Set up axis
valueAxis.renderer.inside = true;
valueAxis.height = am4core.percent(60);
valueAxis.renderer.labels.template.verticalCenter = "bottom";
valueAxis.renderer.labels.template.padding(2, 2, 2, 2);
valueAxis.renderer.fontSize = "0.8em";
// fill the plot area of this axis with a faint background color
valueAxis.renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
valueAxis.renderer.gridContainer.background.fillOpacity = 0.05;
// Iterate over all VMs and plot each as a line series
vmarray.forEach(element => {
var series0 = chart.series.push(new am4charts.LineSeries());
series0.dataFields.dateX = "date";
series0.dataFields.valueY = element;
series0.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx + 1;
var bullet = series0.bullets.push(new am4charts.CircleBullet());
bullet.tooltipText = "{valueY.value}";
series0.name = element;
scrollbarX.series.push(series0);
});
titlearray.push({ "Axes": valueAxis, "Name": "VM Operation Data" });
// Render one bandwidth panel per NIC
nicarray.forEach(element => {
var axisName = 'valueAxis' + indx;
indx = indx + 1;
window[axisName] = chart.yAxes.push(new am4charts.ValueAxis());
window[axisName].tooltip.disabled = true;
// leave a gap between panels
window[axisName].marginTop = 30;
window[axisName].renderer.baseGrid.disabled = true;
window[axisName].renderer.inside = true;
window[axisName].height = am4core.percent(30);
window[axisName].zIndex = 10;
window[axisName].renderer.labels.template.verticalCenter = "bottom";
window[axisName].renderer.labels.template.padding(2, 2, 2, 2);
window[axisName].renderer.fontSize = "0.8em";
window[axisName].title.text = "Mbps";
// fill the plot area of this axis with a faint background color
window[axisName].renderer.gridContainer.background.fill = interfaceColors.getFor("alternativeBackground");
window[axisName].renderer.gridContainer.background.fillOpacity = 0.05;
var series1 = chart.series.push(new am4charts.LineSeries());
series1.dataFields.dateX = "date";
series1.dataFields.valueY = element;
series1.yAxis = window[axisName];
series1.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx + 1;
var bullet1 = series1.bullets.push(new am4charts.CircleBullet());
bullet1.tooltipText = "{valueY.value}";
series1.name = element + "-" + testdata.nic[element][0].vnic + "-totalUsage";
scrollbarX.series.push(series1);
var skey = element + "sent";
var gkey = element + "got";
var series2 = chart.series.push(new am4charts.LineSeries());
series2.dataFields.dateX = "date";
series2.dataFields.valueY = skey;
series2.yAxis = window[axisName];
series2.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx + 1;
var bullet2 = series2.bullets.push(new am4charts.CircleBullet());
bullet2.tooltipText = "{valueY.value}";
series2.name = element + "-" + testdata.nic[element][0].vnic + "-Tx";
scrollbarX.series.push(series2);
var series3 = chart.series.push(new am4charts.LineSeries());
series3.dataFields.dateX = "date";
series3.dataFields.valueY = gkey;
series3.yAxis = window[axisName];
series3.fill = chart.colors.getIndex(colorindx);
colorindx = colorindx + 1;
var bullet3 = series3.bullets.push(new am4charts.CircleBullet());
bullet3.tooltipText = "{valueY.value}";
series3.name = element + "-" + testdata.nic[element][0].vnic + "-Rx";
scrollbarX.series.push(series3);
titlearray.push({ "Axes": window[axisName], "Name": element + " : " + testdata.nic[element][0].vnic +
", Max Bandwidth: " + testdata.nic[element][0].vnicmax + " Mb" });
});
chart.legend = new am4charts.Legend();
// Set a title for each subplot
var title_indx = 1;
titlearray.forEach(element => {
var title_name = element["Name"];
var title2 = "title" + title_indx;
title_indx = title_indx + 1;
window[title2] = element["Axes"].renderer.gridContainer.createChild(am4core.Label);
window[title2].text = "[bold]" + title_name;
window[title2].isMeasured = false;
window[title2].y = 15;
window[title2].x = am4core.percent(50);
window[title2].align = "center";
window[title2].textAlign = "middle";
});
chart.cursor = new am4charts.XYCursor();
chart.cursor.xAxis = dateAxis;
scrollbarX.marginBottom = 20;
chart.scrollbarX = scrollbarX;
// Enable export
chart.exporting.menu = new am4core.ExportMenu();
chart.exporting.menu.align = "left";
chart.exporting.menu.verticalAlign = "top";
}); // end am4core.ready()
}
</script>
</head>
<body onload="load();draw();">
<div id="chartdiv"> </div>
<div id="details"> Source : %(srchost)s</div>
<div id="details"> Destination : %(desthost)s</div>
</body>
</html>
"""
[row metadata omitted: numeric quality signals for the preceding file]
[next row: opengnn/__init__.py | CoderPat/OpenGNN | MIT | 32 stars]
from opengnn import decoders
from opengnn import encoders
from opengnn import inputters
from opengnn import models
from opengnn.runner import Runner
[row metadata omitted: numeric quality signals for the preceding file]
[next row: web/huluwa/dinnerplanner/migrations/0001_initial.py | heguangzhu/huluwa | Apache-2.0]
# Generated by Django 3.2.7 on 2021-09-15 16:40
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Dish',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dish_name', models.CharField(max_length=30)),
('dish_type', models.CharField(max_length=30)),
('caixi', models.CharField(max_length=30)),
('flavor', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nick_name', models.CharField(max_length=30)),
('gender', models.CharField(max_length=2)),
('state', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='TrainingItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nick_name', models.CharField(max_length=30)),
('gender', models.CharField(max_length=2)),
('state', models.CharField(max_length=100)),
('dish_name', models.CharField(max_length=30)),
('dish_type', models.CharField(max_length=30)),
('caixi', models.CharField(max_length=30)),
('flavor', models.CharField(max_length=30)),
('rank', models.IntegerField()),
],
),
]
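# Usage sketch (added; not part of the generated migration). After running
# `python manage.py migrate`, rows matching the fields declared above can be
# created through the ORM (assuming the app is importable as `dinnerplanner`):
#
#   from dinnerplanner.models import Dish
#   Dish.objects.create(dish_name="mapo tofu", dish_type="main",
#                       caixi="Sichuan", flavor="spicy")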
[row metadata omitted: numeric quality signals for the preceding file]
[next row: libs/data_forms.py | tanakonDigiOranic/fastapi-molog-wms | MIT]
def create_PSO_job():
CRE_PSO_JOB = {
"DATA": [
{
"<Release Reference Label-1>": "REL-8315683480",
"<Release Reference Label-2>": "REL-9191904884",
"<Release Reference Label-3>": "REL-0303692927",
"<Release Reference Label-4>": "REL-3263683094",
"PSO_DATE": "2021-11-10T00:00:00+07:00",
"REL_PREFIX": '',
"TEAM": '',
"EXP_RELEASE_DATE": '',
"INVOICE_NO": '',
"TP_VEHICLE_NO": '',
"TP_BOOKING": '',
"TP_CARRIER": '',
"TP_CONTAINER_NO_1": '',
"TP_CONTAINER_TYPE_1": '',
"TP_SEAL_NO_1": '',
"TP_CONTAINER_NO_2": '',
"TP_CONTAINER_TYPE_2": '',
"TP_SEAL_NO_2": '',
"TP_CONTAINER_NO_3": '',
"TP_CONTAINER_TYPE_3": '',
"TP_SEAL_NO_3": '',
"TP_AIRWAYBILL": '',
"TP_BILL_OF_LANDING": '',
"TP_ORIGIN": '',
"TP_VESSEL": '',
"TP_PORT_OF_LANDING": '',
"TP_PORT_DISCHARGE": '',
"TP_VOYAGE": '',
"TP_ETD": '',
"CON_CODE": '',
"CON_COMPANY": '',
"CON_TAX_NO": '',
"CON_BRANCH": '',
"CON_ADDRESS": '',
"CON_ADDRESS2": '',
"CON_AREA1": '',
"CON_AREA2": '',
"CON_AREA3": '',
"CON_AREA4": '',
"CON_ZONE1": '',
"CON_ZONE2": '',
"CON_REGION1": '',
"CON_REGION2": '',
"CON_COUNTRY": '',
"CON_ZIP": '',
"CON_CONTACT_NAME": '',
"CON_CONTACT_PHONE": '',
"CON_CONTACT_EMAIL": '',
"CON_NOTE": '',
"ST_CODE": '',
"ST_COMPANY": '',
"ST_ADDRESS": '',
"ST_ADDRESS2": '',
"ST_AREA1": '',
"ST_AREA2": '',
"ST_AREA3": '',
"ST_AREA4": '',
"ST_ZONE1": '',
"ST_ZONE2": '',
"ST_REGION1": '',
"ST_REGION2": '',
"ST_COUNTRY": '',
"ST_ZIP": '',
"ST_CONTACT_NAME": '',
"ST_CONTACT_PHONE": '',
"ST_CONTACT_EMAIL": '',
"ST_NOTE": '',
"DEL_CONSIGNMENT": '',
"DEL_URGENT_ORDER": '',
"DEL_COD": '',
"DEL_COD_AMOUNT": '',
"DELIVERY_TIME_FROM": '',
"DELIVERY_TIME_TO": '',
"SHIPPING_INSTRUCTION": '',
"REMARKS": '',
"BONDED": '',
"TOTAL_PALLET": '',
"STORAGE_CHARGE": '',
"<Receive Reference Label-1>": '',
"<Receive Reference Label-2>": '',
"<Receive Reference Label-3>": '',
"<Receive Reference Label-4>": '',
"<Release Header User Defined-01>": '',
"<Release Header User Defined-02>": '',
"<Release Header User Defined-03>": '',
"<Release Header User Defined-04>": '',
"<Release Header User Defined-05>": '',
"<Release Header User Defined-06>": '',
"<Release Header User Defined-07>": '',
"<Release Header User Defined-08>": '',
"<Release Header User Defined-09>": '',
"<Release Header User Defined-10>": '',
"<Release Header User Defined-11>": '',
"<Release Header User Defined-12>": '',
"<Release Header User Defined-13>": '',
"<Release Header User Defined-14>": '',
"<Release Header User Defined-15>": '',
"<Receive Header User Defined-01>": '',
"<Receive Header User Defined-02>": '',
"<Receive Header User Defined-03>": '',
"<Receive Header User Defined-04>": '',
"<Receive Header User Defined-05>": '',
"<Receive Header User Defined-06>": '',
"<Receive Header User Defined-07>": '',
"<Receive Header User Defined-08>": '',
"<Receive Header User Defined-09>": '',
"<Receive Header User Defined-10>": '',
"<Receive Header User Defined-11>": '',
"<Receive Header User Defined-12>": '',
"<Receive Header User Defined-13>": '',
"<Receive Header User Defined-14>": '',
"<Receive Header User Defined-15>": '',
"DETAIL": [
{
"SKU_CODE": "123456",
"RELEASE_QTY": 1,
"PRODUCT_TYPE": '',
"PACK_CODE": '',
"PACK_QTY": '',
"PALLET_ID": '',
"CARTON_ID": '',
"EXPIRY_DATE": '',
"MANUF_DATE": '',
"LOCATION_CODE": '',
"LOT_NO": '',
"REMARKS": '',
"DEBOM": '',
"SYS_ID": '',
"<Release User Defined-01>": '',
"<Release User Defined-02>": '',
"<Release User Defined-03>": '',
"<Release User Defined-04>": '',
"<Release User Defined-05>": '',
"<Release User Defined-06>": '',
"<Release User Defined-07>": '',
"<Release User Defined-08>": '',
"<Release User Defined-09>": '',
"<Release User Defined-10>": '',
"<Release User Defined-11>": '',
"<Release User Defined-12>": '',
"<Release User Defined-13>": '',
"<Release User Defined-14>": '',
"<Release User Defined-15>": '',
"<Release User Defined-16>": '',
"<Release User Defined-17>": '',
"<Release User Defined-18>": '',
"<Release User Defined-19>": '',
"<Release User Defined-20>": '',
"<Release User Defined-21>": '',
"<Release User Defined-22>": '',
"<Release User Defined-23>": '',
"<Release User Defined-24>": '',
"<Release User Defined-25>": '',
"<Release User Defined-26>": '',
"<Release User Defined-27>": '',
"<Release User Defined-28>": '',
"<Release User Defined-29>": '',
"<Release User Defined-30>": '',
"<Receive User Defined-01>": '',
"<Receive User Defined-02>": '',
"<Receive User Defined-03>": '',
"<Receive User Defined-04>": '',
"<Receive User Defined-05>": '',
"<Receive User Defined-06>": '',
"<Receive User Defined-07>": '',
"<Receive User Defined-08>": '',
"<Receive User Defined-09>": '',
"<Receive User Defined-10>": '',
"<Receive User Defined-11>": '',
"<Receive User Defined-12>": '',
"<Receive User Defined-13>": '',
"<Receive User Defined-14>": '',
"<Receive User Defined-15>": '',
"<Receive User Defined-16>": '',
"<Receive User Defined-17>": '',
"<Receive User Defined-18>": '',
"<Receive User Defined-19>": '',
"<Receive User Defined-20>": '',
"<Receive User Defined-21>": '',
"<Receive User Defined-22>": '',
"<Receive User Defined-23>": '',
"<Receive User Defined-24>": '',
"<Receive User Defined-25>": '',
"<Receive User Defined-26>": '',
"<Receive User Defined-27>": '',
"<Receive User Defined-28>": '',
"<Receive User Defined-29>": '',
"<Receive User Defined-30>": ''
}
]
}
]
}
return CRE_PSO_JOB
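# Usage sketch (an assumption, not from this file; the endpoint URL and the
# INVOICE_NO value are hypothetical). These builders appear to return sample
# JSON bodies for a WMS API, so a caller might fill in real values and POST
# one with `requests`:
#
#   import requests
#   payload = create_PSO_job()
#   payload["DATA"][0]["INVOICE_NO"] = "INV-0001"
#   resp = requests.post("https://wms.example.com/api/pso", json=payload)
#   resp.raise_for_status()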
def create_or_update_partner():
COU_partner = {
"DATA": [
{
"ACTIVE": True,
"CODE": "PARTNER01",
"COMPANY_NAME": "COM01",
"ADDRESS": '',
"ADDRESS2": '',
"AREA1": '',
"AREA2": '',
"AREA3": '',
"AREA4": '',
"ZONE1": '',
"ZONE2": '',
"REGION1": '',
"REGION2": '',
"ZIP": '',
"COUNTRY": '',
"CONTACT_NAME": '',
"CONTACT_PHONE": '',
"CONTACT_EMAIL": '',
"NOTE": '',
"REF_TYPE": [
{
"TYPE": "P",
"INFORM_EMAIL": True
}
]
}
]
}
return COU_partner
def select_inventory_list():
SLL = {
"DATA": [
{
"COUNT": 1,
"PAGE": {
"CURRENT": 1,
"FIRST": 1,
"LAST": 1,
"NEXT": 1,
"PREV": 1,
"SIZE": 10,
"TOTAL": 1
},
"DATA": [
{
"STORER_CODE": "STR000060",
"WH_CODE": "WH01",
"SKU_CODE": "PILLOWB001",
"SKU_DESC": "PILLOWB001",
"SKU_DESC_LOCAL": "หมอน",
"PRODUCT_TYPE": "FG",
"PALLET_ID": "q",
"CARTON_ID": '',
"LOT_NO": '',
"RECEIVED_DATE": "2021-01-15T03:11:49.392Z",
"EXPIRY_DATE": '',
"MANUF_DATE": '',
"RETURN_DATE": '',
"GOOD_CONDITION": "G",
"RECEIVE_QTY": 100,
"DAMAGED_QTY": 0,
"AVAILABLE_QTY": 80,
"ON_HAND_QTY": 100,
"HOLD_QTY": 20,
"HOLD_REASON_CODE": "FREEZ",
"CREATE_DATE": "2021-01-15T03:13:06.365Z",
"MODIFY_DATE": "2021-10-04T12:23:08.938Z",
"PACK_CODE": "PCS",
"PACK_QTY": 1,
"PRICE": 0,
"LENGTH": 1,
"WIDTH": 1,
"HEIGHT": 1,
"UNIT_CUB": 0.1,
"WEIGHT_UOM": "KG",
"VOLUME_UOM": "CBM",
"LENGTH_UOM": "CM",
"GROSS_WEIGHT": 1,
"NET_WEIGHT": 1,
"Rec1 No": '',
"RecHU1 No": '',
"HREC16": '',
"RecU1 No": ''
}
]
}
]
}
return SLL
def select_goods_receive_by_job():
SGReceive_BY_JOB = {
"DATA": [
{
"STORER_CODE": "STR000060",
"WH_CODE": "WH01",
"RECEIVE_KEY": "0000134340",
"BATCH_NO": 1,
"STATUS_CODE": "CLOSED",
"Rec1 No": "1345",
"TP_VEHICLE_NO": '',
"TP_CARRIER": '',
"TP_CONTAINER_NO_1": '',
"TP_CONTAINER_TYPE_1": '',
"TP_SEAL_NO_1": '',
"TP_CONTAINER_NO_2": '',
"TP_CONTAINER_TYPE_2": '',
"TP_SEAL_NO_2": '',
"TP_CONTAINER_NO_3": '',
"TP_CONTAINER_TYPE_3": '',
"TP_SEAL_NO_3": '',
"TP_AIRWAYBILL": '',
"TP_BILL_OF_LANDING": '',
"TP_ORIGIN": '',
"TP_VESSEL": '',
"TP_PORT_OF_LANDING": '',
"TP_PORT_DISCHARGE": '',
"TP_VOYAGE": '',
"TP_ETA": '',
"RECEIVING_START_DATE": '',
"RECEIVED_DATE": "2020-11-08T06:35:03.570Z",
"PUTAWAY_START_DATE": '',
"PUTAWAY_DATE": "2020-11-08T06:36:16.550Z",
"PUTAWAY_BY": "ft",
"CLOSED_DATE": "2020-11-08T06:36:40.968Z",
"SUP_CODE": '',
"SUP_COMPANY": "******",
"SUP_ADDRESS": "******",
"SUP_ADDRESS2": "******",
"SUP_AREA1": "******",
"SUP_AREA2": "******",
"SUP_AREA3": "******",
"SUP_AREA4": "******",
"SUP_ZONE1": "******",
"SUP_ZONE2": "******",
"SUP_REGION1": "******",
"SUP_REGION2": "******",
"SUP_COUNTRY": "******",
"SUP_ZIP": "******",
"SUP_CONTACT_NAME": "******",
"SUP_CONTACT_PHONE": "******",
"SUP_CONTACT_EMAIL": "******",
"SUP_NOTE": '',
"BONDED": False,
"TOTAL_PALLET": 1,
"STORAGE_CHARGE": True,
"RETURN_DATE": '',
"REMARKS": '',
"RECEIVED_SIGNED": '',
"VERIFY_SIGNED": '',
"RecHU1 No": '',
"HUSER_DEFINED_16": '',
"IMG_TRUCK_01": '',
"IMG_TRUCK_02": '',
"IMG_TRUCK_03": '',
"IMG_TRUCK_04": '',
"IMG_TRUCK_05": '',
"IMG_TRUCK_06": '',
"IMG_TRUCK_07": '',
"IMG_TRUCK_08": '',
"IMG_TRUCK_09": '',
"IMG_TRUCK_10": '',
"IMG_TRUCK_THUMB_01": '',
"IMG_TRUCK_THUMB_02": '',
"IMG_TRUCK_THUMB_03": '',
"IMG_TRUCK_THUMB_04": '',
"IMG_TRUCK_THUMB_05": '',
"IMG_TRUCK_THUMB_06": '',
"IMG_TRUCK_THUMB_07": '',
"IMG_TRUCK_THUMB_08": '',
"IMG_TRUCK_THUMB_09": '',
"IMG_TRUCK_THUMB_10": '',
"IMG_GOODS_01": '',
"IMG_GOODS_02": '',
"IMG_GOODS_03": '',
"IMG_GOODS_04": '',
"IMG_GOODS_05": '',
"IMG_GOODS_06": '',
"IMG_GOODS_07": '',
"IMG_GOODS_08": '',
"IMG_GOODS_09": '',
"IMG_GOODS_10": '',
"IMG_GOODS_THUMB_01": '',
"IMG_GOODS_THUMB_02": '',
"IMG_GOODS_THUMB_03": '',
"IMG_GOODS_THUMB_04": '',
"IMG_GOODS_THUMB_05": '',
"IMG_GOODS_THUMB_06": '',
"IMG_GOODS_THUMB_07": '',
"IMG_GOODS_THUMB_08": '',
"IMG_GOODS_THUMB_09": '',
"IMG_GOODS_THUMB_10": '',
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:35:05.843Z",
"MODIFY_DATE": "2020-11-08T06:36:40.969Z",
"DELETE_DATE": '',
"DETAIL": [
{
"SYS_ID": "0000003LCW",
"LINE_SEQ": 1,
"PRODUCT_TYPE": "FG",
"PALLET_ID": "AAQ",
"CARTON_ID": '',
"MANUF_DATE": '',
"EXPIRY_DATE": '',
"LENGTH": 1,
"WIDTH": 1,
"HEIGHT": 1,
"LENGTH_UOM": "CM",
"UNIT_CUB": 0.000001,
"VOLUME_UOM": "CBM",
"GROSS_WEIGHT": 1,
"NET_WEIGHT": 1,
"WEIGHT_UOM": "KG",
"GOOD_QTY": 100,
"DAMAGED_QTY": 0,
"PACK_CODE": "PCS",
"PACK_QTY": 1,
"CONFIRM_PUTAWAY_QTY": 100,
"HOLD": False,
"REMARKS": '',
"ASN_LINE_SEQ": '',
"DEBOM": False,
"NON_CONFORMANCE": '',
"NON_CONFORMANCE_REMARK": '',
"LOT_NO": '',
"RecU1 No": '',
"PRICE": 0,
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:35:15.254Z",
"MODIFY_DATE": "2020-11-08T06:36:34.035Z",
"DELETE_DATE": '',
"SKU_CODE": "PILLOWB001",
"LOCATION_CODE": "LOC_WH01",
"HOLD_REASON_CODE": ''
}
]
}
]
}
return SGReceive_BY_JOB
def Cancel_DSO():
CD = {
"DATA": [
{
"<Release Reference Label-1>": "REL-2735287259",
"<Release Reference Label-2>": "REL-7563430644",
"<Release Reference Label-3>": "REL-9045200998",
"<Release Reference Label-4>": "REL-6992735291"
}
]
}
return CD
def Update_Invoice():
UI = {
"DATA": [
{
"<Release Reference Label-1>": "REL-6360249344",
"<Release Reference Label-2>": "REL-8065676721",
"<Release Reference Label-3>": "REL-1933819033",
"<Release Reference Label-4>": "REL-2768953543",
"INVOICE_DATA": {
"PAYMENT_TYPE": '',
"INV_FORMAT_FILE_DESTINATION": '',
"INVOICE_DATE": '',
"SALE_ORDER_NO": '',
"CUSTOMER_PHONE_NUMBER": '',
"POSTING_DATE": '',
"DUE_DATE": '',
"CUSTOMER_PURCHASE_ORDER_REFERENCE": '',
"CUSTOMER_ID": '',
"CUSTOMER_CODE": '',
"CUSTOMER_TAX_ID": '',
"CUSTOMER_BRANCH": '',
"CUSTOMER_NAME": '',
"CUSTOMER_EMAIL": '',
"BILL_TO_NAME": '',
"BILL_TO_CUSTOMER_ID": '',
"BILL_TO_CUSTOMER_NUMBER": '',
"BILL_TO_ADDRESS_1": '',
"BILL_TO_ADDRESS_2": '',
"BILL_TO_CITY": '',
"BILL_TO_COUNTRY": '',
"BILL_TO_STATE": '',
"BILL_TO_POST_CODE": '',
"SHIP_TO_CODE": '',
"SHIP_TO_NAME": '',
"SHIP_TO_CONTACT": '',
"SHIP_TO_ADDRESS_1": '',
"SHIP_TO_ADDRESS_2": '',
"SHIP_TO_CITY": '',
"SHIP_TO_COUNTRY": '',
"SHIP_TO_STATE": '',
"SHIP_TO_POST_CODE": '',
"CURRENCY_CODE": '',
"PAYMENT_TERM": '',
"SALES_PERSON": '',
"TAX_CODE": '',
"TAX_PERCENT": '',
"PRICES_INCLUDE_TAX": '',
"REMAINING_AMOUNT": '',
"TOTAL_AMOUNT_BEFORE_DISCOUNT": '',
"DISCOUNT_AMOUNT": '',
"DISCOUNT_APPLIED_BEFORE_TAX": '',
"TOTAL_AMOUNT_BEFORE_TAX": '',
"TOTAL_TAX_AMOUNT": '',
"TOTAL_AMOUNT_INCLUDING_TAX": '',
"DEPOSIT_AMOUNT": '',
"STATUS": '',
"<Release Header User Defined-01>": '',
"<Release Header User Defined-02>": '',
"<Release Header User Defined-03>": '',
"<Release Header User Defined-04>": '',
"<Release Header User Defined-05>": '',
"<Release Header User Defined-06>": '',
"<Release Header User Defined-07>": '',
"<Release Header User Defined-08>": '',
"<Release Header User Defined-09>": '',
"<Release Header User Defined-10>": '',
"<Release Header User Defined-11>": '',
"<Release Header User Defined-12>": '',
"<Release Header User Defined-13>": '',
"<Release Header User Defined-14>": '',
"<Release Header User Defined-15>": '',
"DETAIL": [
{
"SEQUENCE": '',
"ITEM_CODE": '',
"ITEM_DESC": '',
"PACK_CODE": '',
"UNIT_PRICE": '',
"QUANTITY": '',
"DISCOUNT_AMOUNT": '',
"DISCOUNT_PERCENT": '',
"DISCOUNT_APPLIED_BEFORE_TAX": '',
"AMOUNT_BEFORE_TAX": '',
"TAX_CODE": '',
"TAX_PERCENT": '',
"LINE_AMOUNT_BEFORE_DISCOUNT": '',
"INVOICE_DISCOUNT_ALLOCATION": '',
"LINE_AMOUNT_BEFORE_TAX": '',
"LINE_TAX_AMOUNT": '',
"LINE_AMOUNT_INCLUDING_TAX": '',
"<Release User Defined-01>": '',
"<Release User Defined-02>": '',
"<Release User Defined-03>": '',
"<Release User Defined-04>": '',
"<Release User Defined-05>": '',
"<Release User Defined-06>": '',
"<Release User Defined-07>": '',
"<Release User Defined-08>": '',
"<Release User Defined-09>": '',
"<Release User Defined-10>": '',
"<Release User Defined-11>": '',
"<Release User Defined-12>": '',
"<Release User Defined-13>": '',
"<Release User Defined-14>": '',
"<Release User Defined-15>": '',
"<Release User Defined-16>": '',
"<Release User Defined-17>": '',
"<Release User Defined-18>": '',
"<Release User Defined-19>": '',
"<Release User Defined-20>": '',
"<Release User Defined-21>": '',
"<Release User Defined-22>": '',
"<Release User Defined-23>": '',
"<Release User Defined-24>": '',
"<Release User Defined-25>": '',
"<Release User Defined-26>": '',
"<Release User Defined-27>": '',
"<Release User Defined-28>": '',
"<Release User Defined-29>": '',
"<Release User Defined-30>": ''
}
]
}
}
]
}
return UI
def Update_Consignee_and_ShipTo():
UC_and_SHIPTO = {
"DATA":[
{
"<Release Reference Label-1>": "REL-8714287303",
"<Release Reference Label-2>": "REL-7166195423",
"<Release Reference Label-3>": "REL-2579928879",
"<Release Reference Label-4>": "REL-8804956474",
"CON_CODE": '',
"CON_COMPANY": '',
"CON_TAX_NO": '',
"CON_BRANCH": '',
"CON_ADDRESS": '',
"CON_ADDRESS2": '',
"CON_AREA1": '',
"CON_AREA2": '',
"CON_AREA3": '',
"CON_AREA4": '',
"CON_ZONE1": '',
"CON_ZONE2": '',
"CON_REGION1": '',
"CON_REGION2": '',
"CON_COUNTRY": '',
"CON_ZIP": '',
"CON_CONTACT_NAME": '',
"CON_CONTACT_PHONE": '',
"CON_CONTACT_EMAIL": '',
"CON_NOTE": '',
"ST_CODE": '',
"ST_COMPANY": '',
"ST_ADDRESS": '',
"ST_ADDRESS2": '',
"ST_AREA1": '',
"ST_AREA2": '',
"ST_AREA3": '',
"ST_AREA4": '',
"ST_ZONE1": '',
"ST_ZONE2": '',
"ST_REGION1": '',
"ST_REGION2": '',
"ST_COUNTRY": '',
"ST_ZIP": '',
"ST_CONTACT_NAME": '',
"ST_CONTACT_PHONE": '',
"ST_CONTACT_EMAIL": '',
"ST_NOTE": ''
}
]
}
return UC_and_SHIPTO
def create_DSO_Job():
CRE_DSO_JOB = {
"DATA": [
{
"<Release Reference Label-1>": "REL-0682578235",
"<Release Reference Label-2>": "REL-2624922914",
"<Release Reference Label-3>": "REL-9766789709",
"<Release Reference Label-4>": "REL-5069149869",
"REL_PREFIX": '',
"TEAM": '',
"EXP_RELEASE_DATE": '',
"INVOICE_NO": '',
"TP_VEHICLE_NO": '',
"TP_BOOKING": '',
"TP_CARRIER": '',
"TP_CONTAINER_NO_1": '',
"TP_CONTAINER_TYPE_1": '',
"TP_SEAL_NO_1": '',
"TP_CONTAINER_NO_2": '',
"TP_CONTAINER_TYPE_2": '',
"TP_SEAL_NO_2": '',
"TP_CONTAINER_NO_3": '',
"TP_CONTAINER_TYPE_3": '',
"TP_SEAL_NO_3": '',
"TP_AIRWAYBILL": '',
"TP_BILL_OF_LANDING": '',
"TP_ORIGIN": '',
"TP_VESSEL": '',
"TP_PORT_OF_LANDING": '',
"TP_PORT_DISCHARGE": '',
"TP_VOYAGE": '',
"TP_ETD": '',
"CON_CODE": '',
"CON_COMPANY": '',
"CON_TAX_NO": '',
"CON_BRANCH": '',
"CON_ADDRESS": '',
"CON_ADDRESS2": '',
"CON_AREA1": '',
"CON_AREA2": '',
"CON_AREA3": '',
"CON_AREA4": '',
"CON_ZONE1": '',
"CON_ZONE2": '',
"CON_REGION1": '',
"CON_REGION2": '',
"CON_COUNTRY": '',
"CON_ZIP": '',
"CON_CONTACT_NAME": '',
"CON_CONTACT_PHONE": '',
"CON_CONTACT_EMAIL": '',
"CON_NOTE": '',
"ST_CODE": '',
"ST_COMPANY": '',
"ST_ADDRESS": '',
"ST_ADDRESS2": '',
"ST_AREA1": '',
"ST_AREA2": '',
"ST_AREA3": '',
"ST_AREA4": '',
"ST_ZONE1": '',
"ST_ZONE2": '',
"ST_REGION1": '',
"ST_REGION2": '',
"ST_COUNTRY": '',
"ST_ZIP": '',
"ST_CONTACT_NAME": '',
"ST_CONTACT_PHONE": '',
"ST_CONTACT_EMAIL": '',
"ST_NOTE": '',
"DEL_CONSIGNMENT": '',
"DEL_URGENT_ORDER": '',
"DEL_COD": '',
"DEL_COD_AMOUNT": '',
"DELIVERY_TIME_FROM": '',
"DELIVERY_TIME_TO": '',
"SHIPPING_INSTRUCTION": '',
"REMARKS": '',
"BONDED": '',
"TOTAL_PALLET": '',
"STORAGE_CHARGE": '',
"IMG_TRUCK_01": '',
"IMG_TRUCK_02": '',
"IMG_TRUCK_03": '',
"IMG_TRUCK_04": '',
"IMG_TRUCK_05": '',
"IMG_TRUCK_06": '',
"IMG_TRUCK_07": '',
"IMG_TRUCK_08": '',
"IMG_TRUCK_09": '',
"IMG_TRUCK_10": '',
"IMG_GOODS_01": '',
"IMG_GOODS_02": '',
"IMG_GOODS_03": '',
"IMG_GOODS_04": '',
"IMG_GOODS_05": '',
"IMG_GOODS_06": '',
"IMG_GOODS_07": '',
"IMG_GOODS_08": '',
"IMG_GOODS_09": '',
"IMG_GOODS_10": '',
"<Release Header User Defined-01>": '',
"<Release Header User Defined-02>": '',
"<Release Header User Defined-03>": '',
"<Release Header User Defined-04>": '',
"<Release Header User Defined-05>": '',
"<Release Header User Defined-06>": '',
"<Release Header User Defined-07>": '',
"<Release Header User Defined-08>": '',
"<Release Header User Defined-09>": '',
"<Release Header User Defined-10>": '',
"<Release Header User Defined-11>": '',
"<Release Header User Defined-12>": '',
"<Release Header User Defined-13>": '',
"<Release Header User Defined-14>": '',
"<Release Header User Defined-15>": '',
"DETAIL": [
{
"SKU_CODE": "123456",
"RELEASE_QTY": 1,
"PRODUCT_TYPE": '',
"PACK_CODE": '',
"PACK_QTY": '',
"PALLET_ID": '',
"CARTON_ID": '',
"EXPIRY_DATE": '',
"MANUF_DATE": '',
"PICK_BY_CODE": '',
"LOCATION_CODE": '',
"LOT_NO": '',
"REMARKS": '',
"DEBOM": '',
"SYS_ID": '',
"<Release User Defined-01>": '',
"<Release User Defined-02>": '',
"<Release User Defined-03>": '',
"<Release User Defined-04>": '',
"<Release User Defined-05>": '',
"<Release User Defined-06>": '',
"<Release User Defined-07>": '',
"<Release User Defined-08>": '',
"<Release User Defined-09>": '',
"<Release User Defined-10>": '',
"<Release User Defined-11>": '',
"<Release User Defined-12>": '',
"<Release User Defined-13>": '',
"<Release User Defined-14>": '',
"<Release User Defined-15>": '',
"<Release User Defined-16>": '',
"<Release User Defined-17>": '',
"<Release User Defined-18>": '',
"<Release User Defined-19>": '',
"<Release User Defined-20>": '',
"<Release User Defined-21>": '',
"<Release User Defined-22>": '',
"<Release User Defined-23>": '',
"<Release User Defined-24>": '',
"<Release User Defined-25>": '',
"<Release User Defined-26>": '',
"<Release User Defined-27>": '',
"<Release User Defined-28>": '',
"<Release User Defined-29>": '',
"<Release User Defined-30>": ''
}
]
}
]
}
return CRE_DSO_JOB
def select_summary_of_DSO():
SS_DSO = {
"DATA" : [
{
"JOB_COUNT": 10,
"JOB_QTY": 200
}
]
}
return SS_DSO
def select_summary_of_ASN():
SS_ASN = {
"DATA" : [
{
"JOB_COUNT": 10,
"JOB_QTY": 200
}
]
}
return SS_ASN
def select_confirm_shipped_by_job():
SCShipped_BY_JOB = {
"DATA": [
{
"STORER_CODE": "STR000060",
"WH_CODE": "WH01",
"RELEASE_KEY": "0000019990",
"BATCH_NO": 1,
"STATUS_CODE": "CLOSED",
"REL_PREFIX": '',
"TEAM": '',
"EXP_RELEASE_DATE": "2020-11-08T06:36:48.759Z",
"DRAFT_DATE": "2020-11-08T06:36:52.303Z",
"OPENED_DATE": "2020-11-08T06:36:48.759Z",
"ALLOCATING_DATE": "2020-11-08T06:37:33.866Z",
"ALLOCATED_DATE": "2020-11-08T06:37:46.542Z",
"PICKING_DATE": "2020-11-08T06:37:52.042Z",
"PICKED_DATE": "2020-11-08T06:37:53.646Z",
"PACKING_DATE": "2020-11-08T06:37:53.646Z",
"PACKED_DATE": "2020-11-08T06:37:53.646Z",
"READY_TO_SHIP_DATE": "2020-11-08T06:37:53.646Z",
"RELEASING_DATE": "2020-11-08T06:37:53.646Z",
"CLOSED_DATE": "2020-11-08T06:37:53.646Z",
"IN_TRANSIT_DATE": '',
"POD_DATE": '',
"Rel1 No": "17562",
"Rec1 No": '',
"INVOICE_NO": '',
"TP_VEHICLE_NO": '',
"TP_BOOKING": '',
"TP_CARRIER": '',
"TP_CONTAINER_NO_1": '',
"TP_CONTAINER_TYPE_1": '',
"TP_SEAL_NO_1": '',
"TP_CONTAINER_NO_2": '',
"TP_CONTAINER_TYPE_2": '',
"TP_SEAL_NO_2": '',
"TP_CONTAINER_NO_3": '',
"TP_CONTAINER_TYPE_3": '',
"TP_SEAL_NO_3": '',
"TP_AIRWAYBILL": '',
"TP_BILL_OF_LANDING": '',
"TP_ORIGIN": '',
"TP_VESSEL": '',
"TP_PORT_OF_LANDING": '',
"TP_PORT_DISCHARGE": '',
"TP_VOYAGE": '',
"TP_ETD": '',
"CON_CODE": '',
"CON_COMPANY": "******",
"CON_ADDRESS": "******",
"CON_ADDRESS2": "******",
"CON_AREA1": "******",
"CON_AREA2": "******",
"CON_AREA3": "******",
"CON_AREA4": "******",
"CON_ZONE1": "******",
"CON_ZONE2": "******",
"CON_REGION1": "******",
"CON_REGION2": "******",
"CON_COUNTRY": "******",
"CON_ZIP": "******",
"CON_CONTACT_NAME": "******",
"CON_CONTACT_PHONE": "******",
"CON_CONTACT_EMAIL": "******",
"CON_NOTE": '',
"ST_CODE": '',
"ST_COMPANY": "******",
"ST_ADDRESS": "******",
"ST_ADDRESS2": "******",
"ST_AREA1": "******",
"ST_AREA2": "******",
"ST_AREA3": "******",
"ST_AREA4": "******",
"ST_ZONE1": "******",
"ST_ZONE2": "******",
"ST_REGION1": "******",
"ST_REGION2": "******",
"ST_COUNTRY": "******",
"ST_ZIP": "******",
"ST_CONTACT_NAME": "******",
"ST_CONTACT_PHONE": "******",
"ST_CONTACT_EMAIL": "******",
"ST_NOTE": '',
"DEL_CONSIGNMENT": '',
"DEL_URGENT_ORDER": '',
"DEL_COD": '',
"DEL_COD_AMOUNT": '',
"DELIVERY_TIME_FROM": '',
"DELIVERY_TIME_TO": '',
"SHIPPING_INSTRUCTION": '',
"REMARKS": '',
"BONDED": False,
"TOTAL_PALLET": 0,
"STORAGE_CHARGE": True,
"IMG_TRUCK_01": '',
"IMG_TRUCK_02": '',
"IMG_TRUCK_03": '',
"IMG_TRUCK_04": '',
"IMG_TRUCK_05": '',
"IMG_TRUCK_06": '',
"IMG_TRUCK_07": '',
"IMG_TRUCK_08": '',
"IMG_TRUCK_09": '',
"IMG_TRUCK_10": '',
"IMG_TRUCK_THUMB_01": '',
"IMG_TRUCK_THUMB_02": '',
"IMG_TRUCK_THUMB_03": '',
"IMG_TRUCK_THUMB_04": '',
"IMG_TRUCK_THUMB_05": '',
"IMG_TRUCK_THUMB_06": '',
"IMG_TRUCK_THUMB_07": '',
"IMG_TRUCK_THUMB_08": '',
"IMG_TRUCK_THUMB_09": '',
"IMG_TRUCK_THUMB_10": '',
"IMG_GOODS_01": '',
"IMG_GOODS_02": '',
"IMG_GOODS_03": '',
"IMG_GOODS_04": '',
"IMG_GOODS_05": '',
"IMG_GOODS_06": '',
"IMG_GOODS_07": '',
"IMG_GOODS_08": '',
"IMG_GOODS_09": '',
"IMG_GOODS_10": '',
"IMG_GOODS_THUMB_01": '',
"IMG_GOODS_THUMB_02": '',
"IMG_GOODS_THUMB_03": '',
"IMG_GOODS_THUMB_04": '',
"IMG_GOODS_THUMB_05": '',
"IMG_GOODS_THUMB_06": '',
"IMG_GOODS_THUMB_07": '',
"IMG_GOODS_THUMB_08": '',
"IMG_GOODS_THUMB_09": '',
"IMG_GOODS_THUMB_10": '',
"MKP_JOB_BRAND": '',
"MKP_JOB_NO": '',
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:36:52.306Z",
"MODIFY_DATE": "2020-11-20T08:44:51.457Z",
"DELETE_DATE": '',
"DETAIL": [
{
"LINE_SEQ": 1,
"PRODUCT_TYPE": "FG",
"PACK_CODE": "PCS",
"PACK_QTY": 1,
"PALLET_ID": "AAQ",
"CARTON_ID": '',
"EXPIRY_DATE": '',
"MANUF_DATE": '',
"EXP_RELEASE_QTY": 1,
"RELEASE_QTY": 1,
"ALLOCATED_QTY": 0,
"PICKING_QTY": 0,
"PICKED_QTY": 0,
"PACKED_QTY": 0,
"SHIPPED_QTY": 1,
"RELEASE_QTY_DIFF": 0,
"LOT_NO": '',
"REMARKS": '',
"DEBOM": False,
"SYS_ID": '',
"MKP_JOB_DETAIL_NO": '',
"MKP_DEL_CONSIGNMENT": '',
"MKP_ST_LABEL": '',
"MKP_STATUS": '',
"MKP_INVOICE_NO": '',
"MKP_PRICE_AFTER_DISCOUNT": '',
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:36:59.256Z",
"MODIFY_DATE": "2020-11-20T08:44:51.442Z",
"DELETE_DATE": '',
"SKU_CODE": "PILLOWB001",
"LOCATION_CODE": '',
"PICK_BY_CODE": "FIFO"
}
]
}
]
}
return SCShipped_BY_JOB
def select_confirm_picked_by_job():
SCPicked_BY_JOB = {
"DATA": [
{
"STORER_CODE": "STR000060",
"WH_CODE": "WH01",
"RELEASE_KEY": "0000019990",
"BATCH_NO": 1,
"STATUS_CODE": "PICKED",
"REL_PREFIX": '',
"TEAM": '',
"EXP_RELEASE_DATE": "2020-11-08T06:36:48.759Z",
"DRAFT_DATE": "2020-11-08T06:36:52.303Z",
"OPENED_DATE": "2020-11-08T06:36:48.759Z",
"ALLOCATING_DATE": "2020-11-08T06:37:33.866Z",
"ALLOCATED_DATE": "2020-11-08T06:37:46.542Z",
"PICKING_DATE": "2020-11-08T06:37:52.042Z",
"PICKED_DATE": "2020-11-08T06:37:53.646Z",
"PACKING_DATE": '',
"PACKED_DATE": '',
"READY_TO_SHIP_DATE": '',
"RELEASING_DATE": '',
"CLOSED_DATE": '',
"IN_TRANSIT_DATE": '',
"POD_DATE": '',
"Rel1 No": "17562",
"Rec1 No": '',
"INVOICE_NO": '',
"TP_VEHICLE_NO": '',
"TP_BOOKING": '',
"TP_CARRIER": '',
"TP_CONTAINER_NO_1": '',
"TP_CONTAINER_TYPE_1": '',
"TP_SEAL_NO_1": '',
"TP_CONTAINER_NO_2": '',
"TP_CONTAINER_TYPE_2": '',
"TP_SEAL_NO_2": '',
"TP_CONTAINER_NO_3": '',
"TP_CONTAINER_TYPE_3": '',
"TP_SEAL_NO_3": '',
"TP_AIRWAYBILL": '',
"TP_BILL_OF_LANDING": '',
"TP_ORIGIN": '',
"TP_VESSEL": '',
"TP_PORT_OF_LANDING": '',
"TP_PORT_DISCHARGE": '',
"TP_VOYAGE": '',
"TP_ETD": '',
"CON_CODE": '',
"CON_COMPANY": "******",
"CON_ADDRESS": "******",
"CON_ADDRESS2": "******",
"CON_AREA1": "******",
"CON_AREA2": "******",
"CON_AREA3": "******",
"CON_AREA4": "******",
"CON_ZONE1": "******",
"CON_ZONE2": "******",
"CON_REGION1": "******",
"CON_REGION2": "******",
"CON_COUNTRY": "******",
"CON_ZIP": "******",
"CON_CONTACT_NAME": "******",
"CON_CONTACT_PHONE": "******",
"CON_CONTACT_EMAIL": "******",
"CON_NOTE": '',
"ST_CODE": '',
"ST_COMPANY": "******",
"ST_ADDRESS": "******",
"ST_ADDRESS2": "******",
"ST_AREA1": "******",
"ST_AREA2": "******",
"ST_AREA3": "******",
"ST_AREA4": "******",
"ST_ZONE1": "******",
"ST_ZONE2": "******",
"ST_REGION1": "******",
"ST_REGION2": "******",
"ST_COUNTRY": "******",
"ST_ZIP": "******",
"ST_CONTACT_NAME": "******",
"ST_CONTACT_PHONE": "******",
"ST_CONTACT_EMAIL": "******",
"ST_NOTE": '',
"DEL_CONSIGNMENT": '',
"DEL_URGENT_ORDER": '',
"DEL_COD": '',
"DEL_COD_AMOUNT": '',
"DELIVERY_TIME_FROM": '',
"DELIVERY_TIME_TO": '',
"SHIPPING_INSTRUCTION": '',
"REMARKS": '',
"BONDED": False,
"TOTAL_PALLET": 0,
"STORAGE_CHARGE": True,
"IMG_TRUCK_01": '',
"IMG_TRUCK_02": '',
"IMG_TRUCK_03": '',
"IMG_TRUCK_04": '',
"IMG_TRUCK_05": '',
"IMG_TRUCK_06": '',
"IMG_TRUCK_07": '',
"IMG_TRUCK_08": '',
"IMG_TRUCK_09": '',
"IMG_TRUCK_10": '',
"IMG_TRUCK_THUMB_01": '',
"IMG_TRUCK_THUMB_02": '',
"IMG_TRUCK_THUMB_03": '',
"IMG_TRUCK_THUMB_04": '',
"IMG_TRUCK_THUMB_05": '',
"IMG_TRUCK_THUMB_06": '',
"IMG_TRUCK_THUMB_07": '',
"IMG_TRUCK_THUMB_08": '',
"IMG_TRUCK_THUMB_09": '',
"IMG_TRUCK_THUMB_10": '',
"IMG_GOODS_01": '',
"IMG_GOODS_02": '',
"IMG_GOODS_03": '',
"IMG_GOODS_04": '',
"IMG_GOODS_05": '',
"IMG_GOODS_06": '',
"IMG_GOODS_07": '',
"IMG_GOODS_08": '',
"IMG_GOODS_09": '',
"IMG_GOODS_10": '',
"IMG_GOODS_THUMB_01": '',
"IMG_GOODS_THUMB_02": '',
"IMG_GOODS_THUMB_03": '',
"IMG_GOODS_THUMB_04": '',
"IMG_GOODS_THUMB_05": '',
"IMG_GOODS_THUMB_06": '',
"IMG_GOODS_THUMB_07": '',
"IMG_GOODS_THUMB_08": '',
"IMG_GOODS_THUMB_09": '',
"IMG_GOODS_THUMB_10": '',
"MKP_JOB_BRAND": '',
"MKP_JOB_NO": '',
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:36:52.306Z",
"MODIFY_DATE": "2020-11-20T08:44:51.457Z",
"DELETE_DATE": '',
"DETAIL": [
{
"LINE_SEQ": 1,
"PRODUCT_TYPE": "FG",
"PACK_CODE": "PCS",
"PACK_QTY": 1,
"PALLET_ID": "AAQ",
"CARTON_ID": '',
"EXPIRY_DATE": '',
"MANUF_DATE": '',
"EXP_RELEASE_QTY": 1,
"RELEASE_QTY": 1,
"ALLOCATED_QTY": 0,
"PICKING_QTY": 0,
"PICKED_QTY": 0,
"PACKED_QTY": 1,
"SHIPPED_QTY": 0,
"RELEASE_QTY_DIFF": 0,
"LOT_NO": '',
"REMARKS": '',
"DEBOM": False,
"SYS_ID": '',
"MKP_JOB_DETAIL_NO": '',
"MKP_DEL_CONSIGNMENT": '',
"MKP_ST_LABEL": '',
"MKP_STATUS": '',
"MKP_INVOICE_NO": '',
"MKP_PRICE_AFTER_DISCOUNT": '',
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:36:59.256Z",
"MODIFY_DATE": "2020-11-20T08:44:51.442Z",
"DELETE_DATE": '',
"SKU_CODE": "PILLOWB001",
"LOCATION_CODE": '',
"PICK_BY_CODE": "FIFO"
}
]
}
]
}
return SCPicked_BY_JOB
def select_confirm_packed_by_job():
SCPacked_BY_JOB = {
"DATA": [
{
"STORER_CODE": "STR000060",
"WH_CODE": "WH01",
"RELEASE_KEY": "0000019989",
"BATCH_NO": 1,
"STATUS_CODE": "PACKED",
"REL_PREFIX": '',
"TEAM": '',
"EXP_RELEASE_DATE": "2020-11-08T06:36:48.759Z",
"DRAFT_DATE": "2020-11-08T06:36:52.303Z",
"OPENED_DATE": "2020-11-08T06:36:48.759Z",
"ALLOCATING_DATE": "2020-11-08T06:37:33.866Z",
"ALLOCATED_DATE": "2020-11-08T06:37:46.542Z",
"PICKING_DATE": "2020-11-08T06:37:52.042Z",
"PICKED_DATE": "2020-11-08T06:37:53.646Z",
"PACKING_DATE": "2020-11-08T06:55:13.989Z",
"PACKED_DATE": "2020-11-08T06:55:13.989Z",
"READY_TO_SHIP_DATE": '',
"RELEASING_DATE": '',
"CLOSED_DATE": '',
"IN_TRANSIT_DATE": '',
"POD_DATE": '',
"Rel1 No": "17561",
"Rec1 No": '',
"INVOICE_NO": '',
"TP_VEHICLE_NO": '',
"TP_BOOKING": '',
"TP_CARRIER": '',
"TP_CONTAINER_NO_1": '',
"TP_CONTAINER_TYPE_1": '',
"TP_SEAL_NO_1": '',
"TP_CONTAINER_NO_2": '',
"TP_CONTAINER_TYPE_2": '',
"TP_SEAL_NO_2": '',
"TP_CONTAINER_NO_3": '',
"TP_CONTAINER_TYPE_3": '',
"TP_SEAL_NO_3": '',
"TP_AIRWAYBILL": '',
"TP_BILL_OF_LANDING": '',
"TP_ORIGIN": '',
"TP_VESSEL": '',
"TP_PORT_OF_LANDING": '',
"TP_PORT_DISCHARGE": '',
"TP_VOYAGE": '',
"TP_ETD": '',
"CON_CODE": '',
"CON_COMPANY": "******",
"CON_ADDRESS": "******",
"CON_ADDRESS2": "******",
"CON_AREA1": "******",
"CON_AREA2": "******",
"CON_AREA3": "******",
"CON_AREA4": "******",
"CON_ZONE1": "******",
"CON_ZONE2": "******",
"CON_REGION1": "******",
"CON_REGION2": "******",
"CON_COUNTRY": "******",
"CON_ZIP": "******",
"CON_CONTACT_NAME": "******",
"CON_CONTACT_PHONE": "******",
"CON_CONTACT_EMAIL": "******",
"CON_NOTE": '',
"ST_CODE": '',
"ST_COMPANY": "******",
"ST_ADDRESS": "******",
"ST_ADDRESS2": "******",
"ST_AREA1": "******",
"ST_AREA2": "******",
"ST_AREA3": "******",
"ST_AREA4": "******",
"ST_ZONE1": "******",
"ST_ZONE2": "******",
"ST_REGION1": "******",
"ST_REGION2": "******",
"ST_COUNTRY": "******",
"ST_ZIP": "******",
"ST_CONTACT_NAME": "******",
"ST_CONTACT_PHONE": "******",
"ST_CONTACT_EMAIL": "******",
"ST_NOTE": '',
"DEL_CONSIGNMENT": '',
"DEL_URGENT_ORDER": '',
"DEL_COD": '',
"DEL_COD_AMOUNT": '',
"DELIVERY_TIME_FROM": '',
"DELIVERY_TIME_TO": '',
"SHIPPING_INSTRUCTION": '',
"REMARKS": '',
"BONDED": False,
"TOTAL_PALLET": 0,
"STORAGE_CHARGE": True,
"IMG_TRUCK_01": '',
"IMG_TRUCK_02": '',
"IMG_TRUCK_03": '',
"IMG_TRUCK_04": '',
"IMG_TRUCK_05": '',
"IMG_TRUCK_06": '',
"IMG_TRUCK_07": '',
"IMG_TRUCK_08": '',
"IMG_TRUCK_09": '',
"IMG_TRUCK_10": '',
"IMG_TRUCK_THUMB_01": '',
"IMG_TRUCK_THUMB_02": '',
"IMG_TRUCK_THUMB_03": '',
"IMG_TRUCK_THUMB_04": '',
"IMG_TRUCK_THUMB_05": '',
"IMG_TRUCK_THUMB_06": '',
"IMG_TRUCK_THUMB_07": '',
"IMG_TRUCK_THUMB_08": '',
"IMG_TRUCK_THUMB_09": '',
"IMG_TRUCK_THUMB_10": '',
"IMG_GOODS_01": '',
"IMG_GOODS_02": '',
"IMG_GOODS_03": '',
"IMG_GOODS_04": '',
"IMG_GOODS_05": '',
"IMG_GOODS_06": '',
"IMG_GOODS_07": '',
"IMG_GOODS_08": '',
"IMG_GOODS_09": '',
"IMG_GOODS_10": '',
"IMG_GOODS_THUMB_01": '',
"IMG_GOODS_THUMB_02": '',
"IMG_GOODS_THUMB_03": '',
"IMG_GOODS_THUMB_04": '',
"IMG_GOODS_THUMB_05": '',
"IMG_GOODS_THUMB_06": '',
"IMG_GOODS_THUMB_07": '',
"IMG_GOODS_THUMB_08": '',
"IMG_GOODS_THUMB_09": '',
"IMG_GOODS_THUMB_10": '',
"MKP_JOB_BRAND": '',
"MKP_JOB_NO": '',
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:36:52.306Z",
"MODIFY_DATE": "2020-11-20T08:44:51.457Z",
"DELETE_DATE": '',
"DETAIL": [
{
"LINE_SEQ": 1,
"PRODUCT_TYPE": "FG",
"PACK_CODE": "PCS",
"PACK_QTY": 1,
"PALLET_ID": "AAQ",
"CARTON_ID": '',
"EXPIRY_DATE": '',
"MANUF_DATE": '',
"EXP_RELEASE_QTY": 1,
"RELEASE_QTY": 1,
"ALLOCATED_QTY": 0,
"PICKING_QTY": 0,
"PICKED_QTY": 0,
"PACKED_QTY": 1,
"SHIPPED_QTY": 0,
"RELEASE_QTY_DIFF": 0,
"LOT_NO": '',
"REMARKS": '',
"DEBOM": False,
"SYS_ID": '',
"MKP_JOB_DETAIL_NO": '',
"MKP_DEL_CONSIGNMENT": '',
"MKP_ST_LABEL": '',
"MKP_STATUS": '',
"MKP_INVOICE_NO": '',
"MKP_PRICE_AFTER_DISCOUNT": '',
"CREATE_BY": "ft",
"MODIFY_BY": "ft",
"CREATE_DATE": "2020-11-08T06:36:59.256Z",
"MODIFY_DATE": "2020-11-20T08:44:51.442Z",
"DELETE_DATE": '',
"SKU_CODE": "PILLOWB001",
"LOCATION_CODE": '',
"PICK_BY_CODE": "FIFO"
}
]
}
]
}
return SCPacked_BY_JOB
def create_asn_job():
ASN = {
'DATA': [
{
"<Receive Reference Label-1>": "REC-1611127844",
"<Receive Reference Label-2>": "REC-3692523139",
"<Receive Reference Label-3>": "REC-8350842351",
"<Receive Reference Label-4>": "REC-7576886510",
"EXP_RECEIVED_DATE": "2021-11-10T00:00:00+07:00",
"TP_VEHICLE_NO": '',
"TP_CARRIER": '',
"TP_CONTAINER_NO_1": '',
"TP_CONTAINER_TYPE_1": '',
"TP_SEAL_NO_1": '',
"TP_CONTAINER_NO_2": '',
"TP_CONTAINER_TYPE_2": '',
"TP_SEAL_NO_2": '',
"TP_CONTAINER_NO_3": '',
"TP_CONTAINER_TYPE_3": '',
"TP_SEAL_NO_3": '',
"TP_AIRWAYBILL": '',
"TP_BILL_OF_LANDING": '',
"TP_ORIGIN": '',
"TP_VESSEL": '',
"TP_PORT_OF_LANDING": '',
"TP_PORT_DISCHARGE": '',
"TP_VOYAGE": '',
"TP_ETA": '',
"SUP_CODE": '',
"SUP_COMPANY": '',
"SUP_ADDRESS": '',
"SUP_ADDRESS2": '',
"SUP_AREA1": '',
"SUP_AREA2": '',
"SUP_AREA3": '',
"SUP_AREA4": '',
"SUP_ZONE1": '',
"SUP_ZONE2": '',
"SUP_REGION1": '',
"SUP_REGION2": '',
"SUP_COUNTRY": '',
"SUP_ZIP": '',
"SUP_CONTACT_NAME": '',
"SUP_CONTACT_PHONE": '',
"SUP_CONTACT_EMAIL": '',
"SUP_NOTE": '',
"REMARKS": '',
"<Receive Header User Defined-01>": '',
"<Receive Header User Defined-02>": '',
"<Receive Header User Defined-03>": '',
"<Receive Header User Defined-04>": '',
"<Receive Header User Defined-05>": '',
"<Receive Header User Defined-06>": '',
"<Receive Header User Defined-07>": '',
"<Receive Header User Defined-08>": '',
"<Receive Header User Defined-09>": '',
"<Receive Header User Defined-10>": '',
"<Receive Header User Defined-11>": '',
"<Receive Header User Defined-12>": '',
"<Receive Header User Defined-13>": '',
"<Receive Header User Defined-14>": '',
"<Receive Header User Defined-15>": '',
"DETAIL": [
{
"SKU_CODE": "123456",
"RECEIVED_QTY": 1,
"PRODUCT_TYPE": '',
"PALLET_ID": '',
"CARTON_ID": '',
"MANUF_DATE": '',
"EXPIRY_DATE": '',
"LENGTH": '',
"WIDTH": '',
"HEIGHT": '',
"LENGTH_UOM": '',
"VOLUME_UOM": '',
"GROSS_WEIGHT": '',
"NET_WEIGHT": '',
"WEIGHT_UOM": '',
"PACK_CODE": '',
"PACK_QTY": '',
"REMARKS": '',
"DEBOM": '',
"LOT_NO": '',
"PRICE": '',
"<Receive User Defined-01>": '',
"<Receive User Defined-02>": '',
"<Receive User Defined-03>": '',
"<Receive User Defined-04>": '',
"<Receive User Defined-05>": '',
"<Receive User Defined-06>": '',
"<Receive User Defined-07>": '',
"<Receive User Defined-08>": '',
"<Receive User Defined-09>": '',
"<Receive User Defined-10>": '',
"<Receive User Defined-11>": '',
"<Receive User Defined-12>": '',
"<Receive User Defined-13>": '',
"<Receive User Defined-14>": '',
"<Receive User Defined-15>": '',
"<Receive User Defined-16>": '',
"<Receive User Defined-17>": '',
"<Receive User Defined-18>": '',
"<Receive User Defined-19>": '',
"<Receive User Defined-20>": '',
"<Receive User Defined-21>": '',
"<Receive User Defined-22>": '',
"<Receive User Defined-23>": '',
"<Receive User Defined-24>": '',
"<Receive User Defined-25>": '',
"<Receive User Defined-26>": '',
"<Receive User Defined-27>": '',
"<Receive User Defined-28>": '',
"<Receive User Defined-29>": '',
"<Receive User Defined-30>": ''
}
]
}
]
}
return ASN
def update_SKU_Bom():
BOM = {
"DATA": [
{
"SKU_CODE": "string",
"CHILDREN": [
{
"CHILD_SKU_CODE": "string",
"QTY": 1
}
]
}
]
}
return BOM
def create_or_Update_SKU():
SKU = {
"DATA": [
{
"SKU_CODE": "123456",
"ACTIVE": True,
"STORAGE_TYPE": "AMBIENT",
"STORAGE_CAT_SERVICE": "GENERAL",
"PRODUCT_TYPE": "NORMAL",
"PICK_BY_CODE": "FIFO",
"CBM_RATIO": 0,
"BOM_SKU": False,
"DEFAULT_EXPIRY": True,
"SKU_DESC": '',
"SKU_DESC_LOCAL": '',
"MATERIAL_TYPE": '',
"PRODUCT_FAMILY_01": '',
"PRODUCT_FAMILY_02": '',
"PRODUCT_FAMILY_03": '',
"ABC_VALUE": '',
"ABC_TRANSACTION": '',
"EXPIRY_FROM": '',
"EXPIRY_DAYS": '',
"EXPIRY_PERIOD": '',
"DEFAULT_MANF": '',
"MANUF_FROM": '',
"MANUF_DAYS": '',
"MANUF_PERIOD": '',
"LAST_CC_DATE": '',
"MIN_STOCK_QTY": '',
"MIN_REORDER_QTY": '',
"MAX_STOCK_QTY": '',
"MIN_SHELF_LIFE": '',
"DAY_PREPARE_SHIP": '',
"REQUIRE_SCAN_IN_SERIAL": '',
"REQUIRE_SCAN_OUT_SERIAL": '',
"SKU_IMAGE_1": '',
"SKU_IMAGE_2": '',
"SKU_IMAGE_3": '',
"SKU_IMAGE_4": '',
"SKU_IMAGE_5": '',
"SKU_IMAGE_6": '',
"SKU_IMAGE_7": '',
"SKU_IMAGE_8": '',
"SKU_IMAGE_9": '',
"SKU_IMAGE_10": '',
"REMARKS": '',
"COLOUR": '',
"STYLE": '',
"SKU_ADS_DETAIL": '',
"RECEIVE_MIN_DAYS": '',
"<SKU User Defined-01>": '',
"<SKU User Defined-02>": '',
"<SKU User Defined-03>": '',
"<SKU User Defined-04>": '',
"<SKU User Defined-05>": '',
"<SKU User Defined-06>": '',
"<SKU User Defined-07>": '',
"<SKU User Defined-08>": '',
"<SKU User Defined-09>": '',
"<SKU User Defined-10>": '',
"DIMENSION": [
{
"SKU_LEVEL": 1,
"PACK_CODE": "PCS",
"PACK_QTY": 1,
"DEF_PACK": True,
"LENGTH": 1,
"WIDTH": 1,
"HEIGHT": 1,
"LENGTH_UOM": "CM",
"VOLUME": 1,
"VOLUME_UOM": "CBM",
"NET_WEIGHT": 1,
"GROSS_WEIGHT": 1,
"WEIGHT_UOM": "TON",
"DEF_PACK_CAL": '',
"DEF_MKP": '',
"PRICE": '',
"DISCOUNT_BY": '',
"DISCOUNT_VALUE": '',
"BUBBLE_RATIO": '',
"SUG_PACKAGE_SIZE": '',
"BARCODE": [
{
"SKU_REF_TYPE": "M",
"SKU_REF_BARCODE": "CODE123456",
"SKU_REF_CODE": '',
"GS1_EXTENSION": ''
}
]
}
],
"BOM": [
{
"SKU_CHILD_CODE": "CH123456",
"QTY": 1
}
]
}
]
}
return SKU
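# Design note (added; not in the original file): every builder constructs its
# dict fresh on each call, so callers receive independent objects. If one of
# these templates were instead hoisted to a module-level constant, callers
# would need to copy it before mutating, e.g.:
#
#   import copy
#   sku = copy.deepcopy(create_or_Update_SKU())
#   sku["DATA"][0]["SKU_CODE"] = "789012"  # hypothetical value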
[row metadata omitted: numeric quality signals for the preceding file]
[next row: URI/1 - INICIANTE/Python/1020 - IdadesEmDias.py | william-james-pj/LogicaProgramacao | MIT | 1 star]
# URI 1020: convert an age given in days into years, months and days,
# using the problem's fixed 365-day year and 30-day month
x = int(input())
print('{} ano(s)'.format(x // 365))
print('{} mes(es)'.format((x % 365) // 30))
print('{} dia(s)'.format((x % 365) % 30))
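The same arithmetic can be written with divmod; a sketch of the equivalent, shown only for comparison:
dias = int(input())
anos, resto = divmod(dias, 365)
meses, dias_restantes = divmod(resto, 30)
print('{} ano(s)'.format(anos))
print('{} mes(es)'.format(meses))
print('{} dia(s)'.format(dias_restantes))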
| 26.8
| 42
| 0.537313
| 24
| 134
| 3
| 0.541667
| 0.291667
| 0.416667
| 0.305556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112
| 0.067164
| 134
| 5
| 42
| 26.8
| 0.464
| 0
| 0
| 0
| 0
| 0
| 0.207407
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 7
| f7cb26ece665a60397058365d4e0ca65775c3117
| 8,712
| py
| Python
| flashcard_app/views.py
| ptyadana/django-WEB-flashcards
| 47486bc31f08fe21ae1530f1ec6f00044a2e954b
| ["MIT"] | 1
| 2020-10-22T01:19:40.000Z
| 2020-10-22T01:19:40.000Z
| flashcard_app/views.py
| ptyadana/django-WEB-flashcards
| 47486bc31f08fe21ae1530f1ec6f00044a2e954b
| ["MIT"] | 3
| 2021-06-04T22:42:45.000Z
| 2021-09-22T18:46:56.000Z
| flashcard_app/views.py
| ptyadana/django-WEB-flashcards
| 47486bc31f08fe21ae1530f1ec6f00044a2e954b
| ["MIT"] | null | null | null |
from django.shortcuts import render
from random import randint
# Create your views here.
def home(request):
"""home page"""
return render(request,'flashcard_app/home.html',{})
def addition(request):
"""addition page"""
#generate two new random numbers for the quiz
new_num_1 = randint(0,9)
new_num_2 = randint(0,9)
#check the request method
if request.method == 'POST':
answer = request.POST['answer']
old_num_1 = request.POST['old_num_1']
old_num_2 = request.POST['old_num_2']
#color scheme for bootstrap alert boxes
alert_color = 'danger'
try:
answer = int(answer)
old_num_1 = int(old_num_1)
old_num_2 = int(old_num_2)
correct_answer = old_num_1 + old_num_2
#check the answer and display the result
if answer == correct_answer:
result = 'Correct! '
msg = f'{old_num_1} + {old_num_2} = {correct_answer}'
alert_color = 'success'
return render(request,'flashcard_app/addition.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
else:
result = 'Incorrect! '
msg = f'{old_num_1} + {old_num_2} = {correct_answer}'
alert_color = 'warning'
return render(request,'flashcard_app/addition.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
except ValueError:
msg = 'The answer is invalid. Please try again.'
return render(request,'flashcard_app/addition.html',{
'alert_color':alert_color,
'msg':msg,
'new_num_1':old_num_1,
'new_num_2':old_num_2
})
return render(request,'flashcard_app/addition.html',{
'new_num_1':new_num_1,
'new_num_2':new_num_2,
})
def subtraction(request):
"""subtraction page"""
#generate two new random numbers for the quiz
new_num_1 = randint(0,9)
new_num_2 = randint(0,9)
#check the request method
if request.method == 'POST':
answer = request.POST['answer']
old_num_1 = request.POST['old_num_1']
old_num_2 = request.POST['old_num_2']
#color scheme for bootstrap alert boxes
alert_color = 'danger'
try:
answer = int(answer)
old_num_1 = int(old_num_1)
old_num_2 = int(old_num_2)
correct_answer = old_num_1 - old_num_2
#check the answer and display the result
if answer == correct_answer:
result = 'Correct! '
msg = f'{old_num_1} - {old_num_2} = {correct_answer}'
alert_color = 'success'
return render(request,'flashcard_app/subtraction.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
else:
result = 'Incorrect! '
msg = f'{old_num_1} - {old_num_2} = {correct_answer}'
alert_color = 'warning'
return render(request,'flashcard_app/subtraction.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
except ValueError:
msg = 'The answer is invalid. Please try again.'
return render(request,'flashcard_app/subtraction.html',{
'alert_color':alert_color,
'msg':msg,
'new_num_1':old_num_1,
'new_num_2':old_num_2
})
return render(request,'flashcard_app/subtraction.html',{
'new_num_1':new_num_1,
'new_num_2':new_num_2,
})
def multiplication(request):
"""multiplication page"""
#generate two new random numbers for the quiz
new_num_1 = randint(0,9)
new_num_2 = randint(0,9)
#check the request method
if request.method == 'POST':
answer = request.POST['answer']
old_num_1 = request.POST['old_num_1']
old_num_2 = request.POST['old_num_2']
#color scheme for bootstrap alert boxes
alert_color = 'danger'
try:
answer = int(answer)
old_num_1 = int(old_num_1)
old_num_2 = int(old_num_2)
correct_answer = old_num_1 * old_num_2
#check the answer and display the result
if answer == correct_answer:
result = 'Correct! '
msg = f'{old_num_1} × {old_num_2} = {correct_answer}'
alert_color = 'success'
return render(request,'flashcard_app/multiplication.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
else:
result = 'Incorrect! '
msg = f'{old_num_1} × {old_num_2} = {correct_answer}'
alert_color = 'warning'
return render(request,'flashcard_app/multiplication.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
except ValueError:
msg = 'The answer is invalid. Please try again.'
return render(request,'flashcard_app/multiplication.html',{
'alert_color':alert_color,
'msg':msg,
'new_num_1':old_num_1,
'new_num_2':old_num_2
})
return render(request,'flashcard_app/multiplication.html',{
'new_num_1':new_num_1,
'new_num_2':new_num_2,
})
def division(request):
"""division page"""
#generate two new random numbers for the quiz
new_num_1 = randint(0,9)
new_num_2 = randint(1,9) #to avoid division by zero
#check the request method
if request.method == 'POST':
answer = request.POST['answer']
old_num_1 = request.POST['old_num_1']
old_num_2 = request.POST['old_num_2']
#color scheme for bootstrap alert boxes
alert_color = 'danger'
try:
answer = float(answer)
old_num_1 = int(old_num_1)
old_num_2 = int(old_num_2)
#handle showing 1.0 instead of 1
if (old_num_1 % old_num_2) == 0:
correct_answer = int(old_num_1 / old_num_2)
else:
correct_answer = round(old_num_1 / old_num_2,2)
#check the answer and display the result
if answer == correct_answer:
result = 'Correct! '
msg = f'{old_num_1} ÷ {old_num_2} = {correct_answer}'
alert_color = 'success'
return render(request,'flashcard_app/division.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
else:
result = 'Incorrect! '
msg = f'{old_num_1} ÷ {old_num_2} = {correct_answer}'
alert_color = 'warning'
return render(request,'flashcard_app/division.html',{
'alert_color':alert_color,
'msg':msg,
'result':result,
'new_num_1':new_num_1,
'new_num_2':new_num_2
})
except ValueError:
msg = 'The answer is invalid. Please try again.'
return render(request,'flashcard_app/division.html',{
'alert_color':alert_color,
'msg':msg,
'new_num_1':old_num_1,
'new_num_2':old_num_2
})
return render(request,'flashcard_app/division.html',{
'new_num_1':new_num_1,
'new_num_2':new_num_2,
})
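The four views above differ only in the operator, the symbol shown in the message, and the template name. A possible refactor sketch, not part of the original app (division is left out because it needs the zero-divisor and rounding handling shown above):
import operator

OPS = {
    'addition': (operator.add, '+'),
    'subtraction': (operator.sub, '-'),
    'multiplication': (operator.mul, '×'),
}

def quiz(request, op_name):
    """Hypothetical shared view covering addition/subtraction/multiplication."""
    func, symbol = OPS[op_name]
    template = f'flashcard_app/{op_name}.html'
    context = {'new_num_1': randint(0, 9), 'new_num_2': randint(0, 9)}
    if request.method == 'POST':
        context['alert_color'] = 'danger'
        try:
            answer = int(request.POST['answer'])
            old_1 = int(request.POST['old_num_1'])
            old_2 = int(request.POST['old_num_2'])
            correct = func(old_1, old_2)
            context['result'] = 'Correct! ' if answer == correct else 'Incorrect! '
            context['msg'] = f'{old_1} {symbol} {old_2} = {correct}'
            context['alert_color'] = 'success' if answer == correct else 'warning'
        except ValueError:
            context['msg'] = 'The answer is invalid. Please try again.'
    return render(request, template, context)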
| 33.637066
| 75
| 0.511364
| 1,009
| 8,712
| 4.088206
| 0.076313
| 0.098909
| 0.057697
| 0.067879
| 0.925576
| 0.918061
| 0.911273
| 0.897212
| 0.897212
| 0.897212
| 0
| 0.028749
| 0.389118
| 8,712
| 259
| 76
| 33.637066
| 0.745584
| 0.082989
| 0
| 0.917949
| 0
| 0
| 0.223886
| 0.061792
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0.010256
| 0
| 0.123077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 79131504a1dc500ddb88266068b4ba148acd29cb
| 264
| py
| Python
| allennlp_models/generation/dataset_readers/__init__.py
| matt-peters/allennlp-models
| cdd505ed539fdc2b82e4cc0a23eae4bfd3368e7e
| ["Apache-2.0"] | 402
| 2020-03-11T22:58:35.000Z
| 2022-03-29T09:05:27.000Z
| allennlp_models/generation/dataset_readers/__init__.py
| matt-peters/allennlp-models
| cdd505ed539fdc2b82e4cc0a23eae4bfd3368e7e
| ["Apache-2.0"] | 116
| 2020-03-11T01:26:57.000Z
| 2022-03-25T13:03:56.000Z
| allennlp_models/generation/dataset_readers/__init__.py
| matt-peters/allennlp-models
| cdd505ed539fdc2b82e4cc0a23eae4bfd3368e7e
| ["Apache-2.0"] | 140
| 2020-03-11T00:51:35.000Z
| 2022-03-29T09:05:36.000Z |
from allennlp_models.generation.dataset_readers.copynet_seq2seq import CopyNetDatasetReader
from allennlp_models.generation.dataset_readers.seq2seq import Seq2SeqDatasetReader
from allennlp_models.generation.dataset_readers.cnn_dm import CNNDailyMailDatasetReader
| 66
| 91
| 0.920455
| 29
| 264
| 8.103448
| 0.482759
| 0.153191
| 0.229787
| 0.357447
| 0.53617
| 0.53617
| 0
| 0
| 0
| 0
| 0
| 0.011905
| 0.045455
| 264
| 3
| 92
| 88
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 7920e6bce0d3333037ef076ccdceaa89d33a857c
| 106
| py
| Python
| src/test/pythonFiles/terminalExec/sample8_raw.py
| ChaseKnowlden/vscode-jupyter
| 9bdaf87f0b6dcd717c508e9023350499a6093f97
| ["MIT"] | 2,461
| 2016-01-21T16:40:43.000Z
| 2022-03-31T12:01:55.000Z
| src/test/pythonFiles/terminalExec/sample8_raw.py
| ChaseKnowlden/vscode-jupyter
| 9bdaf87f0b6dcd717c508e9023350499a6093f97
| ["MIT"] | 12,536
| 2019-05-06T21:26:14.000Z
| 2022-03-31T23:06:48.000Z
| src/test/pythonFiles/terminalExec/sample8_raw.py
| vasili8m/vscode-python
| 846eee870e8b7bab38172600836faedb5fb80166
| ["MIT"] | 871
| 2019-05-15T13:43:55.000Z
| 2022-03-31T03:04:35.000Z |
if True:
print(1)
print(1)
else:
print(2)
print(2)
print(3)
| 10.6
| 16
| 0.377358
| 13
| 106
| 3.076923
| 0.538462
| 0.3
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 0.509434
| 106
| 9
| 17
| 11.777778
| 0.673077
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.714286
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 7
| 792961a7fb7e1df9a4412759f5a078e2e8b25cde
| 5,424
| py
| Python
| tcrdist/tests/longtest_simulate_cdr3_w_olga.py
| agartland/tcrdist3
| 34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
| ["MIT"] | 26
| 2020-12-28T17:37:01.000Z
| 2022-01-29T01:31:13.000Z
| tcrdist/tests/longtest_simulate_cdr3_w_olga.py
| agartland/tcrdist3
| 34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
| ["MIT"] | 31
| 2020-08-17T22:17:57.000Z
| 2022-03-18T23:47:34.000Z
| tcrdist/tests/longtest_simulate_cdr3_w_olga.py
| agartland/tcrdist3
| 34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
| ["MIT"] | 7
| 2020-08-18T23:55:40.000Z
| 2021-09-22T18:15:54.000Z |
import olga.load_model as load_model
import olga.generation_probability as pgen
import olga.sequence_generation as seq_gen
import pandas as pd
def generate_simulated_beta_seqs(params_file_name = 'tcrdist/default_models/human_T_beta/model_params.txt',
marginals_file_name = 'tcrdist/default_models/human_T_beta/model_marginals.txt',
V_anchor_pos_file ='tcrdist/default_models/human_T_beta/V_gene_CDR3_anchors.csv',
J_anchor_pos_file = 'tcrdist/default_models/human_T_beta/J_gene_CDR3_anchors.csv',
output_cols = ['cdr3_b_aa', "v_b_gene",'j_b_gene'],
n = 100000):
#Load data
genomic_data = load_model.GenomicDataVDJ()
genomic_data.load_igor_genomic_data(params_file_name, V_anchor_pos_file, J_anchor_pos_file)
#Load model
generative_model = load_model.GenerativeModelVDJ()
generative_model.load_and_process_igor_model(marginals_file_name)
seq_gen_model = seq_gen.SequenceGenerationVDJ(generative_model, genomic_data)
#Generate some random sequences
vs=[x[0] for x in genomic_data.__dict__['genV']]
js=[x[0] for x in genomic_data.__dict__['genJ']]
vs = {i:k for i,k in enumerate(vs)}
js = {i:k for i,k in enumerate(js)}
sim_cdr3 = [seq_gen_model.gen_rnd_prod_CDR3()[1:4] for x in range(n)]
sim_cdr3_long = [(i,vs[v],js[j]) for i,v,j in sim_cdr3 ]
df = pd.DataFrame(sim_cdr3_long, columns = output_cols)
return df
def generate_simulated_alpha_seqs(params_file_name = 'tcrdist/default_models/human_T_alpha/model_params.txt',
marginals_file_name = 'tcrdist/default_models/human_T_alpha/model_marginals.txt',
V_anchor_pos_file ='tcrdist/default_models/human_T_alpha/V_gene_CDR3_anchors.csv',
J_anchor_pos_file = 'tcrdist/default_models/human_T_alpha/J_gene_CDR3_anchors.csv',
output_cols = ['cdr3_a_aa', "v_a_gene",'j_a_gene'],
n = 100000):
#Load data
genomic_data = load_model.GenomicDataVJ()
genomic_data.load_igor_genomic_data(params_file_name, V_anchor_pos_file, J_anchor_pos_file)
#Load model
generative_model = load_model.GenerativeModelVJ()
generative_model.load_and_process_igor_model(marginals_file_name)
seq_gen_model = seq_gen.SequenceGenerationVJ(generative_model, genomic_data)
#Generate some random sequences
vs=[x[0] for x in genomic_data.__dict__['genV']]
js=[x[0] for x in genomic_data.__dict__['genJ']]
vs = {i:k for i,k in enumerate(vs)}
js = {i:k for i,k in enumerate(js)}
sim_cdr3 = [seq_gen_model.gen_rnd_prod_CDR3()[1:4] for x in range(n)]
sim_cdr3_long = [(i,vs[v],js[j]) for i,v,j in sim_cdr3 ]
df = pd.DataFrame(sim_cdr3_long, columns = output_cols)
return df
if __name__ == "__main__":
"""
Using Olga. See:
---------------
Zachary Sethna, Yuval Elhanati, Curtis G Callan, Aleksandra M Walczak, Thierry Mora
`Bioinformatics (2019) <https://doi.org/10.1093/bioinformatics/btz035>`_
OLGA: fast computation of generation probabilities of B- and T-cell receptor amino acid sequences and motifs
Generate 1000K (1M) CDR3s using default Olga Models
Human (Alpha/Beta) and Mouse (Beta)
human_T_alpha_sim1000K.csv
human_T_beta_sim1000K.csv
mouse_T_beta_sim1000K.csv
contained in:
olga_T_alpha_beta_1000K_simulated_cdr3.zip
"""
dfb = generate_simulated_beta_seqs(params_file_name = 'tcrdist/default_models/human_T_beta/model_params.txt',
marginals_file_name = 'tcrdist/default_models/human_T_beta/model_marginals.txt',
V_anchor_pos_file ='tcrdist/default_models/human_T_beta/V_gene_CDR3_anchors.csv',
J_anchor_pos_file = 'tcrdist/default_models/human_T_beta/J_gene_CDR3_anchors.csv',
output_cols = ['cdr3_b_aa', "v_b_gene",'j_b_gene'], n = 1000000)
dfb.to_csv('human_T_beta_sim1000K.csv', index = False)
dfa = generate_simulated_alpha_seqs(params_file_name = 'tcrdist/default_models/human_T_alpha/model_params.txt',
marginals_file_name = 'tcrdist/default_models/human_T_alpha/model_marginals.txt',
V_anchor_pos_file ='tcrdist/default_models/human_T_alpha/V_gene_CDR3_anchors.csv',
J_anchor_pos_file = 'tcrdist/default_models/human_T_alpha/J_gene_CDR3_anchors.csv',
output_cols = ['cdr3_a_aa', "v_a_gene",'j_a_gene'],
n = 1000000)
dfa.to_csv('human_T_alpha_sim1000K.csv', index = False)
dfb = generate_simulated_beta_seqs(params_file_name = 'tcrdist/default_models/mouse_T_beta/model_params.txt',
marginals_file_name = 'tcrdist/default_models/mouse_T_beta/model_marginals.txt',
V_anchor_pos_file ='tcrdist/default_models/mouse_T_beta/V_gene_CDR3_anchors.csv',
J_anchor_pos_file = 'tcrdist/default_models/mouse_T_beta/J_gene_CDR3_anchors.csv',
output_cols = ['cdr3_b_aa', "v_b_gene",'j_b_gene'], n = 1000000)
dfb.to_csv('mouse_T_beta_sim1000K.csv', index = False)
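A small sketch of consuming the generated files (pandas is already imported above; the column names follow output_cols):
sim = pd.read_csv('human_T_beta_sim1000K.csv')
print(sim.shape)  # should be (1000000, 3) given n above
print(sim['v_b_gene'].value_counts().head())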
| 53.176471
| 115
| 0.671645
| 767
| 5,424
| 4.305085
| 0.166884
| 0.084797
| 0.121139
| 0.121139
| 0.816778
| 0.804361
| 0.771956
| 0.771956
| 0.771956
| 0.745306
| 0
| 0.027496
| 0.235619
| 5,424
| 101
| 116
| 53.70297
| 0.768934
| 0.018068
| 0
| 0.612903
| 1
| 0
| 0.287286
| 0.255765
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.064516
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| f7179491275351836a0592d841bc9b9fe7d43c7d
| 151
| py
| Python
| src/GracefulKiller/__init__.py
| MaxMaxoff/GracefulKiller
| dab06ecc7573211ae7acf90e5f889e37d48a88d2
| ["MIT"] | 1
| 2021-10-04T09:09:12.000Z
| 2021-10-04T09:09:12.000Z
| src/GracefulKiller/__init__.py
| MaxMaxoff/GracefulKiller
| dab06ecc7573211ae7acf90e5f889e37d48a88d2
| ["MIT"] | null | null | null
| src/GracefulKiller/__init__.py
| MaxMaxoff/GracefulKiller
| dab06ecc7573211ae7acf90e5f889e37d48a88d2
| ["MIT"] | null | null | null |
try:
    from GracefulKiller.GracefulKiller import GracefulKiller, Loop
except ImportError:
    # fall back to the source-tree layout when the package is not installed
    from src.GracefulKiller.GracefulKiller import GracefulKiller, Loop
| 30.2
| 70
| 0.821192
| 15
| 151
| 8.266667
| 0.466667
| 0.451613
| 0.548387
| 0.774194
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13245
| 151
| 4
| 71
| 37.75
| 0.946565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 8
| f7204650310a54c5dad514d4271f2b18cd7ca4c9
| 43,074
| py
| Python
| pruning/prune_resnet_tools.py
| 18463105800/ssd.pruning.pytorch
| 39592ee00e02f28742028a97592beec18d07258c
| ["MIT"] | 13
| 2019-11-15T16:18:55.000Z
| 2022-03-23T06:04:49.000Z
| pruning/prune_resnet_tools.py
| XUHUAKing/ssd.pruning.pytorch
| 39592ee00e02f28742028a97592beec18d07258c
| ["MIT"] | null | null | null
| pruning/prune_resnet_tools.py
| XUHUAKing/ssd.pruning.pytorch
| 39592ee00e02f28742028a97592beec18d07258c
| ["MIT"] | 3
| 2019-11-27T07:27:38.000Z
| 2020-10-21T08:46:21.000Z |
'''
This file contains functions for pruning resnet-like model in layer level
1. prune_resconv_layer (resnet: conv layers)
2. prune_resnet_lconv_layer (resnet: lconv means identity layer)
3. prune_rbconv_by_indices (resnet: rbconv means right path's bottom layer)
4. prune_rbconv_by_number (resnet: used when you prune lconv but next block/layer cannot absorb your effect)
5. prune_ruconv1_layer (resnet: for resnet normal conv1 layers (i.e. right path's first upper layers))
6. prune_ruconv2_layer (resnet: for resnet normal conv2 layers (i.e. right path's second upper layers))
Author: xuhuahuang as intern in YouTu 07/2018
'''
import torch
from torch.autograd import Variable
from torchvision import models
import cv2
cv2.setNumThreads(0) # pytorch issue 1355: possible deadlock in DataLoader
# OpenCL may be enabled by default in OpenCV3;
# disable it because it's not thread safe and causes unwanted GPU memory allocations
cv2.ocl.setUseOpenCL(False)
import sys
import numpy as np
from models.resnet import BasicBlock, Bottleneck
def replace_layers(model, i, indexes, layers):
if i in indexes:
# layers and indexes store new layers used to update old layers
return layers[indexes.index(i)]
# if i not in indexes, use old layers
return model[i]
# helper function
'''
Helper function for updating immediate following layer/block's input channels
Args:
model: model after pruning current layer/block
layer_index: current layer index. Locate the block/layer being pruned filters NOW
filters_to_prune: the output channels indices being pruned
**Note**
Not handle case described by prune_rbconv_by_number()
Not handle case inside prune_ruconv1_layer() and prune_ruconv2_layer() because they are inside same block
'''
def update_next_layers(model, layer_index, filters_to_prune):
# only need to change in_channels for all following objects based on filters_to_prune
next_conv = None
next_blk = None
next_ds = None # if next one is a block, and this block has downsample path, you need to update both residual and downsample path
offset = 1
# search for the next conv, based on current conv with id = (layer_index, filter_index)
while layer_index + offset < len(model.base._modules.items()):
res = list(model.base._modules.items())[layer_index+offset] # name, module
if isinstance(res[1], torch.nn.modules.conv.Conv2d):
next_name, next_conv = res
next_is_block = False
break
elif isinstance(res[1], (BasicBlock, Bottleneck)):
next_is_block = True
next_blk = res[1]
if res[1].downsample is None:
next_conv = res[1].conv1
next_ds = None
else:
next_conv = res[1].conv1
next_ds = res[1].downsample
break
offset = offset + 1
if next_conv is None:
print("No filter will be prunned for this layer (last layer)")
return model
if len(filters_to_prune) == 0:
print("No filter will be prunned for this layer")
return model
cut = len(filters_to_prune)
# next_conv must exists
next_new_conv = \
torch.nn.Conv2d(in_channels = next_conv.in_channels - cut,\
out_channels = next_conv.out_channels, \
kernel_size = next_conv.kernel_size, \
stride = next_conv.stride,
padding = next_conv.padding,
dilation = next_conv.dilation,
groups = next_conv.groups,
bias = next_conv.bias is not None)
old_weights = next_conv.weight.data.cpu().numpy()
new_weights = next_new_conv.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune, axis = 1)
next_new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if next_conv.bias is not None:
next_new_conv.bias.data = next_conv.bias.data
# next_ds exists or not is okay, no matter next_is_block is True or not
if next_ds is not None:
old_conv_in_next_ds = next_ds[0]
new_conv_in_next_new_ds = \
torch.nn.Conv2d(in_channels = old_conv_in_next_ds.in_channels - cut,\
out_channels = old_conv_in_next_ds.out_channels, \
kernel_size = old_conv_in_next_ds.kernel_size, \
stride = old_conv_in_next_ds.stride,
padding = old_conv_in_next_ds.padding,
dilation = old_conv_in_next_ds.dilation,
groups = old_conv_in_next_ds.groups,
bias = old_conv_in_next_ds.bias is not None)
old_weights = old_conv_in_next_ds.weight.data.cpu().numpy()
new_weights = new_conv_in_next_new_ds.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune, axis = 1)
new_conv_in_next_new_ds.weight.data = torch.from_numpy(new_weights).cuda()
if old_conv_in_next_ds.bias is not None:
new_conv_in_next_new_ds.bias.data = old_conv_in_next_ds.bias.data # bias won't change
next_new_ds = torch.nn.Sequential(new_conv_in_next_new_ds, next_ds[1]) # BN keeps unchanged
else:
next_new_ds = None
# next_new_ds and next_new_conv are ready now, create a next_new_block for replace_layers()
if next_is_block: #same as next_blk is not None:
if isinstance(next_blk, BasicBlock):
# rely on conv1 of old block to get in_planes, out_planes, stride
next_new_block = BasicBlock(next_blk.conv1.in_channels - cut, \
next_blk.conv1.out_channels, next_blk.stride, downsample = next_new_ds)
next_new_block.conv1 = next_new_conv # only update in_channels
next_new_block.bn1 = next_blk.bn1
next_new_block.relu = next_blk.relu
next_new_block.conv2 = next_blk.conv2
next_new_block.bn2 = next_blk.bn2
else:
next_new_block = Bottleneck(next_blk.conv1.in_channels - cut, \
next_blk.conv1.out_channels, next_blk.stride, downsample = next_new_ds)
next_new_block.conv1 = next_new_conv # only update in_channels
next_new_block.bn1 = next_blk.bn1
next_new_block.conv2 = next_blk.conv2
next_new_block.bn2 = next_blk.bn2
next_new_block.conv3 = next_blk.conv3
next_new_block.bn3 = next_blk.bn3
next_new_block.relu = next_blk.relu
if not next_is_block:
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [layer_index+offset], \
[next_new_conv]) for i, _ in enumerate(model.base)))
else:
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [layer_index+offset], \
[next_new_block]) for i, _ in enumerate(model.base)))
del model.base # delete and replace with brand new one
model.base = base
print("Finished update next layers.")
return model
'''
--------------------------------------------------------------------------------
1. Prune conv layers in resnet with/without BN (only support layers stored in model.base for now)
Args:
model: model for pruning
layer_index: index the pruned layer's location within model
cut_ratio: the ratio of filters you want to prune from this layer (e.g. 20% - cut 20% lowest weights layers)
Adapted from: https://github.com/jacobgil/pytorch-pruning
'''
def prune_resconv_layer(model, layer_index, cut_ratio=0.2, use_bn = True):
_, conv = list(model.base._modules.items())[layer_index]
if use_bn:
_, old_bn = list(model.base._modules.items())[layer_index + 1]
next_conv = None
offset = 1
# search for the next conv, based on current conv with id = (layer_index, filter_index)
while layer_index + offset < len(model.base._modules.items()):
res = list(model.base._modules.items())[layer_index+offset] # name, module
if isinstance(res[1], torch.nn.modules.conv.Conv2d):
next_name, next_conv = res
break
elif isinstance(res[1], (BasicBlock, Bottleneck)):
next_conv = res[1].conv1
break
offset = offset + 1
if next_conv is None:
print("No filter will be prunned for this layer (last layer)")
return model
num_filters = conv.weight.data.size(0) # out_channels x in_channels x 3 x 3
# skip the layer with only one filter left
if num_filters <= 1:
print("No filter will be prunned for this layer (num_filters<=1)")
return model
cut = int(cut_ratio * num_filters)
if cut < 1:
print("No filter will be prunned for this layer (cut<1)")
return model
if (num_filters - cut) < 1:
print("No filter will be prunned for this layer (no filter left after cutting)")
return model
# rank the filters within this layer and store into filter_ranks
abs_wgt = torch.abs(conv.weight.data)
values = \
torch.sum(abs_wgt, dim = 1, keepdim = True).\
sum(dim=2, keepdim = True).sum(dim=3, keepdim = True)[:, 0, 0, 0]# .data
# Normalize the sum of weight by the filter dimensions in x 3 x 3
values = values / (abs_wgt.size(1) * abs_wgt.size(2) * abs_wgt.size(3)) # (filter_number for this layer, 1)
print("Ranking filters.. ")
filters_to_prune = np.argsort(values.cpu().numpy())[:cut] # order from smallest to largest
print("Filters that will be prunned", filters_to_prune)
print("Pruning filters.. ")
# the updated conv for current conv, with cut output channels being pruned
new_conv = \
torch.nn.Conv2d(in_channels = conv.in_channels, \
out_channels = conv.out_channels - cut,
kernel_size = conv.kernel_size, \
stride = conv.stride,
padding = conv.padding,
dilation = conv.dilation,
groups = conv.groups,
bias = conv.bias is not None) #(out_channels)
old_weights = conv.weight.data.cpu().numpy() # (out_channels, in_channels, kernel_size[0], kernel_size[1]
new_weights = new_conv.weight.data.cpu().numpy()
# skip that filter's weight inside old_weights and store others into new_weights
new_weights = np.delete(old_weights, filters_to_prune, axis = 0)
new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if conv.bias is not None: # no bias for conv layers
bias_numpy = conv.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune, axis = None)
new_conv.bias.data = torch.from_numpy(bias).cuda()
# BatchNorm modification
# TODO: Extract this function outside as a separate func.
if use_bn:
new_bn = torch.nn.BatchNorm2d(num_features=new_conv.out_channels, \
eps=old_bn.eps, momentum=old_bn.momentum, affine=old_bn.affine)
# old_bn.affine == True, need to copy learnable gamma and beta to new_bn
# gamma: size = (num_features)
old_weights = old_bn.weight.data.cpu().numpy()
new_weights = new_bn.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune)
new_bn.weight.data = torch.from_numpy(new_weights).cuda()
# beta: size = (num_features)
bias_numpy = old_bn.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune)
new_bn.bias.data = torch.from_numpy(bias).cuda()
if use_bn:
# BatchNorm modification
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [layer_index, layer_index+1], \
[new_conv, new_bn]) for i, _ in enumerate(model.base)))
del old_bn
else:
# replace current layer and next_conv with new_conv and next_new_conv respectively
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [layer_index], \
[new_conv]) for i, _ in enumerate(model.base)))
del model.base # delete and replace with brand new one
del conv
model.base = base # update current layer
model = update_next_layers(model, layer_index, filters_to_prune) # update following layers
message = str(100*float(cut) / num_filters) + "%"
print("Filters prunned", str(message))
return model
'''
--------------------------------------------------------------------------------
2. Prune identity conv layers without/with BN in a resnet block
(*Note: NOT used for normal layer, the 'layer' here must locate inside a block indexed by block_index)
Args:
block_index: a block also named as a 'layer' in torchvision implementation, locate lconv layer
*Note:
The index criteria based on 'one single block' unit, which means 1 index represents 1 BasicBlock/Bottleneck, instead of one layer (3-6 blocks)
Return:
cut_indices: the filters_to_prune in this layer, will be used in function 5.
'''
def prune_resnet_lconv_layer(model, block_index, cut_ratio=0.2, use_bn = True):
_, blk = list(model.base._modules.items())[block_index]
cut_indices = None
if not use_bn:
print("ResNet without BN is not supported for prunning")
return cut_indices, model
# check whether the left path has a conv layer for pruning
if blk.downsample is None:
print("No filters will be pruned because lconv doesn't exist")
return cut_indices, model
if not isinstance(blk, (BasicBlock, Bottleneck)):
print("Only support for ResNet with BasicBlock or Bottleneck defined in torchvision")
return cut_indices, model
# get old conv and bn on the left
lconv = blk.downsample[0] # nn.Sequential for (lconv, lbn)
lbn = blk.downsample[1]
next_conv = None
offset = 1
# search for the next conv, can be conv1 within next block, or a normal conv layer
while block_index + offset < len(model.base._modules.items()):
res = list(model.base._modules.items())[block_index+offset] # name, module
if isinstance(res[1], torch.nn.modules.conv.Conv2d):
next_name, next_conv = res
break
elif isinstance(res[1], (BasicBlock, Bottleneck)):
next_conv = res[1].conv1
break
offset = offset + 1
if next_conv is None:
print("No filters will be prunned because this is the last block")
return cut_indices, model
num_filters = lconv.weight.data.size(0) # out_channels x in_channels x 3 x 3
# skip the layer with only one filter left
if num_filters <= 1:
print("No filter will be prunned for this layer (num_filters<=1)")
return cut_indices, model
cut = int(cut_ratio * num_filters)
if cut < 1:
print("No filter will be prunned for this layer (cut<1)")
return cut_indices, model
if (num_filters - cut) < 1:
print("No filter will be prunned for this layer (no filter left after cutting)")
return cut_indices, model
# rank the filters within this layer and store into filter_ranks
abs_wgt = torch.abs(lconv.weight.data)
values = \
torch.sum(abs_wgt, dim = 1, keepdim = True).\
sum(dim=2, keepdim = True).sum(dim=3, keepdim = True)[:, 0, 0, 0]# .data
# Normalize the sum of weight by the filter dimensions in x 3 x 3
values = values / (abs_wgt.size(1) * abs_wgt.size(2) * abs_wgt.size(3)) # (filter_number for this layer, 1)
print("Ranking filters.. ")
filters_to_prune = np.argsort(values.cpu().numpy())[:cut] # order from smallest to largest
print("Filters that will be prunned", filters_to_prune)
print("Pruning filters.. ")
# the updated conv for old lconv, with cut output channels being pruned
new_conv = \
torch.nn.Conv2d(in_channels = lconv.in_channels, \
out_channels = lconv.out_channels - cut,
kernel_size = lconv.kernel_size, \
stride = lconv.stride,
padding = lconv.padding,
dilation = lconv.dilation,
groups = lconv.groups,
bias = lconv.bias is not None) #(out_channels)
old_weights = lconv.weight.data.cpu().numpy() # (out_channels, in_channels, kernel_size[0], kernel_size[1]
new_weights = new_conv.weight.data.cpu().numpy()
# skip that filter's weight inside old_weights and store others into new_weights
new_weights = np.delete(old_weights, filters_to_prune, axis = 0)
new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if lconv.bias is not None:
bias_numpy = lconv.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune, axis = None)
new_conv.bias.data = torch.from_numpy(bias).cuda()
# new BN layer after new_conv
new_bn = torch.nn.BatchNorm2d(num_features=new_conv.out_channels, \
eps=lbn.eps, momentum=lbn.momentum, affine=lbn.affine)
# old_bn.affine == True, need to copy learnable gamma and beta to new_bn
# gamma: size = (num_features)
old_weights = lbn.weight.data.cpu().numpy()
new_weights = new_bn.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune)
new_bn.weight.data = torch.from_numpy(new_weights).cuda()
# beta: size = (num_features)
bias_numpy = lbn.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune)
new_bn.bias.data = torch.from_numpy(bias).cuda()
# replace
# update current left conv + left BN layer, have BN by default
new_ds = torch.nn.Sequential(
*(replace_layers(blk.downsample, i, [0, 1], \
[new_conv, new_bn]) for i, _ in enumerate(blk.downsample)))
# delete current and replace with a brand new BLOCK
if isinstance(blk, BasicBlock):
# rely on conv1 of old block to get in_planes, out_planes, stride
new_blk = BasicBlock(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = new_ds)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = blk.conv1
new_blk.bn1 = blk.bn1
new_blk.relu = blk.relu
new_blk.conv2 = blk.conv2
new_blk.bn2 = blk.bn2
else:
new_blk = Bottleneck(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = new_ds)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = blk.conv1
new_blk.bn1 = blk.bn1
new_blk.conv2 = blk.conv2
new_blk.bn2 = blk.bn2
new_blk.conv3 = blk.conv3
new_blk.bn3 = blk.bn3
new_blk.relu = blk.relu
# now new_blk is ready, it can act as a layer and replace old blk with replace_layers()
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [block_index], \
[new_blk]) for i, _ in enumerate(model.base)))
# delete and replace with brand new one
del model.base # delete the things pointed by pointer
del blk
model.base = base # update current layer
model = update_next_layers(model, block_index, filters_to_prune) # update following layers
cut_indices = filters_to_prune
message = str(100*float(cut) / num_filters) + "%"
print("Filters prunned", str(message))
return cut_indices, model
'''
--------------------------------------------------------------------------------
3. Prune residual conv layer, the one at the bottom of residual side with/without BN
(*Note: MUST call this after you prune identity path with downsample, the size won't fit because upper functions only update left path)
Args:
block_index: the BasicBlock or Bottleneck Block this layer locates
filters_to_prune: the filters' indices waiting for being pruned
use_bn: use Batch Norm or not
'''
def prune_rbconv_by_indices(model, block_index, filters_to_prune, use_bn = True):
_, blk = list(model.base._modules.items())[block_index]
if not use_bn:
print("ResNet without BN is not supported for prunning")
return model
# check whether the left path has conv layer for prunning
if blk.downsample is None:
print("Only support pruning for rbconv after lconv was pruned")
return model
if not isinstance(blk, (BasicBlock, Bottleneck)):
print("Only support for ResNet with BasicBlock or Bottleneck defined in torchvision")
return model
if isinstance(blk, BasicBlock):
# when it is BasicBlock, the rbconv is conv2, and its bn is bn2
conv = blk.conv2
bn = blk.bn2
else:
# when it is Bottleneck, the rbconv is conv3, and its bn is bn3
conv = blk.conv3
bn = blk.bn3
# only need to update itself, no need to care about others such as next_ds/next_conv
new_conv = \
torch.nn.Conv2d(in_channels = conv.in_channels, \
out_channels = conv.out_channels - len(filters_to_prune),
kernel_size = conv.kernel_size, \
stride = conv.stride,
padding = conv.padding,
dilation = conv.dilation,
groups = conv.groups,
bias = conv.bias is not None) #(out_channels)
old_weights = conv.weight.data.cpu().numpy() # (out_channels, in_channels, kernel_size[0], kernel_size[1]
new_weights = new_conv.weight.data.cpu().numpy()
# skip that filter's weight inside old_weights and store others into new_weights
new_weights = np.delete(old_weights, filters_to_prune, axis = 0)
new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if conv.bias is not None:
bias_numpy = conv.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - len(filters_to_prune)), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune, axis = None)
new_conv.bias.data = torch.from_numpy(bias).cuda()
# new BN layer after new_conv
new_bn = torch.nn.BatchNorm2d(num_features=new_conv.out_channels, \
eps=bn.eps, momentum=bn.momentum, affine=bn.affine)
# old_bn.affine == True, need to copy learnable gamma and beta to new_bn
# gamma: size = (num_features)
old_weights = bn.weight.data.cpu().numpy()
new_weights = new_bn.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune)
new_bn.weight.data = torch.from_numpy(new_weights).cuda()
# beta: size = (num_features)
bias_numpy = bn.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - len(filters_to_prune)), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune)
new_bn.bias.data = torch.from_numpy(bias).cuda()
if isinstance(blk, BasicBlock):
# replace with new block
new_blk = BasicBlock(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = blk.downsample)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = blk.conv1
new_blk.bn1 = blk.bn1
new_blk.relu = blk.relu
new_blk.conv2 = new_conv # update with new conv
new_blk.bn2 = new_bn # update with new bn
else:
# replace with new block
new_blk = Bottleneck(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = blk.downsample)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = blk.conv1
new_blk.bn1 = blk.bn1
new_blk.conv2 = blk.conv2
new_blk.bn2 = blk.bn2
new_blk.conv3 = new_conv
new_blk.bn3 = new_bn
new_blk.relu = blk.relu
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [block_index], \
[new_blk]) for i, _ in enumerate(model.base)))
# delete and replace
del model.base
model.base = base
print("Filters prunned for rb layer:", filters_to_prune)
return model
'''
--------------------------------------------------------------------------------
4. Prune residual conv layer, the one at the bottom of residual side with/without BN, based on its own weights
(*Note: MUST call this when you prune lconv layer,
the immediate following block/conv cannot absorb your effect due to its empty left path)
Args:
block_index: the BasicBlock or Bottleneck Block this layer locates
num_cut: the number of filters waiting for being pruned
use_bn: use Batch Norm or not
'''
def prune_rbconv_by_number(model, block_index, num_cut, use_bn = True):
_, blk = list(model.base._modules.items())[block_index]
if not use_bn:
print("ResNet without BN is not supported for prunning")
return model
if not isinstance(blk, (BasicBlock, Bottleneck)):
print("Only support for ResNet with BasicBlock or Bottleneck defined in torchvision")
return model
if isinstance(blk, BasicBlock):
# when it is BasicBlock, the rbconv is conv2, and its bn is bn2
conv = blk.conv2
bn = blk.bn2
else:
# when it is Bottleneck, the rbconv is conv3, and its bn is bn3
conv = blk.conv3
bn = blk.bn3
num_filters = conv.weight.data.size(0) # out_channels x in_channels x 3 x 3
# skip the layer with only one filter left
if num_filters <= 1:
print("No filter will be prunned for this layer (num_filters<=1)")
return model
if num_cut < 1:
print("Error: No filter will be prunned for this layer (cut<1)")
return model
if (num_filters - num_cut) < 1:
print("Error: No filter will be prunned for this layer (no filter left after cutting)")
return model
# rank the filters within this layer and store into filter_ranks
abs_wgt = torch.abs(conv.weight.data)
values = \
torch.sum(abs_wgt, dim = 1, keepdim = True).\
sum(dim=2, keepdim = True).sum(dim=3, keepdim = True)[:, 0, 0, 0]# .data
# Normalize the sum of weight by the filter dimensions in x 3 x 3
values = values / (abs_wgt.size(1) * abs_wgt.size(2) * abs_wgt.size(3)) # (filter_number for this layer, 1)
print("Ranking filters.. ")
filters_to_prune = np.argsort(values.cpu().numpy())[:num_cut] # order from smallest to largest
print("Filters that will be prunned", filters_to_prune)
print("Pruning filters.. ")
# only need to update itself, no need to care about others such as next_ds/next_conv
new_conv = \
torch.nn.Conv2d(in_channels = conv.in_channels, \
out_channels = conv.out_channels - num_cut,
kernel_size = conv.kernel_size, \
stride = conv.stride,
padding = conv.padding,
dilation = conv.dilation,
groups = conv.groups,
bias = conv.bias is not None) #(out_channels)
old_weights = conv.weight.data.cpu().numpy() # (out_channels, in_channels, kernel_size[0], kernel_size[1]
new_weights = new_conv.weight.data.cpu().numpy()
# skip that filter's weight inside old_weights and store others into new_weights
new_weights = np.delete(old_weights, filters_to_prune, axis = 0)
new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if conv.bias is not None:
bias_numpy = conv.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - num_cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune, axis = None)
new_conv.bias.data = torch.from_numpy(bias).cuda()
# new BN layer after new_conv
new_bn = torch.nn.BatchNorm2d(num_features=new_conv.out_channels, \
eps=bn.eps, momentum=bn.momentum, affine=bn.affine)
# old_bn.affine == True, need to copy learnable gamma and beta to new_bn
# gamma: size = (num_features)
old_weights = bn.weight.data.cpu().numpy()
new_weights = new_bn.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune)
new_bn.weight.data = torch.from_numpy(new_weights).cuda()
# beta: size = (num_features)
bias_numpy = bn.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - num_cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune)
new_bn.bias.data = torch.from_numpy(bias).cuda()
if isinstance(blk, BasicBlock):
# replace with new block
new_blk = BasicBlock(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = blk.downsample)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = blk.conv1
new_blk.bn1 = blk.bn1
new_blk.relu = blk.relu
new_blk.conv2 = new_conv # update with new conv
new_blk.bn2 = new_bn # update with new bn
else:
# replace with new block
new_blk = Bottleneck(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = blk.downsample)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = blk.conv1
new_blk.bn1 = blk.bn1
new_blk.conv2 = blk.conv2
new_blk.bn2 = blk.bn2
new_blk.conv3 = new_conv
new_blk.bn3 = new_bn
new_blk.relu = blk.relu
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [block_index], \
[new_blk]) for i, _ in enumerate(model.base)))
# delete and replace
del model.base
del blk
model.base = base
model = update_next_layers(model, block_index, filters_to_prune) # update following layers
print("Filters prunned for rb layer:", filters_to_prune)
return model
'''
--------------------------------------------------------------------------------
5. Prune normal residual conv layer, the FIRST one on the upper part of the residual side, with/without BN
Args:
block_index: the BasicBlock or Bottleneck Block this layer locates
cut_ratio: the ratio of filters pruned from conv1 (and conv2 if Bottleneck)
use_bn: use Batch Norm or not
'''
def prune_ruconv1_layer(model, block_index, cut_ratio=0.2, use_bn = True):
_, blk = list(model.base._modules.items())[block_index]
if not use_bn:
print("ResNet without BN is not supported for prunning")
return model
if not isinstance(blk, (BasicBlock, Bottleneck)):
print("Conv1 only for ResNet with BasicBlock or Bottleneck defined in torchvision")
return model
# cut conv1, and next conv is conv2
conv = blk.conv1
bn = blk.bn1
next_conv = blk.conv2
num_filters = conv.weight.data.size(0) # out_channels x in_channels x 3 x 3
# skip the layer with only one filter left
if num_filters <= 1:
print("No filter will be prunned for this layer (num_filters<=1)")
return model
cut = int(cut_ratio * num_filters)
if cut < 1:
print("No filter will be prunned for this layer (cut<1)")
return model
if (num_filters - cut) < 1:
print("No filter will be prunned for this layer (no filter left after cutting)")
return model
# rank the filters within this layer and store into filter_ranks
abs_wgt = torch.abs(conv.weight.data)
values = \
torch.sum(abs_wgt, dim = 1, keepdim = True).\
sum(dim=2, keepdim = True).sum(dim=3, keepdim = True)[:, 0, 0, 0]# .data
# Normalize the sum of weight by the filter dimensions in x 3 x 3
values = values / (abs_wgt.size(1) * abs_wgt.size(2) * abs_wgt.size(3)) # (filter_number for this layer, 1)
print("Ranking filters.. ")
filters_to_prune = np.argsort(values.cpu().numpy())[:cut] # order from smallest to largest
print("Filters that will be prunned", filters_to_prune)
print("Pruning filters.. ")
# the updated conv for current conv, with cut output channels being pruned
new_conv = \
torch.nn.Conv2d(in_channels = conv.in_channels, \
out_channels = conv.out_channels - cut,
kernel_size = conv.kernel_size, \
stride = conv.stride,
padding = conv.padding,
dilation = conv.dilation,
groups = conv.groups,
bias = conv.bias is not None) #(out_channels)
old_weights = conv.weight.data.cpu().numpy() # (out_channels, in_channels, kernel_size[0], kernel_size[1]
new_weights = new_conv.weight.data.cpu().numpy()
# skip that filter's weight inside old_weights and store others into new_weights
new_weights = np.delete(old_weights, filters_to_prune, axis = 0)
new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if conv.bias is not None:
bias_numpy = conv.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune, axis = None)
new_conv.bias.data = torch.from_numpy(bias).cuda() # new conv1
# BatchNorm layer
new_bn = torch.nn.BatchNorm2d(num_features=new_conv.out_channels, \
eps=bn.eps, momentum=bn.momentum, affine=bn.affine)
# gamma: size = (num_features)
old_weights = bn.weight.data.cpu().numpy()
new_weights = new_bn.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune)
new_bn.weight.data = torch.from_numpy(new_weights).cuda()
# beta: size = (num_features)
bias_numpy = bn.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune)
new_bn.bias.data = torch.from_numpy(bias).cuda() # new bn1
# new conv for next_conv
next_new_conv = \
torch.nn.Conv2d(in_channels = next_conv.in_channels - cut,\
out_channels = next_conv.out_channels, \
kernel_size = next_conv.kernel_size, \
stride = next_conv.stride,
padding = next_conv.padding,
dilation = next_conv.dilation,
groups = next_conv.groups,
bias = next_conv.bias is not None)
old_weights = next_conv.weight.data.cpu().numpy()
new_weights = next_new_conv.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune, axis = 1)
next_new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if next_conv.bias is not None:
next_new_conv.bias.data = next_conv.bias.data # new conv2
# replace with new block
if isinstance(blk, BasicBlock):
new_blk = BasicBlock(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = blk.downsample)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = new_conv
new_blk.bn1 = new_bn
new_blk.relu = blk.relu
new_blk.conv2 = next_new_conv # update with new conv
new_blk.bn2 = blk.bn2 # bn2 is kept from the old block
else:
new_blk = Bottleneck(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = blk.downsample)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = new_conv
new_blk.bn1 = new_bn
new_blk.conv2 = next_new_conv
new_blk.bn2 = blk.bn2
new_blk.conv3 = blk.conv3
new_blk.bn3 = blk.bn3
new_blk.relu = blk.relu
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [block_index], \
[new_blk]) for i, _ in enumerate(model.base)))
# delete and replace
del model.base
model.base = base
print("Filters prunned:", filters_to_prune)
return model
'''
--------------------------------------------------------------------------------
6. Prune normal residual conv layer, the SECOND one on the upper part of the residual side, with/without BN
(*for Bottleneck only)
Args:
block_index: the BasicBlock or Bottleneck Block this layer locates
cut_ratio: the ratio of filters pruned from conv1 (and conv2 if Bottleneck)
use_bn: use Batch Norm or not
'''
def prune_ruconv2_layer(model, block_index, cut_ratio=0.2, use_bn = True):
_, blk = list(model.base._modules.items())[block_index]
if not use_bn:
print("ResNet without BN is not supported for prunning")
return model
if not isinstance(blk, Bottleneck):
print("Conv2 only for ResNet with Bottleneck defined in torchvision")
return model
# cut conv2, and next conv is conv3
conv = blk.conv2
bn = blk.bn2
next_conv = blk.conv3
num_filters = conv.weight.data.size(0) # out_channels x in_channels x 3 x 3
# skip the layer with only one filter left
if num_filters <= 1:
print("No filter will be prunned for this layer (num_filters<=1)")
return model
cut = int(cut_ratio * num_filters)
if cut < 1:
print("No filter will be prunned for this layer (cut<1)")
return model
if (num_filters - cut) < 1:
print("No filter will be prunned for this layer (no filter left after cutting)")
return model
# rank the filters within this layer and store into filter_ranks
abs_wgt = torch.abs(conv.weight.data)
values = \
torch.sum(abs_wgt, dim = 1, keepdim = True).\
sum(dim=2, keepdim = True).sum(dim=3, keepdim = True)[:, 0, 0, 0]# .data
# Normalize the sum of weight by the filter dimensions in x 3 x 3
values = values / (abs_wgt.size(1) * abs_wgt.size(2) * abs_wgt.size(3)) # (filter_number for this layer, 1)
print("Ranking filters.. ")
filters_to_prune = np.argsort(values.cpu().numpy())[:cut] # order from smallest to largest
print("Filters that will be prunned", filters_to_prune)
print("Pruning filters.. ")
# the updated conv for current conv, with cut output channels being pruned
new_conv = \
torch.nn.Conv2d(in_channels = conv.in_channels, \
out_channels = conv.out_channels - cut,
kernel_size = conv.kernel_size, \
stride = conv.stride,
padding = conv.padding,
dilation = conv.dilation,
groups = conv.groups,
bias = conv.bias is not None) #(out_channels)
old_weights = conv.weight.data.cpu().numpy() # (out_channels, in_channels, kernel_size[0], kernel_size[1]
new_weights = new_conv.weight.data.cpu().numpy()
# skip that filter's weight inside old_weights and store others into new_weights
new_weights = np.delete(old_weights, filters_to_prune, axis = 0)
new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if conv.bias is not None:
bias_numpy = conv.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune, axis = None)
new_conv.bias.data = torch.from_numpy(bias).cuda() # new conv2
# BatchNorm layer
new_bn = torch.nn.BatchNorm2d(num_features=new_conv.out_channels, \
eps=bn.eps, momentum=bn.momentum, affine=bn.affine)
# gamma: size = (num_features)
old_weights = bn.weight.data.cpu().numpy()
new_weights = new_bn.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune)
new_bn.weight.data = torch.from_numpy(new_weights).cuda()
# beta: size = (num_features)
bias_numpy = bn.bias.data.cpu().numpy()
# change size to (out_channels - cut)
bias = np.zeros(shape = (bias_numpy.shape[0] - cut), dtype = np.float32)
bias = np.delete(bias_numpy, filters_to_prune)
new_bn.bias.data = torch.from_numpy(bias).cuda() # new bn2
# new conv for next_conv
next_new_conv = \
torch.nn.Conv2d(in_channels = next_conv.in_channels - cut,\
out_channels = next_conv.out_channels, \
kernel_size = next_conv.kernel_size, \
stride = next_conv.stride,
padding = next_conv.padding,
dilation = next_conv.dilation,
groups = next_conv.groups,
bias = next_conv.bias is not None)
old_weights = next_conv.weight.data.cpu().numpy()
new_weights = next_new_conv.weight.data.cpu().numpy()
new_weights = np.delete(old_weights, filters_to_prune, axis = 1)
next_new_conv.weight.data = torch.from_numpy(new_weights).cuda()
if next_conv.bias is not None:
next_new_conv.bias.data = next_conv.bias.data # new conv3
# replace with new block
new_blk = Bottleneck(blk.conv1.in_channels, blk.conv1.out_channels, \
blk.stride, downsample = blk.downsample)
# keep all layers in residual path unchanged temporarily
new_blk.conv1 = blk.conv1
new_blk.bn1 = blk.bn1
new_blk.conv2 = new_conv
new_blk.bn2 = new_bn
new_blk.conv3 = next_new_conv
new_blk.bn3 = blk.bn3
new_blk.relu = blk.relu
base = torch.nn.Sequential(
*(replace_layers(model.base, i, [block_index], \
[new_blk]) for i, _ in enumerate(model.base)))
# delete and replace
del model.base
model.base = base
print("Filters prunned:", filters_to_prune)
return model
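A hypothetical driver for these helpers. Every function here expects the backbone in model.base and moves pruned tensors with .cuda(), so the sketch assumes a CUDA machine; build_model() is a placeholder for however the repo constructs its SSD/ResNet model:
model = build_model().cuda()  # placeholder constructor, not defined in this file
# prune 20% of the filters of the conv layer at index 0 of model.base
model = prune_resconv_layer(model, layer_index=0, cut_ratio=0.2, use_bn=True)
# prune the identity (downsample) conv of the block at index 4, then shrink
# the residual-side bottom conv of the same block by the same filter indices
cut_indices, model = prune_resnet_lconv_layer(model, block_index=4, cut_ratio=0.2)
if cut_indices is not None:
    model = prune_rbconv_by_indices(model, block_index=4, filters_to_prune=cut_indices)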
| 43.953061
| 151
| 0.631402
| 6,015
| 43,074
| 4.34015
| 0.056359
| 0.019842
| 0.030568
| 0.022064
| 0.833487
| 0.817628
| 0.802995
| 0.793036
| 0.783307
| 0.770857
| 0
| 0.013389
| 0.266541
| 43,074
| 979
| 152
| 43.997957
| 0.812933
| 0.182198
| 0
| 0.826625
| 0
| 0
| 0.074423
| 0
| 0
| 0
| 0
| 0.001021
| 0
| 1
| 0.012384
| false
| 0
| 0.010836
| 0
| 0.085139
| 0.082043
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| f72c27667d2dfd2d0ef541566592c06f430e047b
| 147
| py
| Python
| scene/__init__.py
| cloose/ray-tracer-challenge
| 5e9dd56fb67c5cba47172986a963fc22a8cbcaa2
| ["MIT"] | null | null | null
| scene/__init__.py
| cloose/ray-tracer-challenge
| 5e9dd56fb67c5cba47172986a963fc22a8cbcaa2
| ["MIT"] | null | null | null
| scene/__init__.py
| cloose/ray-tracer-challenge
| 5e9dd56fb67c5cba47172986a963fc22a8cbcaa2
| ["MIT"] | null | null | null |
from .camera import *
from .obj_file import *
from .obj_parser import *
from .ray_tracer import *
from .scene_parser import *
from .world import *
| 21
| 27
| 0.755102
| 22
| 147
| 4.863636
| 0.454545
| 0.46729
| 0.242991
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 147
| 6
| 28
| 24.5
| 0.869919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| f72fdda2808488fef61058f47c4ebf00428e8bf0
| 9,861
| py
| Python
| devel/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py
| MultiRobotUPenn/groundstation_ws_vio_swarm
| 60e01af6bf32bafb5bc31626b055436278dc8311
| ["MIT"] | 1
| 2020-03-10T06:32:51.000Z
| 2020-03-10T06:32:51.000Z
| install/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py
| MultiRobotUPenn/groundstation_ws_vio_swarm
| 60e01af6bf32bafb5bc31626b055436278dc8311
| ["MIT"] | null | null | null
| install/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py
| MultiRobotUPenn/groundstation_ws_vio_swarm
| 60e01af6bf32bafb5bc31626b055436278dc8311
| ["MIT"] | 1
| 2018-11-07T03:37:23.000Z
| 2018-11-07T03:37:23.000Z |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from mav_manager/GoalTimedRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
class GoalTimedRequest(genpy.Message):
_md5sum = "3c9a1ea281c62219122f22aa2b508b97"
_type = "mav_manager/GoalTimedRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """float32[4] goal
duration duration
time t_start
"""
__slots__ = ['goal','duration','t_start']
_slot_types = ['float32[4]','duration','time']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
goal,duration,t_start
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GoalTimedRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.goal is None:
self.goal = [0.] * 4
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
else:
self.goal = [0.] * 4
self.duration = genpy.Duration()
self.t_start = genpy.Time()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_get_struct_4f().pack(*self.goal))
_x = self
buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
end = 0
start = end
end += 16
self.goal = _get_struct_4f().unpack(str[start:end])
_x = self
start = end
end += 16
(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
self.duration.canon()
self.t_start.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(self.goal.tostring())
_x = self
buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
end = 0
start = end
end += 16
self.goal = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=4)
_x = self
start = end
end += 16
(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
self.duration.canon()
self.t_start.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_4f = None
def _get_struct_4f():
global _struct_4f
if _struct_4f is None:
_struct_4f = struct.Struct("<4f")
return _struct_4f
_struct_2i2I = None
def _get_struct_2i2I():
global _struct_2i2I
if _struct_2i2I is None:
_struct_2i2I = struct.Struct("<2i2I")
return _struct_2i2I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from mav_manager/GoalTimedResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GoalTimedResponse(genpy.Message):
_md5sum = "937c9679a518e3a18d831e57125ea522"
_type = "mav_manager/GoalTimedResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """bool success
string message
"""
__slots__ = ['success','message']
_slot_types = ['bool','string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
success,message
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GoalTimedResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.success is None:
self.success = False
if self.message is None:
self.message = ''
else:
self.success = False
self.message = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_get_struct_B().pack(self.success))
_x = self.message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message = str[start:end].decode('utf-8')
else:
self.message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(_get_struct_B().pack(self.success))
_x = self.message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message = str[start:end].decode('utf-8')
else:
self.message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class GoalTimed(object):
_type = 'mav_manager/GoalTimed'
_md5sum = '3200a97d30222d1d03961acacb87f306'
_request_class = GoalTimedRequest
_response_class = GoalTimedResponse
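# A minimal round-trip sketch for the generated classes above (assumes genpy
# is importable; the field values are illustrative only):
if __name__ == '__main__':
    from io import BytesIO
    req = GoalTimedRequest(goal=[1.0, 2.0, 3.0, 0.5],
                           duration=genpy.Duration(5),
                           t_start=genpy.Time(100, 0))
    buff = BytesIO()
    req.serialize(buff)
    decoded = GoalTimedRequest().deserialize(buff.getvalue())
    assert list(decoded.goal) == [1.0, 2.0, 3.0, 0.5]
    assert decoded.duration.secs == 5 and decoded.t_start.secs == 100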
| 33.540816
| 145
| 0.653179
| 1,350
| 9,861
| 4.597778
| 0.131111
| 0.028355
| 0.021266
| 0.024488
| 0.817786
| 0.814725
| 0.814725
| 0.814725
| 0.814725
| 0.814725
| 0
| 0.021118
| 0.226853
| 9,861
| 293
| 146
| 33.65529
| 0.793022
| 0.246324
| 0
| 0.762626
| 1
| 0
| 0.082198
| 0.024702
| 0
| 0
| 0.002839
| 0
| 0
| 1
| 0.085859
| false
| 0
| 0.035354
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f7607b46f4ccf3495a92e4628bcc34436f39dbf1
| 1,824
|
py
|
Python
|
src/riotwatcher/Handlers/RequestHandler.py
|
acgandhi/Riot-Watcher
|
f8a7ea144a00fc62f9ffaee5bad3158f41251589
|
[
"MIT"
] | null | null | null |
src/riotwatcher/Handlers/RequestHandler.py
|
acgandhi/Riot-Watcher
|
f8a7ea144a00fc62f9ffaee5bad3158f41251589
|
[
"MIT"
] | null | null | null |
src/riotwatcher/Handlers/RequestHandler.py
|
acgandhi/Riot-Watcher
|
f8a7ea144a00fc62f9ffaee5bad3158f41251589
|
[
"MIT"
] | null | null | null |
class RequestHandler:
def __init__(self):
pass
def preview_request(
self,
region: str,
endpoint_name: str,
method_name: str,
url: str,
query_params: dict,
):
"""
Called before a request is processed.
:param string region: the region of this request
:param string endpoint_name: the name of the endpoint being requested
:param string method_name: the name of the method being requested
:param url: the URL that is being requested.
:param dict query_params: the parameters to the URL that is being queried,
e.g. ?key1=val&key2=val2
"""
def after_request(
self, region: str, endpoint_name: str, method_name: str, url: str, response
):
"""
Called after a response is received and before it is returned to the user.
:param string region: the region of this request
:param string endpoint_name: the name of the endpoint that was requested
:param string method_name: the name of the method that was requested
:param url: The url that was requested
:param response: the response received. This is a response from the "requests"
library
"""
def preview_static_request(self, url: str, query_params: dict):
"""
Called before a request to DataDragon is processed
:param url: The url that was requested
"""
def after_static_request(self, url: str, response):
"""
Called after a response is received and before it is returned to the user.
:param url: The url that was requested
:param response: the response received. This is a response from the "requests"
library
"""
| 34.415094
| 86
| 0.616776
| 234
| 1,824
| 4.717949
| 0.222222
| 0.088768
| 0.04529
| 0.047101
| 0.822464
| 0.769928
| 0.737319
| 0.710145
| 0.710145
| 0.641304
| 0
| 0.002427
| 0.322368
| 1,824
| 52
| 87
| 35.076923
| 0.890777
| 0.602522
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| false
| 0.0625
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
f79debcbb34a60afd831d5cdfcd46fb59e808492
| 21,896
|
py
|
Python
|
sources/architecture.py
|
dykuang/Unsupervised-brain-leision-segmentation
|
c83462db3cebcf8af357fc42d1a2592b67eace9b
|
[
"MIT"
] | null | null | null |
sources/architecture.py
|
dykuang/Unsupervised-brain-leision-segmentation
|
c83462db3cebcf8af357fc42d1a2592b67eace9b
|
[
"MIT"
] | null | null | null |
sources/architecture.py
|
dykuang/Unsupervised-brain-leision-segmentation
|
c83462db3cebcf8af357fc42d1a2592b67eace9b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 23 16:17:32 2019
@author: dykua
architectures for the network
"""
from keras.layers import Input, Conv2D, Conv2DTranspose, Reshape, Lambda, MaxPooling2D, UpSampling2D, Dropout, concatenate, multiply, add, BatchNormalization, PReLU, GaussianNoise, ZeroPadding2D
from keras.models import Model
from Mylayers import ClusteringLayer_ver2, Feature_split
import tensorflow as tf
def encoder_block(x, dim_list):
y = Conv2D(dim_list[0], kernel_size=3, strides=1, activation='relu', padding='same')(x)
y = Conv2D(dim_list[0], kernel_size=3, strides=2, activation='relu')(y)
for dim in dim_list[1:-1]:
y = Conv2D(dim, kernel_size=3, strides=1, activation='relu', padding='same')(y)
y = Conv2D(dim, kernel_size=3, strides=2, activation='relu')(y)
y = Conv2D(dim_list[-1], kernel_size=1, strides=1, activation='relu', padding='same')(y) # embedding layer
return y
def decoder_block(x, dim_list):
y = Conv2DTranspose(dim_list[0], kernel_size=3, strides=2,activation='relu')(x)
y = Conv2D(dim_list[0], kernel_size=3, strides=1, activation='relu', padding='same')(y)
for dim in dim_list[1:-1]:
y = Conv2DTranspose(dim, kernel_size=3, strides=2,activation='relu')(y)
y = Conv2D(dim, kernel_size=3, strides=1, activation='relu', padding='same')(y)
y = Conv2D(dim_list[-1], kernel_size=1, strides=1,activation='relu', padding='same')(y) # output layer
return y
def create_encoder(inputs, dim_list):
output = encoder_block(inputs, dim_list)
return Model(inputs, output)
def create_decoder(inputs, dim_list):
output = decoder_block(inputs, dim_list)
return Model(inputs, output)
#def make_cluster(inputs, filter_func = lambda x: 1/(1+tf.exp(-10*(x-0.5))), n_clusters, name='clustering'):
# clusters = ClusteringLayer_ver2(n_clusters, filter_func, name)(inputs)
# return clusters
def build_whole_model(inputs, en_dim_list, de_dim_list, n_clusters, filter_func = lambda x: 1/(1+tf.exp(-10*(x-0.5)))):
encoder = create_encoder(inputs, en_dim_list)
feature = encoder(inputs) # end of encoder
feature_reshaped = Reshape( (feature.shape[1] * feature.shape[2], en_dim_list[-1]) )(feature) # Did not specify batch size explicitly in Reshape layers
CLayer = ClusteringLayer_ver2(n_clusters, filter_func, name='clustering')
x_clusters_reshaped = CLayer(feature_reshaped)
x_clusters = Reshape((feature.shape[1], feature.shape[2], n_clusters))(x_clusters_reshaped) # end of clustering
x_splited=Feature_split(en_dim_list[-1], n_clusters)([feature, x_clusters]) # feature split according to clusters
decoder_input = Input((feature.shape[1], feature.shape[2], en_dim_list[-1]))
decoder = create_decoder(decoder_input, de_dim_list)
decoded = decoder(feature) # end of decoder
Pred_label=[]
for i in range(n_clusters):
Pred_label.append(decoder(x_splited[i]))
Squeezed = Lambda(lambda x: tf.squeeze(tf.stack(x,axis=-1), axis=-2))
AE = Model(inputs, decoded)
feature_map = Model(inputs, x_clusters_reshaped)
mask_map = Model(inputs, Squeezed(Pred_label))
whole_model = Model(inputs, [AE.output, feature_map.output, mask_map.output])
return AE, feature_map, mask_map, whole_model
def unet_CL(n_clusters, filter_func = lambda x: 1/(1+tf.exp(-10*(x-0.5))), pretrained_weights = None,input_size = (256,256,1)):
inputs = Input(input_size)
conv1 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
conv1 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
conv2 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
conv3 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
# conv4 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
# drop4 = Dropout(0.25)(conv4)
# pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
#
# conv5 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
# conv5 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
# drop5 = Dropout(0.25)(conv5)
#
# up6 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
# merge6 = concatenate([drop4,up6], axis = 3)
# conv6 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
# conv6 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
up7 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv4))
merge7 = concatenate([conv3,up7], axis = 3)
conv7 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge7)
conv7 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
up8 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
merge8 = concatenate([conv2,up8], axis = 3)
conv8 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge8)
conv8 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
up9 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
merge9 = concatenate([conv1,up9], axis = 3)
conv9 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge9)
conv9 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
# conv9 = Conv2D(2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
conv10 = Conv2D(1, 1, activation = 'relu')(conv9)
# segmentation branch
feature_reshaped = Reshape( (conv4.shape[1] * conv4.shape[2], 256) )(conv4) # Did not specify batch size explicitly in Reshape layers
CLayer = ClusteringLayer_ver2(n_clusters, filter_func, name='clustering')
x_clusters_reshaped = CLayer(feature_reshaped)
x_clusters = Reshape((conv4.shape[1], conv4.shape[2], n_clusters))(x_clusters_reshaped) # end of clustering
x_splited=Feature_split(conv4.shape[3], n_clusters)([conv4, x_clusters]) # feature split according to clusters
decoder_input = Input((conv4.shape[1], conv4.shape[2], conv4.shape[3]))
decoder = Model([inputs, decoder_input], conv10)
Pred_label=[]
for i in range(n_clusters):
Pred_label.append(decoder([inputs, x_splited[i]]))
Squeezed = Lambda(lambda x: tf.squeeze(tf.stack(x,axis=-1), axis=-2))
#models
encoder = Model(inputs, conv4)
feature = encoder(inputs)
decoded = decoder([inputs, feature])
AE = Model(inputs, decoded)
feature_map = Model(inputs, x_clusters_reshaped)
mask_map = Model(inputs, Squeezed(Pred_label))
whole_model = Model(inputs, [AE.output, feature_map.output, mask_map.output])
#model.summary()
if(pretrained_weights):
AE.load_weights(pretrained_weights[0])
whole_model.get_layer(name='clustering').set_weights(pretrained_weights[1])
return AE, encoder, feature_map, mask_map, whole_model
def unet_AE(input_size = (256,256,1), pretrained_weights = None):
inputs = Input(input_size)
conv1 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
conv1 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
conv2 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
conv3 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
# conv4 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
# drop4 = Dropout(0.25)(conv4)
# drop4 = conv4
# pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
#
# conv5 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
# conv5 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
## drop5 = Dropout(0.25)(conv5)
# drop5 = conv5
# up6 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv4))
# merge6 = concatenate([conv4,up6], axis = 3)
# conv6 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
# conv6 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
up7 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv4))
merge7 = concatenate([conv3,up7], axis = 3)
conv7 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge7)
conv7 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
up8 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
merge8 = concatenate([conv2,up8], axis = 3)
conv8 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge8)
conv8 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
up9 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
merge9 = concatenate([conv1,up9], axis = 3)
conv9 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge9)
conv9 = Conv2D(32, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
# conv9 = Conv2D(2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
conv10 = Conv2D(1, 1, activation = 'relu')(conv9)
AE = Model(inputs, conv10)
#model.summary()
if(pretrained_weights):
AE.load_weights(pretrained_weights[0])
return AE
def build_model_2(n_clusters, num_start = 16, pretrained_weights = None,input_size = (256,256,1)):
inputs = Input(input_size, name='input--encoder')
conv1 = Conv2D(num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
conv1 = Conv2D(num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(2*num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
conv2 = Conv2D(2*num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(2*num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
conv3 = Conv2D(2*num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
up4 = Conv2D(2*num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv3))
merge4 = concatenate([conv2,up4], axis = 3)
conv5 = Conv2D(2*num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge4)
conv5 = Conv2D(2*num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
up6 = Conv2D(num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv5))
merge6 = concatenate([conv1,up6], axis = 3)
conv7_0 = Conv2D(num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
encoder = Model(inputs, conv7_0) # where to put output of the encoder? merge6, conv7_0, conv7_1
input_de = Input( (input_size[0], input_size[1], num_start) , name = 'input--decoder')
conv7_1 = Conv2D(num_start, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(input_de)
conv8 = Conv2D(3, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7_1)
conv9 = Conv2D(input_size[2], 1, activation = 'relu')(conv8)
decoder = Model(input_de, conv9)
feature = encoder(inputs)
decoded = decoder(feature)
feature_reshaped = Reshape( (feature.shape[1] * feature.shape[2], num_start) )(feature) # Did not specify batch size explicitly in Reshape layers
CLayer = ClusteringLayer_ver2(n_clusters, name='clustering')
x_clusters_reshaped = CLayer(feature_reshaped)
AE = Model(inputs, decoded)
feature_map = Model(inputs, x_clusters_reshaped)
return AE, feature_map
def build_model(input_size = (256,256,1), en_spec = [8,16,16], de_spec=[8,4], n_features = 8, n_clusters=3):
inputs = Input(input_size, name='input--encoder')
memo = []
aug_input = GaussianNoise(0.05)(inputs)
conv = Conv2D(en_spec[0], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(aug_input)
conv = Conv2D(en_spec[0], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
pool = MaxPooling2D(pool_size=(2, 2))(conv)
memo.append(conv)
for num in en_spec[1:-1]:
conv = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool)
conv = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
pool = MaxPooling2D(pool_size=(2, 2))(conv)
memo.append(conv)
conv = Conv2D(en_spec[-1], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool)
conv = Conv2D(en_spec[-1], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
for i, num in enumerate(en_spec[-2::-1]):
up = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv))
merge = concatenate([memo[-i-1],up], axis = 3)
#merge = add([memo[-i-1],up])
conv = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge)
# conv = multiply([conv, memo[-i-1]])
if i== (len(en_spec) - 2 ):
conv = Conv2D(n_features, 1, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
else:
conv = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
# conv = BatchNormalization()(conv)
# conv = PReLU(shared_axes=(1,2))(conv)
encoder = Model(inputs, conv) # where to put output of the encoder?
input_de = Input( (input_size[0], input_size[1], n_features) , name = 'input--decoder')
conv_de = Conv2D(de_spec[0], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(input_de)
if len(de_spec) > 1:
for num in de_spec[1:]:
conv_de = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv_de)
conv_de = Conv2D(input_size[2], 1, activation = 'relu')(conv_de)
decoder = Model(input_de, conv_de)
feature = encoder(inputs)
decoded = decoder(feature)
feature_reshaped = Reshape( (feature.shape[1] * feature.shape[2], n_features) )(feature) # Did not specify batch size explicitly in Reshape layers
CLayer = ClusteringLayer_ver2(n_clusters, name='clustering')
x_clusters_reshaped = CLayer(feature_reshaped)
AE = Model(inputs, decoded)
feature_map = Model(inputs, x_clusters_reshaped)
return AE, feature_map
def build_model_3(input_size = (256,256,1), en_spec = [8,16,16], de_spec=[8,4], n_features = 8, n_clusters=3):
inputs = Input(input_size, name='input--encoder')
memo = []
aug_input = GaussianNoise(0.05)(inputs)
conv = Conv2D(en_spec[0], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(aug_input)
memo.append(conv)
conv = Conv2D(en_spec[0], 3, strides=2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
for num in en_spec[1:-1]:
conv = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
memo.append(conv)
conv = Conv2D(num, 3, strides=2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
conv = Conv2D(en_spec[-1], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
for i, num in enumerate(en_spec[-2::-1]):
up = Conv2DTranspose(num, 3, strides=2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
# if up.shape[1] == memo[-i-1].shape[1]: # shape is (?,?,?,int) for up?
merge = concatenate([memo[-i-1],up], axis = 3)
#merge = add([memo[-i-1],up])
conv = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge)
# conv = multiply([conv, memo[-i-1]])
if i== (len(en_spec) - 2 ):
conv = Conv2D(n_features, 1, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
else:
conv = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv)
# conv = BatchNormalization()(conv)
# conv = PReLU(shared_axes=(1,2))(conv)
encoder = Model(inputs, conv) # where to put output of the encoder?
input_de = Input( (input_size[0], input_size[1], n_features) , name = 'input--decoder')
conv_de = Conv2D(de_spec[0], 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(input_de)
# if len(de_spec) > 1:
# for num in de_spec[1:]:
# conv_de = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv_de)
for num in de_spec:
conv_de_1 = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv_de)
conv_de_1 = Conv2D(num, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv_de_1)
conv_de = add([conv_de, conv_de_1])
conv_de = Conv2D(input_size[2], 1, activation = 'relu')(conv_de)
decoder = Model(input_de, conv_de)
feature = encoder(inputs)
decoded = decoder(feature)
feature_reshaped = Reshape( (encoder.output_shape[1] * encoder.output_shape[2], n_features) )(feature) # Did not specify batch size explicitly in Reshape layers
CLayer = ClusteringLayer_ver2(n_clusters, name='clustering')
x_clusters_reshaped = CLayer(feature_reshaped)
AE = Model(inputs, decoded)
feature_map = Model(inputs, x_clusters_reshaped)
return AE, feature_map
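# A minimal usage sketch for the builders above (the optimizer and losses
# here are illustrative assumptions, not taken from the source):
if __name__ == '__main__':
    from keras.optimizers import Adam
    AE, feature_map = build_model(input_size=(256, 256, 1),
                                  en_spec=[8, 16, 16], de_spec=[8, 4],
                                  n_features=8, n_clusters=3)
    AE.compile(optimizer=Adam(1e-3), loss='mse')            # reconstruction branch
    feature_map.compile(optimizer=Adam(1e-3), loss='kld')   # clustering branch
    AE.summary()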
| 60.486188
| 293
| 0.61203
| 2,686
| 21,896
| 4.814594
| 0.067759
| 0.107176
| 0.146149
| 0.173987
| 0.886174
| 0.87759
| 0.873956
| 0.859109
| 0.852304
| 0.83854
| 0
| 0.050544
| 0.244611
| 21,896
| 361
| 294
| 60.65374
| 0.731318
| 0.15432
| 0
| 0.659389
| 0
| 0
| 0.075956
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043668
| false
| 0
| 0.017467
| 0
| 0.104803
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3c5205cbc00cd681324cf19342c9414e37d439c
| 14,521
|
py
|
Python
|
tests/parser/stratcomp.c60.p180.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/stratcomp.c60.p180.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/stratcomp.c60.p180.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
controlled_by(1,10,13,13).
controlled_by(2,30,30,30).
controlled_by(3,9,46,46).
controlled_by(4,55,3,3).
controlled_by(4,55,38,57).
controlled_by(5,18,49,49).
controlled_by(5,18,51,44).
controlled_by(6,1,23,54).
controlled_by(6,1,31,33).
controlled_by(7,16,44,8).
controlled_by(8,14,56,56).
controlled_by(9,21,21,21).
controlled_by(10,5,48,48).
controlled_by(10,5,56,56).
controlled_by(11,28,49,55).
controlled_by(11,28,47,57).
controlled_by(12,35,10,10).
controlled_by(13,4,36,36).
controlled_by(14,16,2,22).
controlled_by(14,16,10,55).
controlled_by(15,19,19,19).
controlled_by(16,33,33,33).
controlled_by(17,51,49,49).
controlled_by(17,51,16,16).
controlled_by(18,9,9,9).
controlled_by(19,9,34,34).
controlled_by(20,52,4,4).
controlled_by(21,40,4,4).
controlled_by(22,26,39,24).
controlled_by(22,26,60,36).
controlled_by(23,10,39,39).
controlled_by(23,10,49,49).
controlled_by(24,3,3,3).
controlled_by(25,7,1,1).
controlled_by(25,7,29,52).
controlled_by(26,33,53,21).
controlled_by(27,29,33,33).
controlled_by(28,59,52,52).
controlled_by(29,3,48,2).
controlled_by(29,3,48,39).
controlled_by(30,25,50,9).
controlled_by(30,25,20,3).
controlled_by(31,20,3,3).
controlled_by(31,20,45,45).
controlled_by(32,51,11,20).
controlled_by(32,51,28,49).
controlled_by(33,4,60,60).
controlled_by(33,4,42,29).
controlled_by(34,46,42,42).
controlled_by(34,46,35,35).
controlled_by(35,60,55,20).
controlled_by(35,60,13,54).
controlled_by(36,3,3,3).
controlled_by(37,4,50,50).
controlled_by(37,4,17,23).
controlled_by(38,56,11,11).
controlled_by(39,44,57,57).
controlled_by(39,44,14,43).
controlled_by(40,14,24,24).
controlled_by(40,14,35,35).
controlled_by(41,54,7,31).
controlled_by(41,54,7,55).
controlled_by(42,56,31,31).
controlled_by(43,17,33,33).
controlled_by(43,17,56,56).
controlled_by(44,33,21,21).
controlled_by(44,33,48,48).
controlled_by(45,56,12,12).
controlled_by(46,30,2,2).
controlled_by(47,3,48,48).
controlled_by(47,3,60,60).
controlled_by(48,46,8,8).
controlled_by(49,15,37,35).
controlled_by(50,41,21,21).
controlled_by(50,41,58,5).
controlled_by(51,5,27,27).
controlled_by(51,5,13,13).
controlled_by(52,57,57,57).
controlled_by(53,51,2,2).
controlled_by(54,42,5,50).
controlled_by(55,14,59,59).
controlled_by(55,14,17,17).
controlled_by(56,20,48,25).
controlled_by(56,20,39,38).
controlled_by(57,26,13,2).
controlled_by(57,26,13,38).
controlled_by(58,31,45,45).
controlled_by(59,25,52,13).
controlled_by(59,25,29,21).
controlled_by(60,31,25,33).
produced_by(p1, 3,4).
produced_by(p2, 19,29).
produced_by(p3, 44,36).
produced_by(p4, 19,2).
produced_by(p5, 6,21).
produced_by(p6, 17,9).
produced_by(p7, 2,1).
produced_by(p8, 33,33).
produced_by(p9, 15,18).
produced_by(p10, 39,22).
produced_by(p11, 57,18).
produced_by(p12, 30,31).
produced_by(p13, 39,32).
produced_by(p14, 37,42).
produced_by(p15, 29,26).
produced_by(p16, 21,54).
produced_by(p17, 36,53).
produced_by(p18, 20,29).
produced_by(p19, 41,16).
produced_by(p20, 47,57).
produced_by(p21, 30,36).
produced_by(p22, 4,40).
produced_by(p23, 56,2).
produced_by(p24, 50,23).
produced_by(p25, 44,22).
produced_by(p26, 58,56).
produced_by(p27, 25,46).
produced_by(p28, 51,7).
produced_by(p29, 11,53).
produced_by(p30, 18,6).
produced_by(p31, 37,44).
produced_by(p32, 13,40).
produced_by(p33, 48,19).
produced_by(p34, 44,19).
produced_by(p35, 50,41).
produced_by(p36, 5,5).
produced_by(p37, 8,29).
produced_by(p38, 29,11).
produced_by(p39, 47,16).
produced_by(p40, 4,20).
produced_by(p41, 2,22).
produced_by(p42, 13,44).
produced_by(p43, 25,57).
produced_by(p44, 49,21).
produced_by(p45, 9,28).
produced_by(p46, 57,26).
produced_by(p47, 22,20).
produced_by(p48, 35,22).
produced_by(p49, 28,44).
produced_by(p50, 12,34).
produced_by(p51, 17,52).
produced_by(p52, 14,54).
produced_by(p53, 17,1).
produced_by(p54, 44,50).
produced_by(p55, 43,23).
produced_by(p56, 3,60).
produced_by(p57, 3,59).
produced_by(p58, 20,45).
produced_by(p59, 39,31).
produced_by(p60, 55,55).
produced_by(p61, 46,6).
produced_by(p62, 10,34).
produced_by(p63, 23,1).
produced_by(p64, 45,54).
produced_by(p65, 49,6).
produced_by(p66, 40,12).
produced_by(p67, 8,40).
produced_by(p68, 38,24).
produced_by(p69, 35,27).
produced_by(p70, 36,37).
produced_by(p71, 49,60).
produced_by(p72, 22,11).
produced_by(p73, 17,41).
produced_by(p74, 51,25).
produced_by(p75, 48,9).
produced_by(p76, 43,22).
produced_by(p77, 52,38).
produced_by(p78, 56,38).
produced_by(p79, 18,35).
produced_by(p80, 19,41).
produced_by(p81, 18,53).
produced_by(p82, 24,23).
produced_by(p83, 46,10).
produced_by(p84, 37,54).
produced_by(p85, 28,35).
produced_by(p86, 39,23).
produced_by(p87, 27,34).
produced_by(p88, 54,20).
produced_by(p89, 3,47).
produced_by(p90, 45,42).
produced_by(p91, 53,42).
produced_by(p92, 54,18).
produced_by(p93, 5,57).
produced_by(p94, 20,13).
produced_by(p95, 11,47).
produced_by(p96, 51,31).
produced_by(p97, 31,56).
produced_by(p98, 15,29).
produced_by(p99, 40,8).
produced_by(p100, 2,10).
produced_by(p101, 7,46).
produced_by(p102, 50,2).
produced_by(p103, 23,38).
produced_by(p104, 51,9).
produced_by(p105, 13,25).
produced_by(p106, 3,54).
produced_by(p107, 57,23).
produced_by(p108, 48,24).
produced_by(p109, 32,18).
produced_by(p110, 33,29).
produced_by(p111, 24,21).
produced_by(p112, 31,40).
produced_by(p113, 20,40).
produced_by(p114, 58,24).
produced_by(p115, 36,22).
produced_by(p116, 34,22).
produced_by(p117, 28,19).
produced_by(p118, 20,17).
produced_by(p119, 16,19).
produced_by(p120, 25,57).
produced_by(p121, 32,15).
produced_by(p122, 21,51).
produced_by(p123, 24,59).
produced_by(p124, 16,58).
produced_by(p125, 13,46).
produced_by(p126, 17,52).
produced_by(p127, 3,47).
produced_by(p128, 12,56).
produced_by(p129, 22,28).
produced_by(p130, 8,6).
produced_by(p131, 36,39).
produced_by(p132, 25,24).
produced_by(p133, 43,39).
produced_by(p134, 35,59).
produced_by(p135, 59,21).
produced_by(p136, 59,15).
produced_by(p137, 6,7).
produced_by(p138, 42,34).
produced_by(p139, 42,47).
produced_by(p140, 59,17).
produced_by(p141, 33,40).
produced_by(p142, 54,16).
produced_by(p143, 55,40).
produced_by(p144, 25,3).
produced_by(p145, 8,48).
produced_by(p146, 42,29).
produced_by(p147, 35,35).
produced_by(p148, 26,10).
produced_by(p149, 21,1).
produced_by(p150, 48,38).
produced_by(p151, 58,33).
produced_by(p152, 21,16).
produced_by(p153, 13,26).
produced_by(p154, 11,4).
produced_by(p155, 40,22).
produced_by(p156, 25,48).
produced_by(p157, 2,36).
produced_by(p158, 33,7).
produced_by(p159, 36,33).
produced_by(p160, 31,5).
produced_by(p161, 45,9).
produced_by(p162, 20,30).
produced_by(p163, 57,31).
produced_by(p164, 34,18).
produced_by(p165, 4,38).
produced_by(p166, 55,48).
produced_by(p167, 15,35).
produced_by(p168, 17,17).
produced_by(p169, 8,58).
produced_by(p170, 20,55).
produced_by(p171, 1,41).
produced_by(p172, 17,49).
produced_by(p173, 30,43).
produced_by(p174, 5,11).
produced_by(p175, 2,48).
produced_by(p176, 22,38).
produced_by(p177, 47,38).
produced_by(p178, 42,57).
produced_by(p179, 10,30).
produced_by(p180, 1,34).
"""
output = """
controlled_by(1,10,13,13).
controlled_by(2,30,30,30).
controlled_by(3,9,46,46).
controlled_by(4,55,3,3).
controlled_by(4,55,38,57).
controlled_by(5,18,49,49).
controlled_by(5,18,51,44).
controlled_by(6,1,23,54).
controlled_by(6,1,31,33).
controlled_by(7,16,44,8).
controlled_by(8,14,56,56).
controlled_by(9,21,21,21).
controlled_by(10,5,48,48).
controlled_by(10,5,56,56).
controlled_by(11,28,49,55).
controlled_by(11,28,47,57).
controlled_by(12,35,10,10).
controlled_by(13,4,36,36).
controlled_by(14,16,2,22).
controlled_by(14,16,10,55).
controlled_by(15,19,19,19).
controlled_by(16,33,33,33).
controlled_by(17,51,49,49).
controlled_by(17,51,16,16).
controlled_by(18,9,9,9).
controlled_by(19,9,34,34).
controlled_by(20,52,4,4).
controlled_by(21,40,4,4).
controlled_by(22,26,39,24).
controlled_by(22,26,60,36).
controlled_by(23,10,39,39).
controlled_by(23,10,49,49).
controlled_by(24,3,3,3).
controlled_by(25,7,1,1).
controlled_by(25,7,29,52).
controlled_by(26,33,53,21).
controlled_by(27,29,33,33).
controlled_by(28,59,52,52).
controlled_by(29,3,48,2).
controlled_by(29,3,48,39).
controlled_by(30,25,50,9).
controlled_by(30,25,20,3).
controlled_by(31,20,3,3).
controlled_by(31,20,45,45).
controlled_by(32,51,11,20).
controlled_by(32,51,28,49).
controlled_by(33,4,60,60).
controlled_by(33,4,42,29).
controlled_by(34,46,42,42).
controlled_by(34,46,35,35).
controlled_by(35,60,55,20).
controlled_by(35,60,13,54).
controlled_by(36,3,3,3).
controlled_by(37,4,50,50).
controlled_by(37,4,17,23).
controlled_by(38,56,11,11).
controlled_by(39,44,57,57).
controlled_by(39,44,14,43).
controlled_by(40,14,24,24).
controlled_by(40,14,35,35).
controlled_by(41,54,7,31).
controlled_by(41,54,7,55).
controlled_by(42,56,31,31).
controlled_by(43,17,33,33).
controlled_by(43,17,56,56).
controlled_by(44,33,21,21).
controlled_by(44,33,48,48).
controlled_by(45,56,12,12).
controlled_by(46,30,2,2).
controlled_by(47,3,48,48).
controlled_by(47,3,60,60).
controlled_by(48,46,8,8).
controlled_by(49,15,37,35).
controlled_by(50,41,21,21).
controlled_by(50,41,58,5).
controlled_by(51,5,27,27).
controlled_by(51,5,13,13).
controlled_by(52,57,57,57).
controlled_by(53,51,2,2).
controlled_by(54,42,5,50).
controlled_by(55,14,59,59).
controlled_by(55,14,17,17).
controlled_by(56,20,48,25).
controlled_by(56,20,39,38).
controlled_by(57,26,13,2).
controlled_by(57,26,13,38).
controlled_by(58,31,45,45).
controlled_by(59,25,52,13).
controlled_by(59,25,29,21).
controlled_by(60,31,25,33).
produced_by(p1, 3,4).
produced_by(p2, 19,29).
produced_by(p3, 44,36).
produced_by(p4, 19,2).
produced_by(p5, 6,21).
produced_by(p6, 17,9).
produced_by(p7, 2,1).
produced_by(p8, 33,33).
produced_by(p9, 15,18).
produced_by(p10, 39,22).
produced_by(p11, 57,18).
produced_by(p12, 30,31).
produced_by(p13, 39,32).
produced_by(p14, 37,42).
produced_by(p15, 29,26).
produced_by(p16, 21,54).
produced_by(p17, 36,53).
produced_by(p18, 20,29).
produced_by(p19, 41,16).
produced_by(p20, 47,57).
produced_by(p21, 30,36).
produced_by(p22, 4,40).
produced_by(p23, 56,2).
produced_by(p24, 50,23).
produced_by(p25, 44,22).
produced_by(p26, 58,56).
produced_by(p27, 25,46).
produced_by(p28, 51,7).
produced_by(p29, 11,53).
produced_by(p30, 18,6).
produced_by(p31, 37,44).
produced_by(p32, 13,40).
produced_by(p33, 48,19).
produced_by(p34, 44,19).
produced_by(p35, 50,41).
produced_by(p36, 5,5).
produced_by(p37, 8,29).
produced_by(p38, 29,11).
produced_by(p39, 47,16).
produced_by(p40, 4,20).
produced_by(p41, 2,22).
produced_by(p42, 13,44).
produced_by(p43, 25,57).
produced_by(p44, 49,21).
produced_by(p45, 9,28).
produced_by(p46, 57,26).
produced_by(p47, 22,20).
produced_by(p48, 35,22).
produced_by(p49, 28,44).
produced_by(p50, 12,34).
produced_by(p51, 17,52).
produced_by(p52, 14,54).
produced_by(p53, 17,1).
produced_by(p54, 44,50).
produced_by(p55, 43,23).
produced_by(p56, 3,60).
produced_by(p57, 3,59).
produced_by(p58, 20,45).
produced_by(p59, 39,31).
produced_by(p60, 55,55).
produced_by(p61, 46,6).
produced_by(p62, 10,34).
produced_by(p63, 23,1).
produced_by(p64, 45,54).
produced_by(p65, 49,6).
produced_by(p66, 40,12).
produced_by(p67, 8,40).
produced_by(p68, 38,24).
produced_by(p69, 35,27).
produced_by(p70, 36,37).
produced_by(p71, 49,60).
produced_by(p72, 22,11).
produced_by(p73, 17,41).
produced_by(p74, 51,25).
produced_by(p75, 48,9).
produced_by(p76, 43,22).
produced_by(p77, 52,38).
produced_by(p78, 56,38).
produced_by(p79, 18,35).
produced_by(p80, 19,41).
produced_by(p81, 18,53).
produced_by(p82, 24,23).
produced_by(p83, 46,10).
produced_by(p84, 37,54).
produced_by(p85, 28,35).
produced_by(p86, 39,23).
produced_by(p87, 27,34).
produced_by(p88, 54,20).
produced_by(p89, 3,47).
produced_by(p90, 45,42).
produced_by(p91, 53,42).
produced_by(p92, 54,18).
produced_by(p93, 5,57).
produced_by(p94, 20,13).
produced_by(p95, 11,47).
produced_by(p96, 51,31).
produced_by(p97, 31,56).
produced_by(p98, 15,29).
produced_by(p99, 40,8).
produced_by(p100, 2,10).
produced_by(p101, 7,46).
produced_by(p102, 50,2).
produced_by(p103, 23,38).
produced_by(p104, 51,9).
produced_by(p105, 13,25).
produced_by(p106, 3,54).
produced_by(p107, 57,23).
produced_by(p108, 48,24).
produced_by(p109, 32,18).
produced_by(p110, 33,29).
produced_by(p111, 24,21).
produced_by(p112, 31,40).
produced_by(p113, 20,40).
produced_by(p114, 58,24).
produced_by(p115, 36,22).
produced_by(p116, 34,22).
produced_by(p117, 28,19).
produced_by(p118, 20,17).
produced_by(p119, 16,19).
produced_by(p120, 25,57).
produced_by(p121, 32,15).
produced_by(p122, 21,51).
produced_by(p123, 24,59).
produced_by(p124, 16,58).
produced_by(p125, 13,46).
produced_by(p126, 17,52).
produced_by(p127, 3,47).
produced_by(p128, 12,56).
produced_by(p129, 22,28).
produced_by(p130, 8,6).
produced_by(p131, 36,39).
produced_by(p132, 25,24).
produced_by(p133, 43,39).
produced_by(p134, 35,59).
produced_by(p135, 59,21).
produced_by(p136, 59,15).
produced_by(p137, 6,7).
produced_by(p138, 42,34).
produced_by(p139, 42,47).
produced_by(p140, 59,17).
produced_by(p141, 33,40).
produced_by(p142, 54,16).
produced_by(p143, 55,40).
produced_by(p144, 25,3).
produced_by(p145, 8,48).
produced_by(p146, 42,29).
produced_by(p147, 35,35).
produced_by(p148, 26,10).
produced_by(p149, 21,1).
produced_by(p150, 48,38).
produced_by(p151, 58,33).
produced_by(p152, 21,16).
produced_by(p153, 13,26).
produced_by(p154, 11,4).
produced_by(p155, 40,22).
produced_by(p156, 25,48).
produced_by(p157, 2,36).
produced_by(p158, 33,7).
produced_by(p159, 36,33).
produced_by(p160, 31,5).
produced_by(p161, 45,9).
produced_by(p162, 20,30).
produced_by(p163, 57,31).
produced_by(p164, 34,18).
produced_by(p165, 4,38).
produced_by(p166, 55,48).
produced_by(p167, 15,35).
produced_by(p168, 17,17).
produced_by(p169, 8,58).
produced_by(p170, 20,55).
produced_by(p171, 1,41).
produced_by(p172, 17,49).
produced_by(p173, 30,43).
produced_by(p174, 5,11).
produced_by(p175, 2,48).
produced_by(p176, 22,38).
produced_by(p177, 47,38).
produced_by(p178, 42,57).
produced_by(p179, 10,30).
produced_by(p180, 1,34).
"""
| 26.644037
| 28
| 0.700985
| 2,882
| 14,521
| 3.344552
| 0.08501
| 0.373483
| 0.019919
| 0.011619
| 0.998859
| 0.998859
| 0.998859
| 0.998859
| 0.998859
| 0.998859
| 0
| 0.268339
| 0.099718
| 14,521
| 544
| 29
| 26.693015
| 0.468982
| 0
| 0
| 0.996324
| 0
| 0
| 0.997783
| 0.338173
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
5422677a2ca950b5dee26133d83c2659b78c396b
| 734
|
py
|
Python
|
lib/masksFakesGeneraters_py_imports.py
|
pkdone/mongo-mangler
|
75dd0162f08d521e18b7f3726f90d1ee7ef782ea
|
[
"MIT"
] | 9
|
2021-11-29T07:06:41.000Z
|
2022-02-07T07:45:05.000Z
|
lib/masksFakesGeneraters_py_imports.py
|
pkdone/mongo-mangler
|
75dd0162f08d521e18b7f3726f90d1ee7ef782ea
|
[
"MIT"
] | 1
|
2021-12-22T14:48:18.000Z
|
2022-01-06T15:28:20.000Z
|
lib/masksFakesGeneraters_py_imports.py
|
pkdone/mongo-mangler
|
75dd0162f08d521e18b7f3726f90d1ee7ef782ea
|
[
"MIT"
] | 2
|
2021-12-22T11:49:55.000Z
|
2022-03-29T20:51:05.000Z
|
from masksFakesGeneraters import fakeDateMillisFromEpoch, fakeDateAfterNow, fakeDateBeforeNow, fakeNumber, fakeNumberBounded, fakePaddedNumberAsText, fakeDecimal, fakeDecimalSignificantPlaces, fakeMoneyAmountDecimal, fakeBoolean, fakeBooleanWeighted, fakeOneOfTwoValuesWeighted, fakeValueFromList, fakeValueFromListWeighted, fakeListOfSubDocs, fakeNSameChars, fakeNAnyUpperChars, fakeNAnyLowerChars, fakeFirstName, fakeLastName, fakeFirstAndLastName, fakeEmailAddress, fakeIPAddress, fakeStreetName, fakeTownName, fakeCountryName, fakeZipCode, fakeCompanyName, maskReplaceFirstPart, maskReplaceLastPart, maskReplaceAll, maskAlterDecimal, maskAlterNumber, maskAlterDate, maskAlterBoolean, maskAlterValueFromList, maskAlterListFromList
| 367
| 733
| 0.896458
| 40
| 734
| 16.45
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054496
| 734
| 1
| 734
| 734
| 0.948127
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5809f32b835f4a46223da24b2e69138dbdd40f95
| 99
|
py
|
Python
|
logexpose/toolbox.py
|
idlesign/django-logexpose
|
5f9839d3211bc5bf39ada11a928b95a2efd2525d
|
[
"BSD-3-Clause"
] | 1
|
2016-08-28T14:51:12.000Z
|
2016-08-28T14:51:12.000Z
|
logexpose/toolbox.py
|
idlesign/django-logexpose
|
5f9839d3211bc5bf39ada11a928b95a2efd2525d
|
[
"BSD-3-Clause"
] | 1
|
2019-07-08T00:18:02.000Z
|
2019-07-08T08:10:49.000Z
|
logexpose/toolbox.py
|
idlesign/django-logexpose
|
5f9839d3211bc5bf39ada11a928b95a2efd2525d
|
[
"BSD-3-Clause"
] | 2
|
2016-05-25T08:14:26.000Z
|
2019-07-08T00:12:19.000Z
|
from .utils import thread_get_logger
def get_logger(alias):
return thread_get_logger(alias)
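# A minimal usage sketch ('default' is a placeholder alias; the helper is
# assumed to return a standard logging.Logger bound to the current thread):
logger = get_logger('default')
logger.info('obtained a logexpose logger')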
| 14.142857
| 36
| 0.787879
| 15
| 99
| 4.866667
| 0.6
| 0.369863
| 0.410959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 99
| 6
| 37
| 16.5
| 0.869048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
5888a6641b59c180e8959d1fa867b40901e94a90
| 217
|
py
|
Python
|
baselines/env/box2d/__init__.py
|
ShuoZ9379/Integration_SIL_and_MBL
|
d7df6501a665d65eb791f7fd9b8e85fd660e6320
|
[
"MIT"
] | null | null | null |
baselines/env/box2d/__init__.py
|
ShuoZ9379/Integration_SIL_and_MBL
|
d7df6501a665d65eb791f7fd9b8e85fd660e6320
|
[
"MIT"
] | null | null | null |
baselines/env/box2d/__init__.py
|
ShuoZ9379/Integration_SIL_and_MBL
|
d7df6501a665d65eb791f7fd9b8e85fd660e6320
|
[
"MIT"
] | null | null | null |
from baselines.env.box2d.lunar_lander_pomdp import LunarLanderContinuousPOMDP
from baselines.env.box2d.lunar_lander_pomdp_v0 import LunarLanderContinuousPOMDPv0
from baselines.env.box2d.lunar_lander import LunarLander
| 72.333333
| 82
| 0.907834
| 27
| 217
| 7.074074
| 0.444444
| 0.204188
| 0.251309
| 0.329843
| 0.554974
| 0.554974
| 0.387435
| 0
| 0
| 0
| 0
| 0.024272
| 0.050691
| 217
| 3
| 83
| 72.333333
| 0.902913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
58a449d68d05459e727ad53a7802f7d6f3a5b818
| 90
|
py
|
Python
|
grade/__init__.py
|
srakrn/Coding-Dojo
|
4ad8bbc1a81e2c9a03a67060ebaea3a368b14a17
|
[
"MIT"
] | null | null | null |
grade/__init__.py
|
srakrn/Coding-Dojo
|
4ad8bbc1a81e2c9a03a67060ebaea3a368b14a17
|
[
"MIT"
] | null | null | null |
grade/__init__.py
|
srakrn/Coding-Dojo
|
4ad8bbc1a81e2c9a03a67060ebaea3a368b14a17
|
[
"MIT"
] | null | null | null |
from grade._grade import Record, GradeRange, thai_grade_range, thai_grade_range_wo_charge
| 45
| 89
| 0.877778
| 14
| 90
| 5.142857
| 0.642857
| 0.25
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 90
| 1
| 90
| 90
| 0.86747
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
546bdb33a09cba3f404d7612ca34a5e8b7cb55fb
| 3,988
|
py
|
Python
|
battery_management_system_test.py
|
clean-code-craft-tcq-1/function-ext-python-Aruna1396
|
2b32dbd24aa94c69b17f199ee1bf12ea3d72bce1
|
[
"MIT"
] | null | null | null |
battery_management_system_test.py
|
clean-code-craft-tcq-1/function-ext-python-Aruna1396
|
2b32dbd24aa94c69b17f199ee1bf12ea3d72bce1
|
[
"MIT"
] | null | null | null |
battery_management_system_test.py
|
clean-code-craft-tcq-1/function-ext-python-Aruna1396
|
2b32dbd24aa94c69b17f199ee1bf12ea3d72bce1
|
[
"MIT"
] | null | null | null |
import check_battery_limits as checker
import bms_temperature_handler as temp
if __name__ == '__main__':
""" battery Health Language Based Tests"""
""" Test Function: is_battery_overall_health_ok for German"""
assert (checker.is_battery_overall_health_ok({'charging_temperature': 25, 'state_of_charge': 70,
'charge_rate': 0.8}, "English", "Celsius") is True)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 125, 'state_of_charge': 70,
'charge_rate': 0.8}, "German", 'Celsius') is False)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 25, 'state_of_charge': 470,
'charge_rate': 0.8}, "German", 'Celsius') is False)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 25, 'state_of_charge': 70,
'charge_rate': 6.8}, "German", 'Celsius') is False)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 125, 'state_of_charge': 170,
'charge_rate': 0.8}, "German", 'Celsius') is False)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 125, 'state_of_charge': 170,
'charge_rate': 1.8}, "German", 'Celsius') is False)
""" Test Function: is_battery_overall_health_ok English"""
assert (checker.is_battery_overall_health_ok({'charging_temperature': 25, 'state_of_charge': 70,
'charge_rate': 0.8}, "English", 'Celsius') is True)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 125, 'state_of_charge': 70,
'charge_rate': 0.8}, "English", 'Celsius') is False)
""" Test Function: is_battery_overall_health_ok to print in English for non-supported languages"""
assert (checker.is_battery_overall_health_ok({'charging_temperature': 125, 'state_of_charge': 170,
'charge_rate': 1.8}, "hebrew", 'Celsius') is False)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 125, 'state_of_charge': 170,
'charge_rate': 1.8}, "Latin", 'Celsius') is False)
""" Test Function: is_battery_overall_health_ok for different charging temperature Units"""
""" Charging Temperature in Fahrenheit"""
assert (checker.is_battery_overall_health_ok({'charging_temperature': 77, 'state_of_charge': 70,
'charge_rate': 0.8}, "German", "Fahrenheit") is True)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 257, 'state_of_charge': 70,
'charge_rate': 0.8}, "English", 'Fahrenheit') is False)
""" Charging Temperature in Kelvin"""
assert (checker.is_battery_overall_health_ok({'charging_temperature': 298.15, 'state_of_charge': 70,
'charge_rate': 0.8}, "English", "Kelvin") is True)
assert (checker.is_battery_overall_health_ok({'charging_temperature': 398.15, 'state_of_charge': 170,
'charge_rate': 0.8}, "German", 'Kelvin') is False)
""" Charging Temperature for unsupported temperature units"""
assert (checker.is_battery_overall_health_ok({'charging_temperature': 25, 'state_of_charge': 70,
'charge_rate': 0.8}, "English", "Rankine") is True)
""" Test Function: perform_temperature_processing """
assert (temp.perform_temperature_processing('Rankine', 25) == 25)
assert (temp.perform_temperature_processing('Fahrenheit', 158) == 70)
assert (temp.perform_temperature_processing('Kelvin', 401.15) == 128)
| 73.851852
| 105
| 0.601304
| 440
| 3,988
| 5.095455
| 0.152273
| 0.076271
| 0.135593
| 0.186441
| 0.790812
| 0.737288
| 0.737288
| 0.737288
| 0.718555
| 0.608385
| 0
| 0.043887
| 0.28009
| 3,988
| 53
| 106
| 75.245283
| 0.737025
| 0
| 0
| 0.361111
| 0
| 0
| 0.268427
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| true
| 0
| 0.055556
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
49a3d6807b2d7ba6caa085f7ef7c3675b3583c78
| 1,357
|
py
|
Python
|
python/lib/measure/mean_squared_error.py
|
timtyree/bgmc
|
891e003a9594be9e40c53822879421c2b8c44eed
|
[
"MIT"
] | null | null | null |
python/lib/measure/mean_squared_error.py
|
timtyree/bgmc
|
891e003a9594be9e40c53822879421c2b8c44eed
|
[
"MIT"
] | null | null | null |
python/lib/measure/mean_squared_error.py
|
timtyree/bgmc
|
891e003a9594be9e40c53822879421c2b8c44eed
|
[
"MIT"
] | null | null | null |
import numpy as np, pandas as pd
def comp_rmse_via_interp(x, *args):
'''find the best (r, kappa) pair for a given (a, D) pair of the full model'''
a,D,q_values,w_values,interp=args
r,kappa=x
#interpolate nu and M using interp
X=np.array([[r,kappa,a,D]])
yhat = interp(X)
nu=yhat[0,0]
M=yhat[0,1]
# M=yhat[0,2]
what_values=M*q_values**nu
rmse_full=np.sqrt(np.mean((what_values-w_values)**2))
return rmse_full
def comp_mse_via_interp(x, *args):
'''find the best (r, kappa) pair for a given (a, D) pair of the full model
x=r,kappa
args=a,D,q_values,w_values,interp
'''
a,D,q_values,w_values,interp=args
r,kappa=x
#interpolate nu and M using interp
X=np.array([[r,kappa,a,D]])
yhat = interp(X)
nu=yhat[0,0]
M=yhat[0,1]
# M=yhat[0,2]
what_values=M*q_values**nu
mse=np.mean((what_values-w_values)**2)
return mse
def comp_mse_via_interp_unattractive(x, *args):
'''find the best (r, kappa, D) triple for the given full model
x=r,kappa,D
args=q_values,w_values,interp
'''
q_values,w_values,interp=args
r,kappa,D=x
#interpolate nu and M using interp
X=np.array([[r,kappa,D]])
yhat = interp(X)
nu=yhat[0,0]
M=yhat[0,1]
# M=yhat[0,2]
what_values=M*q_values**nu
mse=np.mean((what_values-w_values)**2)
return mse
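# A minimal sketch of minimizing comp_mse_via_interp with scipy (the surrogate
# interp and the data below are placeholders, not taken from the source):
if __name__ == '__main__':
    from scipy.optimize import minimize

    def interp(X):
        # hypothetical smooth surrogate returning one (nu, M) row per input row
        r, kappa, a, D = X[0]
        return np.array([[1.0 + 0.1 * r - 0.05 * kappa, 0.5 + 0.2 * a + 0.1 * D]])

    q_values = np.linspace(0.1, 1.0, 20)
    w_values = 0.55 * q_values ** 1.02      # synthetic "observed" values
    args = (0.3, 0.4, q_values, w_values, interp)
    res = minimize(comp_mse_via_interp, x0=[0.5, 0.5], args=args,
                   method='Nelder-Mead')
    print(res.x)  # best-fit (r, kappa)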
| 27.14
| 78
| 0.646279
| 268
| 1,357
| 3.145522
| 0.160448
| 0.078292
| 0.123369
| 0.092527
| 0.905101
| 0.846975
| 0.846975
| 0.820878
| 0.749703
| 0.749703
| 0
| 0.019535
| 0.207811
| 1,357
| 49
| 79
| 27.693878
| 0.764651
| 0.322771
| 0
| 0.709677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096774
| false
| 0
| 0.032258
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|