Dataset schema (113 columns; name and dtype):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |

Each record below shows the repository metadata and the file `content`, followed by a single "Stats" row holding the values of the remaining columns, `avg_line_length` through `hits`, in the column order above.
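Several of the simpler quality-signal columns can be recomputed directly from `content`. The sketch below is a minimal illustration, assuming definitions inferred from the column names alone, not from the dataset's actual pipeline; the function name and exact formulas are mine.

```python
# Minimal sketch of a few content-derived signals. The definitions are
# inferred from the column names; the dataset's real implementation may differ.
import re


def simple_quality_signals(content: str) -> dict:
    lines = content.splitlines() or [""]
    words = re.findall(r"\S+", content)
    n_chars = max(len(content), 1)
    n_words = max(len(words), 1)
    return {
        "size_file_byte": len(content.encode("utf-8")),
        "num_lines": len(lines),
        "num_chars_line_max": max(len(line) for line in lines),
        "num_chars_line_mean": len(content) / len(lines),  # approximates avg_line_length
        "alphanum_fraction": sum(c.isalnum() for c in content) / n_chars,
        "frac_chars_whitespace": sum(c.isspace() for c in content) / n_chars,
        "num_words": len(words),
        "frac_words_unique": len(set(words)) / n_words,
        "mean_word_length": sum(map(len, words)) / n_words,
    }
```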
**Record 1 — `tasks-deploy/it-knows/check.py` in `irdkwmnsb/lkshl-ctf`**

- hexsha: `dca3af2fcd2c78c6548cc6382661f7d0e31dd10b` | size: 8,474 | ext: `py` | lang: Python
- max_stars: repo `irdkwmnsb/lkshl-ctf` @ `e5c0200ddc8ba73df5f321b87b9763fb1bbaba57`, licenses `["MIT"]`, count 3, events 2021-03-30T06:27:58.000Z → 2021-04-03T17:56:35.000Z
- max_issues: same path, repo, head, and licenses; count and event datetimes null
- max_forks: same path, repo, head, and licenses; count and event datetimes null

`content`:

```python
def check(attempt, context):
    if attempt.answer == flags[attempt.participant.id % len(flags)]:
        return Checked(True)
    if attempt.answer in flags:
        return CheckedPlagiarist(False, flags.index(attempt.answer))
    return Checked(False)

flags = ['LKL{U5E_STR1NG5_UT1LITY_jwUOzL5ymvjO}', 'LKL{U5E_STR1NG5_UT1LITY_ZozvUkEbyY6g}', 'LKL{U5E_STR1NG5_UT1LITY_vVtM5gEmDbO6}', 'LKL{U5E_STR1NG5_UT1LITY_JoidvdCZWyjQ}', 'LKL{U5E_STR1NG5_UT1LITY_FKuaHlUbsk4X}', 'LKL{U5E_STR1NG5_UT1LITY_XYEzXnDkUM4s}', 'LKL{U5E_STR1NG5_UT1LITY_bQTJ2URYaT7a}', 'LKL{U5E_STR1NG5_UT1LITY_6rlkl9Yrusea}', 'LKL{U5E_STR1NG5_UT1LITY_hSoqoN6Am81X}', 'LKL{U5E_STR1NG5_UT1LITY_iNGkGvlhLR8i}', 'LKL{U5E_STR1NG5_UT1LITY_qN8XGSD7grwU}', 'LKL{U5E_STR1NG5_UT1LITY_YSbS6GwnjjAR}', 'LKL{U5E_STR1NG5_UT1LITY_0iRZoFn2NENG}', 'LKL{U5E_STR1NG5_UT1LITY_epeVj1U0wtN9}', 'LKL{U5E_STR1NG5_UT1LITY_Qs2mdDp0B4Y4}', 'LKL{U5E_STR1NG5_UT1LITY_95lYxv6Ycf60}', 'LKL{U5E_STR1NG5_UT1LITY_oOaN0haoFYDR}', 'LKL{U5E_STR1NG5_UT1LITY_mJwdQ5PuDrhp}', 'LKL{U5E_STR1NG5_UT1LITY_W5ToZaqNwiOL}', 'LKL{U5E_STR1NG5_UT1LITY_i6z46kmehWCd}', 'LKL{U5E_STR1NG5_UT1LITY_hIo8cyLSW0NP}', 'LKL{U5E_STR1NG5_UT1LITY_kmV1k4XcfOMV}', 'LKL{U5E_STR1NG5_UT1LITY_7iTcO6kTwgX8}', 'LKL{U5E_STR1NG5_UT1LITY_5RUAPPqWaEIQ}', 'LKL{U5E_STR1NG5_UT1LITY_8T3JU1w0s85G}', 'LKL{U5E_STR1NG5_UT1LITY_CvBNKzwSNOtt}', 'LKL{U5E_STR1NG5_UT1LITY_sLPHX4kwCPDv}', 'LKL{U5E_STR1NG5_UT1LITY_rN8JtLLtpoPA}', 'LKL{U5E_STR1NG5_UT1LITY_jMfF7AE4euxG}', 'LKL{U5E_STR1NG5_UT1LITY_Y8WB3MmFKIu6}', 'LKL{U5E_STR1NG5_UT1LITY_rVNHVO3N3Kez}', 'LKL{U5E_STR1NG5_UT1LITY_iCTrc43nDJ8R}', 'LKL{U5E_STR1NG5_UT1LITY_zq8fdvMp9bBO}', 'LKL{U5E_STR1NG5_UT1LITY_KnPkdfRfCZme}', 'LKL{U5E_STR1NG5_UT1LITY_pjbTkzcbxNUK}', 'LKL{U5E_STR1NG5_UT1LITY_iZvptavJrLEr}', 'LKL{U5E_STR1NG5_UT1LITY_fKJzUzYc4WXV}', 'LKL{U5E_STR1NG5_UT1LITY_N5xMAvPlF4TY}', 'LKL{U5E_STR1NG5_UT1LITY_FE85xVTIHKKg}', 'LKL{U5E_STR1NG5_UT1LITY_3nWQlLuXhkur}', 'LKL{U5E_STR1NG5_UT1LITY_FRnbvH1N6D7L}', 'LKL{U5E_STR1NG5_UT1LITY_9Ecppi0XB9ix}', 'LKL{U5E_STR1NG5_UT1LITY_hZhAPyQsThNt}', 'LKL{U5E_STR1NG5_UT1LITY_spIIsJb8NTV4}', 'LKL{U5E_STR1NG5_UT1LITY_dU0Pp8gotsO0}', 'LKL{U5E_STR1NG5_UT1LITY_p95rpRegFxya}', 'LKL{U5E_STR1NG5_UT1LITY_kOIH13QbvhpB}', 'LKL{U5E_STR1NG5_UT1LITY_slFCZmxJb1K8}', 'LKL{U5E_STR1NG5_UT1LITY_48ACAtm0iLoW}', 'LKL{U5E_STR1NG5_UT1LITY_XPIIwEocHywU}', 'LKL{U5E_STR1NG5_UT1LITY_JfKTCIShG78Y}', 'LKL{U5E_STR1NG5_UT1LITY_7u6aXQbOpd2S}', 'LKL{U5E_STR1NG5_UT1LITY_uhscWj6Pfj3f}', 'LKL{U5E_STR1NG5_UT1LITY_qVLeE2bDgVPa}', 'LKL{U5E_STR1NG5_UT1LITY_YiQBqTU7d31Q}', 'LKL{U5E_STR1NG5_UT1LITY_UzAsBTx8RFgt}', 'LKL{U5E_STR1NG5_UT1LITY_v1YPmGHzHiVy}', 'LKL{U5E_STR1NG5_UT1LITY_JUQOcJ2tIWsW}', 'LKL{U5E_STR1NG5_UT1LITY_Yw0i1QbqD9Cr}', 'LKL{U5E_STR1NG5_UT1LITY_g6cKZQwlzues}', 'LKL{U5E_STR1NG5_UT1LITY_w6bAI0TxoII5}', 'LKL{U5E_STR1NG5_UT1LITY_8n5KlDRvYTPR}', 'LKL{U5E_STR1NG5_UT1LITY_wgHGBhwbeVzX}', 'LKL{U5E_STR1NG5_UT1LITY_zBteDiGwjGzU}', 'LKL{U5E_STR1NG5_UT1LITY_FJbJH4LyirAB}', 'LKL{U5E_STR1NG5_UT1LITY_Kwuc5m4HmCbt}', 'LKL{U5E_STR1NG5_UT1LITY_ErwoZDHKSJ2o}', 'LKL{U5E_STR1NG5_UT1LITY_9fcl3u9FLUJE}', 'LKL{U5E_STR1NG5_UT1LITY_zcFpbwSaEk4P}', 'LKL{U5E_STR1NG5_UT1LITY_8OEpnanyoJ4A}', 'LKL{U5E_STR1NG5_UT1LITY_4NtEnL6Q77vl}', 'LKL{U5E_STR1NG5_UT1LITY_eN54PLpbn3vL}', 'LKL{U5E_STR1NG5_UT1LITY_j1ciS0It0kcQ}', 'LKL{U5E_STR1NG5_UT1LITY_Ahql07eKAZBA}', 'LKL{U5E_STR1NG5_UT1LITY_a6lNl4xLDIFG}', 'LKL{U5E_STR1NG5_UT1LITY_ZN7d2JT9eAqi}', 'LKL{U5E_STR1NG5_UT1LITY_OhFYTNnbonEa}', 'LKL{U5E_STR1NG5_UT1LITY_mWXkWmaMJpuw}', 'LKL{U5E_STR1NG5_UT1LITY_mzIytXPUI19F}', 'LKL{U5E_STR1NG5_UT1LITY_GGYZERTKrz36}', 'LKL{U5E_STR1NG5_UT1LITY_K6f4hUMV49Wf}', 'LKL{U5E_STR1NG5_UT1LITY_cC6LpBZMH7L0}', 'LKL{U5E_STR1NG5_UT1LITY_re6xRWRAYllz}', 'LKL{U5E_STR1NG5_UT1LITY_rP8rbaYOCG3q}', 'LKL{U5E_STR1NG5_UT1LITY_MS4281JuO9rw}', 'LKL{U5E_STR1NG5_UT1LITY_cWtmSjU5q5SW}', 
'LKL{U5E_STR1NG5_UT1LITY_YH1EIM3HCNNk}', 'LKL{U5E_STR1NG5_UT1LITY_zNllDmdCnPYh}', 'LKL{U5E_STR1NG5_UT1LITY_fZ5UIUvjsE24}', 'LKL{U5E_STR1NG5_UT1LITY_4lIjhM6FeqEQ}', 'LKL{U5E_STR1NG5_UT1LITY_CmIWndGNr5M0}', 'LKL{U5E_STR1NG5_UT1LITY_MbzYzk5HodC2}', 'LKL{U5E_STR1NG5_UT1LITY_VjVbLtrNbq1y}', 'LKL{U5E_STR1NG5_UT1LITY_L4LuRUhAbYSC}', 'LKL{U5E_STR1NG5_UT1LITY_dfSBBbDKT4L2}', 'LKL{U5E_STR1NG5_UT1LITY_EilbfzOfyGwn}', 'LKL{U5E_STR1NG5_UT1LITY_5vfTLSWwNf6G}', 'LKL{U5E_STR1NG5_UT1LITY_cEW5n0cZhQqs}', 'LKL{U5E_STR1NG5_UT1LITY_0SgAvcS1hBa9}', 'LKL{U5E_STR1NG5_UT1LITY_Oj1b3KbIUr3b}', 'LKL{U5E_STR1NG5_UT1LITY_CbqttLy0g0fr}', 'LKL{U5E_STR1NG5_UT1LITY_8jgg5sG8YQiS}', 'LKL{U5E_STR1NG5_UT1LITY_vAjmaQNTRzcS}', 'LKL{U5E_STR1NG5_UT1LITY_CJgPySeknets}', 'LKL{U5E_STR1NG5_UT1LITY_pAKE5y5WxvY5}', 'LKL{U5E_STR1NG5_UT1LITY_4ceGTl8vSWjj}', 'LKL{U5E_STR1NG5_UT1LITY_QFHtJFpF83hf}', 'LKL{U5E_STR1NG5_UT1LITY_N1ErvExcGJiQ}', 'LKL{U5E_STR1NG5_UT1LITY_B7Z14SfzCTHP}', 'LKL{U5E_STR1NG5_UT1LITY_PD6hsBxhEq6H}', 'LKL{U5E_STR1NG5_UT1LITY_PsMerowjL6lV}', 'LKL{U5E_STR1NG5_UT1LITY_gCWbKeXwQLF6}', 'LKL{U5E_STR1NG5_UT1LITY_Xx5ZPSF1Vcnq}', 'LKL{U5E_STR1NG5_UT1LITY_W8CGWSCNJVe8}', 'LKL{U5E_STR1NG5_UT1LITY_tvnf34wfL0Xz}', 'LKL{U5E_STR1NG5_UT1LITY_Y4KMVHcREP21}', 'LKL{U5E_STR1NG5_UT1LITY_DMOXiMurm1tN}', 'LKL{U5E_STR1NG5_UT1LITY_Ukk9PkzeevmG}', 'LKL{U5E_STR1NG5_UT1LITY_8Q8ObzGYlhbu}', 'LKL{U5E_STR1NG5_UT1LITY_UD6XnZUbhZMI}', 'LKL{U5E_STR1NG5_UT1LITY_wz0ULX1qhLwQ}', 'LKL{U5E_STR1NG5_UT1LITY_54dttlPLNWBn}', 'LKL{U5E_STR1NG5_UT1LITY_qXq9KiM9xZE0}', 'LKL{U5E_STR1NG5_UT1LITY_XTBws5x6tXGy}', 'LKL{U5E_STR1NG5_UT1LITY_UIP5meC1sUoQ}', 'LKL{U5E_STR1NG5_UT1LITY_xY5OLIeWlBy9}', 'LKL{U5E_STR1NG5_UT1LITY_MKthR3tALKil}', 'LKL{U5E_STR1NG5_UT1LITY_y69bUVjpR6uY}', 'LKL{U5E_STR1NG5_UT1LITY_uHV7Z0GShFMf}', 'LKL{U5E_STR1NG5_UT1LITY_oZOjLyaKROTF}', 'LKL{U5E_STR1NG5_UT1LITY_m8hFNq8P3etR}', 'LKL{U5E_STR1NG5_UT1LITY_PCKywHvp8jI4}', 'LKL{U5E_STR1NG5_UT1LITY_BsNgF5xti6pC}', 'LKL{U5E_STR1NG5_UT1LITY_G0HB2aRvmpa5}', 'LKL{U5E_STR1NG5_UT1LITY_FAZcUycfeX1v}', 'LKL{U5E_STR1NG5_UT1LITY_oOZg1DZyuY8g}', 'LKL{U5E_STR1NG5_UT1LITY_Itm3DDNlrYWP}', 'LKL{U5E_STR1NG5_UT1LITY_TMRExgzwKx7i}', 'LKL{U5E_STR1NG5_UT1LITY_UlU2DJ6gIV9g}', 'LKL{U5E_STR1NG5_UT1LITY_BPFQVHtzF8mj}', 'LKL{U5E_STR1NG5_UT1LITY_B9ZGucgKpaTv}', 'LKL{U5E_STR1NG5_UT1LITY_JHPJHuigFi4h}', 'LKL{U5E_STR1NG5_UT1LITY_Js7HJiH1HpFG}', 'LKL{U5E_STR1NG5_UT1LITY_dJjtoZf2VjUr}', 'LKL{U5E_STR1NG5_UT1LITY_SX2XaPdzqyPA}', 'LKL{U5E_STR1NG5_UT1LITY_5u0qo1b3uUu5}', 'LKL{U5E_STR1NG5_UT1LITY_7bo5Lvaqo1oa}', 'LKL{U5E_STR1NG5_UT1LITY_7Nv7NACGu8I1}', 'LKL{U5E_STR1NG5_UT1LITY_HXTNKhrnub8q}', 'LKL{U5E_STR1NG5_UT1LITY_AG1e4WOkKFqJ}', 'LKL{U5E_STR1NG5_UT1LITY_LgI4KQtmBuPz}', 'LKL{U5E_STR1NG5_UT1LITY_mUA6tUukpOyr}', 'LKL{U5E_STR1NG5_UT1LITY_bMLgYPYUVBes}', 'LKL{U5E_STR1NG5_UT1LITY_qdUDhhPQb8KW}', 'LKL{U5E_STR1NG5_UT1LITY_CItb7M9e26iu}', 'LKL{U5E_STR1NG5_UT1LITY_wb4ZlrXLZ23f}', 'LKL{U5E_STR1NG5_UT1LITY_xGPA3lEgpnp0}', 'LKL{U5E_STR1NG5_UT1LITY_86ptQSgHih1U}', 'LKL{U5E_STR1NG5_UT1LITY_oVdkJQns7f8T}', 'LKL{U5E_STR1NG5_UT1LITY_I9453SiicQh6}', 'LKL{U5E_STR1NG5_UT1LITY_bKNXmN1jH9nY}', 'LKL{U5E_STR1NG5_UT1LITY_eeJL04uzOEad}', 'LKL{U5E_STR1NG5_UT1LITY_GOEygw78e2MN}', 'LKL{U5E_STR1NG5_UT1LITY_IzvSeBbyYW57}', 'LKL{U5E_STR1NG5_UT1LITY_56ZpsRpNFpaW}', 'LKL{U5E_STR1NG5_UT1LITY_ERjkdhOTMaPD}', 'LKL{U5E_STR1NG5_UT1LITY_mzPHf0jtcVnm}', 'LKL{U5E_STR1NG5_UT1LITY_F4RvOv3lBWIn}', 'LKL{U5E_STR1NG5_UT1LITY_gGhjuJ2uziX4}', 'LKL{U5E_STR1NG5_UT1LITY_J5ksCaWNVU8Y}', 'LKL{U5E_STR1NG5_UT1LITY_zt5Lc2ieQ06I}', 'LKL{U5E_STR1NG5_UT1LITY_132K6e6OHXst}', 
'LKL{U5E_STR1NG5_UT1LITY_3f2sbfHczbgk}', 'LKL{U5E_STR1NG5_UT1LITY_WCnTOsJ4sOq6}', 'LKL{U5E_STR1NG5_UT1LITY_CSXN4rxo1iFy}', 'LKL{U5E_STR1NG5_UT1LITY_wXDSJ6JzzAB0}', 'LKL{U5E_STR1NG5_UT1LITY_X4ldbKygnvWb}', 'LKL{U5E_STR1NG5_UT1LITY_xJE1ztMwE27X}', 'LKL{U5E_STR1NG5_UT1LITY_oEunBjvr5fYf}', 'LKL{U5E_STR1NG5_UT1LITY_JxbaFdcJtK6t}', 'LKL{U5E_STR1NG5_UT1LITY_dPheeRfl9xjM}', 'LKL{U5E_STR1NG5_UT1LITY_yZ8ObB3N5qHc}', 'LKL{U5E_STR1NG5_UT1LITY_ypNDicCP3mBw}', 'LKL{U5E_STR1NG5_UT1LITY_SpaQzj4oLwO9}', 'LKL{U5E_STR1NG5_UT1LITY_ikxjQojVec41}', 'LKL{U5E_STR1NG5_UT1LITY_utIx7PalnIJS}', 'LKL{U5E_STR1NG5_UT1LITY_dGUFkf2Zpx5o}', 'LKL{U5E_STR1NG5_UT1LITY_IY4vKetR5LfY}', 'LKL{U5E_STR1NG5_UT1LITY_Nwowj9eLSpNr}', 'LKL{U5E_STR1NG5_UT1LITY_ydVGcTKUFugE}', 'LKL{U5E_STR1NG5_UT1LITY_UMMBG2yLMWXS}', 'LKL{U5E_STR1NG5_UT1LITY_JRjCRBILoMyC}', 'LKL{U5E_STR1NG5_UT1LITY_2Sp88q8JYtFV}', 'LKL{U5E_STR1NG5_UT1LITY_H45Oe7UHqLrt}', 'LKL{U5E_STR1NG5_UT1LITY_n9OsbjeKUTDl}', 'LKL{U5E_STR1NG5_UT1LITY_yMA4lT6Tb0ks}', 'LKL{U5E_STR1NG5_UT1LITY_wsG302iFnhkB}', 'LKL{U5E_STR1NG5_UT1LITY_yDGo5q4THP8a}', 'LKL{U5E_STR1NG5_UT1LITY_36l0xNvfr8UQ}', 'LKL{U5E_STR1NG5_UT1LITY_cw5GUKLgwc2c}']
```

Stats (`avg_line_length` → `hits`, schema column order): 1,059.25 | 8,208 | 0.847652 | 1,032 | 8,474 | 6.378876 | 0.214147 | 0.182288 | 0.394957 | 0.607626 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.14404 | 0.0308 | 8,474 | 8 | 8,208 | 1,059.25 | 0.657494 | 0 | 0 | 0 | 0 | 0 | 0.873878 | 0.873878 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8
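The checker hands each participant a personal flag via `id % len(flags)` and reports plagiarism when an answer matches someone else's flag. A self-contained toy version of that dispatch, with plain strings standing in for the platform's `Checked`/`CheckedPlagiarist` objects (stand-ins, not the CTF framework's real API):

```python
# Toy re-implementation of the checker's dispatch logic.
flags = ["flag-for-0", "flag-for-1", "flag-for-2"]


def verdict(participant_id: int, answer: str) -> str:
    own = flags[participant_id % len(flags)]
    if answer == own:
        return "correct"
    if answer in flags:
        # Someone else's flag was submitted: report whose it was.
        return f"plagiarised from participant {flags.index(answer)}"
    return "wrong"


assert verdict(0, "flag-for-0") == "correct"
assert verdict(1, "flag-for-0").startswith("plagiarised")
assert verdict(2, "nope") == "wrong"
```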
**Record 2 — `python_module/test/unit/distributed/test_functional.py` in `1321024918/MegEngine`**

- hexsha: `f4adcbd98de6ed21b8fde3c3d28f2194eb7d9706` | size: 14,594 | ext: `py` | lang: Python
- max_stars: repo `1321024918/MegEngine` @ `19568f67e9e80ae729272f56f19b739a72e5b889`, licenses `["Apache-2.0"]`, count 1, events 2020-10-12T11:09:46.000Z → 2020-10-12T11:09:46.000Z
- max_issues: repo `coincoming/MegEngine` @ same head and licenses; count and event datetimes null
- max_forks: repo `coincoming/MegEngine` @ same head and licenses, count 1, events 2022-02-21T10:41:55.000Z → 2022-02-21T10:41:55.000Z

`content`:

```python
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import multiprocessing as mp
import platform
import numpy as np
import pytest
import megengine as mge
import megengine.distributed as dist
from megengine.core import Parameter, tensor
def _init_process_group_wrapper(world_size, rank, dev, backend, q):
    if rank == 0:
        dist.init_process_group("localhost", 0, world_size, rank, dev, backend)
        q.put(dist.get_master_port())
    else:
        port = q.get()
        dist.init_process_group("localhost", port, world_size, rank, dev, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_reduce_sum():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.reduce_sum(inp)
        if rank == 0:
            assert np.allclose(output.numpy(), expect)
        else:
            assert np.allclose(output.numpy(), 0)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        z = x + y
        p0 = mp.Process(target=worker, args=(0, x, backend, z, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, None, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_gather():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.gather(inp)
        if rank == 0:
            assert np.allclose(output.numpy(), expect)
        else:
            assert np.allclose(output.numpy(), 0)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        z = np.concatenate((x, y))
        p0 = mp.Process(target=worker, args=(0, x, backend, z, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, None, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_broadcast():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.broadcast(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = x + 1
        p0 = mp.Process(target=worker, args=(0, x, backend, x, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, x, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_scatter():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.scatter(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = x + 1
        p0 = mp.Process(
            target=worker, args=(0, x, backend, x[: shape[0] // 2], port_queue)
        )
        p1 = mp.Process(
            target=worker, args=(1, y, backend, x[shape[0] // 2 :], port_queue)
        )
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (100, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_all_to_all():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.all_to_all(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        a = np.concatenate((x[: shape[0] // 2], y[: shape[0] // 2]))
        b = np.concatenate((x[shape[0] // 2 :], y[shape[0] // 2 :]))
        p0 = mp.Process(target=worker, args=(0, x, backend, a, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, b, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (100, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_all_gather():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.all_gather(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        z = np.concatenate((x, y))
        p0 = mp.Process(target=worker, args=(0, x, backend, z, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, z, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_reduce_scatter_sum():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.reduce_scatter_sum(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        z = x + y
        p0 = mp.Process(
            target=worker, args=(0, x, backend, z[: shape[0] // 2], port_queue)
        )
        p1 = mp.Process(
            target=worker, args=(1, y, backend, z[shape[0] // 2 :], port_queue)
        )
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 4), (8, 10), (88, 44)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_all_reduce_sum():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.all_reduce_sum(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        z = x + y
        p0 = mp.Process(target=worker, args=(0, x, backend, z, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, z, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_all_reduce_max():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.all_reduce_max(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        z = np.maximum(x, y)
        p0 = mp.Process(target=worker, args=(0, x, backend, z, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, z, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_all_reduce_min():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = tensor(data)
        output = dist.functional.all_reduce_min(inp)
        assert np.allclose(output.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = np.random.rand(*shape).astype("float32")
        z = np.minimum(x, y)
        p0 = mp.Process(target=worker, args=(0, x, backend, z, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, z, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)


@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="do not imp GPU mode at Windows now"
)
@pytest.mark.isolated_distributed
def test_bcast_param():
    world_size = 2

    def worker(rank, data, backend, expect, port_queue):
        if mge.get_device_count("gpu") < world_size:
            return
        _init_process_group_wrapper(world_size, rank, rank, backend, port_queue)
        inp = Parameter(data)
        dist.functional.bcast_param(inp)
        assert np.allclose(inp.numpy(), expect)

    def check(shape, backend):
        port_queue = mp.Queue()
        x = np.random.rand(*shape).astype("float32")
        y = x + 1
        p0 = mp.Process(target=worker, args=(0, x, backend, x, port_queue))
        p1 = mp.Process(target=worker, args=(1, y, backend, x, port_queue))
        p0.start()
        p1.start()
        p0.join(10)
        p1.join(10)
        assert p0.exitcode == 0 and p1.exitcode == 0

    for shape in [(2, 3), (8, 10), (99, 77)]:
        for backend in ["nccl", "ucx"]:
            check(shape, backend)
```

Stats (`avg_line_length` → `hits`, schema column order): 31.183761 | 88 | 0.602919 | 2,016 | 14,594 | 4.257937 | 0.073413 | 0.057665 | 0.041007 | 0.06151 | 0.924161 | 0.915541 | 0.911463 | 0.907269 | 0.907269 | 0.903075 | 0 | 0.033229 | 0.255584 | 14,594 | 467 | 89 | 31.250535 | 0.756904 | 0.02316 | 0 | 0.80109 | 0 | 0 | 0.079309 | 0 | 0 | 0 | 0 | 0 | 0.065395 | 1 | 0.092643 | false | 0 | 0.019074 | 0 | 0.141689 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
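Every test above reuses one coordination pattern: rank 0 picks the master port and publishes it through an `mp.Queue`, while every other rank blocks on the queue until the port is known. A framework-free sketch of just that handshake; the socket-based free-port helper is my stand-in for `dist.get_master_port`, not MegEngine's mechanism:

```python
# Framework-free sketch of the port handshake in _init_process_group_wrapper.
import multiprocessing as mp
import socket


def _free_port() -> int:
    # Bind to port 0 so the OS picks a free port; stand-in for the real API.
    with socket.socket() as s:
        s.bind(("localhost", 0))
        return s.getsockname()[1]


def worker(rank: int, q: "mp.Queue") -> None:
    if rank == 0:
        port = _free_port()
        q.put(port)      # publish the chosen port
    else:
        port = q.get()   # block until rank 0 has chosen
    print(f"rank {rank} uses port {port}")


if __name__ == "__main__":
    q = mp.Queue()
    procs = [mp.Process(target=worker, args=(r, q)) for r in range(2)]
    for p in procs:
        p.start()
    for p in procs:
        p.join(10)
```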
**Record 3 — `src/aesara_theano_fallback/tensor.py` in `dfm/aesara-theano-fallback`**

- hexsha: `f4bacfbff1708b283316ec0446718d9d09232933` | size: 103 | ext: `py` | lang: Python
- max_stars / max_issues / max_forks: repo `dfm/aesara-theano-fallback` @ `9b7ba725ed9c25fa0ec457b183d4dfa3ad6874ab`, licenses `["MIT"]`; all counts and event datetimes null

`content`:

```python
# -*- coding: utf-8 -*-
from theano.tensor import * # noqa
from theano.tensor import slinalg # noqa
```

Stats (`avg_line_length` → `hits`, schema column order): 20.6 | 41 | 0.669903 | 14 | 103 | 4.928571 | 0.642857 | 0.289855 | 0.463768 | 0.637681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012048 | 0.194175 | 103 | 4 | 42 | 25.75 | 0.819277 | 0.300971 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8
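This file is the Theano branch of a compatibility shim. A shim of this kind is typically written around a fallback import; the sketch below shows that generic pattern under my own assumptions, and is not the package's actual dispatch code:

```python
# Generic fallback-import pattern (sketch; aesara-theano-fallback's real
# backend selection lives elsewhere in the package, not in this file).
try:
    from aesara import tensor  # prefer the maintained fork when installed
except ImportError:
    from theano import tensor  # fall back to legacy Theano

print(tensor.__name__)
```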
**Record 4 — `rebalance.py`**

- hexsha: `f4c087cda429650ce0eb2a0fa1536cc33c929437` | size: 8,237 | ext: `py` | lang: Python
- max_stars: repo `ShirleyPaul/revive` @ `15d22062a2deb5f6f5308ce1439d236d9543a5e2`, licenses `["Apache-2.0"]`, count 46, events 2017-11-08T22:54:32.000Z → 2019-11-23T10:55:35.000Z
- max_issues: repo `Riocloud/Qtum_Revive` @ `bf4aed953ddec24c4b92897968d59903b3536643`, licenses `["MIT"]`, count 2, events 2018-06-01T16:08:41.000Z → 2018-06-05T09:43:27.000Z
- max_forks: repo `Riocloud/Qtum_Revive` @ `bf4aed953ddec24c4b92897968d59903b3536643`, licenses `["MIT"]`, count 11, events 2017-11-08T22:53:40.000Z → 2019-07-22T02:38:59.000Z

`content`:

```python
from ethereum import tester
from ethereum import utils
from ethereum._solidity import get_solidity
SOLIDITY_AVAILABLE = get_solidity() is not None
from player import PaymentChannelPlayer
from protocol import getstatus, completeRound, init_contracts, init_channel_players, init_subnet_participants
from participant import PaymentSubnetParticipant
from leader import PaymentSubnetLeader
# Logging
from ethereum import slogging
slogging.configure(':INFO,eth.vm:INFO')
# Best case scenario.
def simulation_scenario_1():
    print("================TEST SCENARIO 1: HAPPY PATH")
    # Create test blockchain
    blockchain_state = tester.state()
    blockchain_state.mine()
    tester.gas_limit = 3141592
    private_keys = tester.keys[0:3]
    public_addresses = list(map(utils.privtoaddr, private_keys))
    # Create the contract
    channel_contract_code = open('channel.sol').read()
    challenge_contract_code = open('challenge.sol').read()
    contracts = init_contracts(blockchain_state, channel_contract_code, challenge_contract_code, public_addresses)
    blockchain_state.mine()
    # Create snapshots at each phase
    state_snapshots = [blockchain_state.snapshot()]
    # Initialize channel players and subnet participants
    players = init_channel_players(blockchain_state, contracts, private_keys, public_addresses)
    participants = init_subnet_participants(contracts, players, public_addresses)
    player_participant_map = dict((player, participant) for participant in participants for player in participant.player_roles)
    # Create initial unbalanced setting
    players[contracts[0]][0].deposit(100)  # 100 A : B 0
    getstatus(contracts[0])
    players[contracts[1]][1].deposit(100)  # 0 A : C 100
    getstatus(contracts[1])
    players[contracts[2]][0].deposit(100)  # 100 B : C 0
    getstatus(contracts[2])
    # Save pre-rebalance snapshot
    state_snapshots.append(blockchain_state.snapshot())
    # Begin protocol, assign arbitrary leader
    leader = PaymentSubnetLeader(participants)
    # 2 out of 3 participants signal rebalance
    for i in range(0, 2):
        req = participants[i].send_rebalance_request(leader)
        leader.receive_rebalance_request(req)
    # 2/3 >= 1/2 threshold
    assert(leader.threshold_passed)
    # Leader attempts to initiate rebalance, all participants respond
    for i in range(0, 3):
        req = leader.send_initiation_request(participants[i])
        participants[i].receive_initiation_request(req)
        resp = participants[i].send_participation_confirmation(leader)
        leader.receive_participation_confirmation(resp)
    # Leader requests channel freeze from all participants
    for i in range(0, 3):
        req = leader.send_channel_freeze_requests(participants[i])
        participants[i].receive_channel_freeze_request(req)
        resp = participants[i].send_frozen_channel_info(leader)
        leader.receive_frozen_channel_info(resp)
    # Leader generates rebalance transactions, requests signatures
    leader.generate_rebalance_set()
    for i in range(0, 3):
        req = leader.send_rebalance_transactions(participants[i])
        participants[i].receive_rebalance_transactions(req)
        resp = participants[i].send_signed_rebalance_set(leader)
        leader.receive_signed_rebalance_set(resp)
    # Leader announces fully signed transaction set
    for i in range(0, 3):
        req = leader.send_set_signatures(participants[i])
        participants[i].receive_set_signatures(req)
    # Display result
    for i in range(0, 3):
        print('Triggering')
        player = players[contracts[i]][0]
        contracts[i].trigger(sender=player.sk)
        participant = player_participant_map[player]
        participant.update_after_rebalance(player.contract.address)
        blockchain_state.mine(15)
        print('Finalize')
        contracts[i].finalize()
        getstatus(contracts[i])

# Unavailable-signature scenario: the leader withholds the full signature set from participant A.
def simulation_scenario_2():
    print("================TEST SCENARIO 2: UNAVAILABLE COMPLETE SIGNATURE SET FOR PARTICIPANT A")
    # Create test blockchain
    blockchain_state = tester.state()
    blockchain_state.mine()
    tester.gas_limit = 3141592
    private_keys = tester.keys[0:3]
    public_addresses = list(map(utils.privtoaddr, private_keys))
    # Create the contract
    channel_contract_code = open('channel.sol').read()
    challenge_contract_code = open('challenge.sol').read()
    contracts = init_contracts(blockchain_state, channel_contract_code, challenge_contract_code, public_addresses)
    blockchain_state.mine()
    # Create snapshots at each phase
    state_snapshots = [blockchain_state.snapshot()]
    # Initialize channel players and subnet participants
    players = init_channel_players(blockchain_state, contracts, private_keys, public_addresses)
    participants = init_subnet_participants(contracts, players, public_addresses)
    player_participant_map = dict((player, participant) for participant in participants for player in participant.player_roles)
    # Create initial unbalanced setting
    players[contracts[0]][0].deposit(100)  # 100 A : B 0
    getstatus(contracts[0])
    players[contracts[1]][1].deposit(100)  # 0 A : C 100
    getstatus(contracts[1])
    players[contracts[2]][0].deposit(100)  # 100 B : C 0
    getstatus(contracts[2])
    # Save pre-rebalance snapshot
    state_snapshots.append(blockchain_state.snapshot())
    # Begin protocol, assign arbitrary leader
    leader = PaymentSubnetLeader(participants)
    # 2 out of 3 participants signal rebalance
    for i in range(0, 2):
        req = participants[i].send_rebalance_request(leader)
        leader.receive_rebalance_request(req)
    # 2/3 >= 1/2 threshold
    assert(leader.threshold_passed)
    # Leader attempts to initiate rebalance, all participants respond
    for i in range(0, 3):
        req = leader.send_initiation_request(participants[i])
        participants[i].receive_initiation_request(req)
        resp = participants[i].send_participation_confirmation(leader)
        leader.receive_participation_confirmation(resp)
    # Leader requests channel freeze from all participants
    for i in range(0, 3):
        req = leader.send_channel_freeze_requests(participants[i])
        participants[i].receive_channel_freeze_request(req)
        resp = participants[i].send_frozen_channel_info(leader)
        leader.receive_frozen_channel_info(resp)
    # Leader generates rebalance transactions, requests signatures
    leader.generate_rebalance_set()
    for i in range(0, 3):
        req = leader.send_rebalance_transactions(participants[i])
        participants[i].receive_rebalance_transactions(req)
        resp = participants[i].send_signed_rebalance_set(leader)
        leader.receive_signed_rebalance_set(resp)
    # Leader announces fully signed transaction set, but not to participant A
    for i in range(1, 3):
        req = leader.send_set_signatures(participants[i])
        participants[i].receive_set_signatures(req)
    # Participant A issues availability challenge
    participants[0].issue_challenge(contracts[0].address, wei = int(25e9 * 60e3))
    blockchain_state.mine(2)
    # Participants B, C race to respond
    participants[1].respond_to_challenge(contracts[2].address)
    participants[2].respond_to_challenge(contracts[1].address)
    blockchain_state.mine(7)
    # Display result
    for i in range(0, 3):
        print('Triggering')
        player = players[contracts[i]][0] if not i else players[contracts[i]][1]
        contracts[i].trigger(sender=player.sk)
        participant = player_participant_map[player]
        if i == 0:
            participant.update_after_rebalance_verified(player.contract.address)
        else:
            participant.update_after_rebalance(player.contract.address)
        blockchain_state.mine(15)
        print('Finalize')
        contracts[i].finalize()
        getstatus(contracts[i])


if __name__ == '__main__':
    simulation_scenario_1()
    simulation_scenario_2()

# # Check some assertions
# try:
#     completeRound(players, 1, 6, 0, 0, 0)  # Should fail
# except AssertionError:
#     pass  # Should fail
# else:
#     raise(ValueError("Too much balance!"))
```

Stats (`avg_line_length` → `hits`, schema column order): 38.853774 | 127 | 0.721501 | 994 | 8,237 | 5.784708 | 0.17002 | 0.054261 | 0.012522 | 0.022957 | 0.818087 | 0.818087 | 0.805217 | 0.805217 | 0.805217 | 0.805217 | 0 | 0.02193 | 0.186233 | 8,237 | 212 | 128 | 38.853774 | 0.835894 | 0.177249 | 0 | 0.789474 | 0 | 0 | 0.035226 | 0 | 0 | 0 | 0 | 0 | 0.015038 | 1 | 0.015038 | false | 0.015038 | 0.06015 | 0 | 0.075188 | 0.045113 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
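The protocol's opening phase is a simple majority vote: participants signal a rebalance and the leader trips its threshold once at least half have signalled. A toy version of just that vote; the class name and interface below are mine, not the repository's `PaymentSubnetLeader` API:

```python
# Toy version of the rebalance vote in simulation_scenario_1: the threshold
# trips once at least half of the participants have signalled.
class ToyLeader:
    def __init__(self, n_participants: int) -> None:
        self.n = n_participants
        self.requests = set()

    def receive_rebalance_request(self, participant_id: int) -> None:
        self.requests.add(participant_id)

    @property
    def threshold_passed(self) -> bool:
        return len(self.requests) * 2 >= self.n


leader = ToyLeader(3)
leader.receive_rebalance_request(0)
leader.receive_rebalance_request(1)
assert leader.threshold_passed  # 2/3 >= 1/2, as in the scenario's comment
```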
**Record 5 — `vlne/keras/models/__init__.py` in `usert5432/vlne`**

- hexsha: `f4d53d861967024a3793d21ec7ed0815832545bb` | size: 186 | ext: `py` | lang: Python
- max_stars / max_issues / max_forks: repo `usert5432/vlne` @ `e3cafd30ecce3a2dbc4a37cc4257d07fb1a1785d`, licenses `["MIT"]`; all counts and event datetimes null

`content`:

```python
"""Definitions of `keras` models for the `vlne` training"""
from .models_flat import *
from .models_slice import *
from .models_lstm import *
from .models_atten import model_trans_v1
```

Stats (`avg_line_length` → `hits`, schema column order): 26.571429 | 59 | 0.763441 | 27 | 186 | 5.037037 | 0.62963 | 0.294118 | 0.352941 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006289 | 0.145161 | 186 | 6 | 60 | 31 | 0.849057 | 0.284946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7
**Record 6 — `badx12/utils/errors.py` in `agaddis02/badX12`**

- hexsha: `f4dc3fe47234043232bd6af8b83104406efd2862` | size: 1,486 | ext: `py` | lang: Python
- max_stars / max_issues / max_forks: repo `agaddis02/badX12` @ `7362a4d9629e570be8cd3b42af5210cda39e0efc`, licenses `["MIT"]`; all counts and event datetimes null

`content`:

```python
# -*- coding: utf-8 -*-
class FieldValidationError(Exception):
    """Exception raised for errors in the input.

    Attributes:
        segment -- segment in which the error occurred
        msg -- explanation of the error
    """

    def __init__(self, segment, msg):
        self.segment = segment
        self.msg = msg


class IDMismatchError(Exception):
    """Exception raised for errors in the input.

    Attributes:
        segment -- segment in which the error occurred
        msg -- explanation of the error
    """

    def __init__(self, segment, msg):
        self.segment = segment
        self.msg = msg


class SegmentCountError(Exception):
    """Exception raised for errors in the input.

    Attributes:
        segment -- segment in which the error occurred
        msg -- explanation of the error
    """

    def __init__(self, segment, msg):
        self.segment = segment
        self.msg = msg


class InvalidFileTypeError(Exception):
    """Exception raised for errors in the input.

    Attributes:
        segment -- segment in which the error occurred
        msg -- explanation of the error
    """

    def __init__(self, segment, msg):
        self.expr = segment
        self.msg = msg


class SegmentTerminatorNotFoundError(Exception):
    """Exception raised for errors in the Interchange Header.

    Attributes:
        msg -- explanation of the error
    """

    def __init__(self, msg):
        self.msg = msg
```
Stats (`avg_line_length` → `hits`, schema column order): 25.62069 | 61 | 0.618439 | 163 | 1,486 | 5.515337 | 0.184049 | 0.080089 | 0.133482 | 0.150167 | 0.820912 | 0.79644 | 0.79644 | 0.754171 | 0.715239 | 0.715239 | 0 | 0.000959 | 0.298116 | 1,486 | 57 | 62 | 26.070175 | 0.860978 | 0.467026 | 0 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.263158 | false | 0 | 0 | 0 | 0.526316 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
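The five exception classes repeat one body (and `InvalidFileTypeError.__init__` stores `self.expr` where its siblings store `self.segment`, possibly an oversight). A common base class removes the duplication; this is a refactoring sketch, not badX12's actual code, though the public class names are kept:

```python
# Sketch: one shared base class instead of five copies of the same body.
class _SegmentError(Exception):
    """Exception raised for errors in the input.

    Attributes:
        segment -- segment in which the error occurred
        msg -- explanation of the error
    """

    def __init__(self, segment, msg):
        self.segment = segment
        self.msg = msg


class FieldValidationError(_SegmentError): pass
class IDMismatchError(_SegmentError): pass
class SegmentCountError(_SegmentError): pass
class InvalidFileTypeError(_SegmentError): pass
```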
**Record 7 — `steering-models/community-models/rambo/preprocess_train_data.py` in `vaibhav-s/self-driving-car`**

- hexsha: `f4eec3904f961be93c6768c9b60ff82ac5390fd5` | size: 5,399 | ext: `py` | lang: Python
- max_stars / max_issues / max_forks: repo `vaibhav-s/self-driving-car` @ `eb5865d50499f90b3eeace869c1f8a65cf9e2c46`, licenses `["MIT"]`; all counts and event datetimes null

`content` (Python 2; note the bare `print` statements):

```python
from keras.preprocessing.image import load_img, img_to_array
import pandas as pd
import numpy as np
from skimage.exposure import rescale_intensity
from matplotlib.colors import rgb_to_hsv
from config import DataConfig
def make_hsv_data(path):
    df = pd.read_csv(path)
    num_rows = df.shape[0]
    X = np.zeros((num_rows, row, col, 3), dtype=np.uint8)
    for i in range(num_rows):
        if i % 1000 == 0:
            print "Processed " + str(i) + " images..."
        path = df['fullpath'].iloc[i]
        img = load_img(data_path + path, target_size=(row, col))
        img = img_to_array(img)
        img = rgb_to_hsv(img)
        img = np.array(img, dtype=np.uint8)
        X[i] = img
    return X, np.array(df["angle"])


def make_color_data(path):
    df = pd.read_csv(path)
    num_rows = df.shape[0]
    X = np.zeros((num_rows, row, col, 3), dtype=np.uint8)
    for i in range(num_rows):
        if i % 1000 == 0:
            print "Processed " + str(i) + " images..."
        path = df['fullpath'].iloc[i]
        img = load_img(data_path + path, target_size=(row, col))
        img = img_to_array(img)
        img = np.array(img, dtype=np.uint8)
        X[i] = img
    return X, np.array(df["angle"])


def make_grayscale_diff_data(path, num_channels=2):
    df = pd.read_csv(path)
    num_rows = df.shape[0]
    X = np.zeros((num_rows - num_channels, row, col, num_channels), dtype=np.uint8)
    for i in range(num_channels, num_rows):
        if i % 1000 == 0:
            print "Processed " + str(i) + " images..."
        for j in range(num_channels):
            path0 = df['fullpath'].iloc[i - j - 1]
            path1 = df['fullpath'].iloc[i - j]
            img0 = load_img(data_path + path0, grayscale=True, target_size=(row, col))
            img1 = load_img(data_path + path1, grayscale=True, target_size=(row, col))
            img0 = img_to_array(img0)
            img1 = img_to_array(img1)
            img = img1 - img0
            img = rescale_intensity(img, in_range=(-255, 255), out_range=(0, 255))
            img = np.array(img, dtype=np.uint8)
            X[i - num_channels, :, :, j] = img[:, :, 0]
    return X, np.array(df["angle"].iloc[num_channels:])


def make_grayscale_diff_tx_data(path, num_channels=2):
    df = pd.read_csv(path)
    num_rows = df.shape[0]
    X = np.zeros((num_rows - num_channels, row, col, num_channels), dtype=np.uint8)
    for i in range(num_channels, num_rows):
        if i % 1000 == 0:
            print "Processed " + str(i) + " images..."
        path1 = df['fullpath'].iloc[i]
        img1 = load_img(data_path + path1, grayscale=True, target_size=(row, col))
        img1 = img_to_array(img1)
        for j in range(1, num_channels + 1):
            path0 = df['fullpath'].iloc[i - j]
            img0 = load_img(data_path + path0, grayscale=True, target_size=(row, col))
            img0 = img_to_array(img0)
            img = img1 - img0
            img = rescale_intensity(img, in_range=(-255, 255), out_range=(0, 255))
            img = np.array(img, dtype=np.uint8)
            X[i - num_channels, :, :, j - 1] = img[:, :, 0]
    return X, np.array(df["angle"].iloc[num_channels:])


def make_hsv_grayscale_diff_data(path, num_channels=2):
    df = pd.read_csv(path)
    num_rows = df.shape[0]
    X = np.zeros((num_rows - num_channels, row, col, num_channels), dtype=np.uint8)
    for i in range(num_channels, num_rows):
        if i % 1000 == 0:
            print "Processed " + str(i) + " images..."
        for j in range(num_channels):
            path0 = df['fullpath'].iloc[i - j - 1]
            path1 = df['fullpath'].iloc[i - j]
            img0 = load_img(data_path + path0, target_size=(row, col))
            img1 = load_img(data_path + path1, target_size=(row, col))
            img0 = img_to_array(img0)
            img1 = img_to_array(img1)
            img0 = rgb_to_hsv(img0)
            img1 = rgb_to_hsv(img1)
            img = img1[:, :, 2] - img0[:, :, 2]
            img = rescale_intensity(img, in_range=(-255, 255), out_range=(0, 255))
            img = np.array(img, dtype=np.uint8)
            X[i - num_channels, :, :, j] = img
    return X, np.array(df["angle"].iloc[num_channels:])


if __name__ == "__main__":
    config = DataConfig()
    data_path = config.data_path
    row, col = config.height, config.width
    print "Pre-processing phase 1 data..."
    X_train, y_train = make_hsv_grayscale_diff_data("data/train_round1.txt", 4)
    np.save("{}/X_train_round1_hsv_gray_diff_ch4".format(data_path), X_train)
    np.save("{}/y_train_round1_hsv_gray_diff_ch4".format(data_path), y_train)
    X_val, y_val = make_hsv_grayscale_diff_data("data/val_round1.txt", 4)
    np.save("{}/X_train_round1_hsv_gray_diff_ch4".format(data_path), X_val)
    np.save("{}/y_train_round1_hsv_gray_diff_ch4".format(data_path), y_val)
    print "Pre-processing phase 2 data..."
    for i in range(1, 6):
        X_train, y_train = make_hsv_grayscale_diff_data("data/train_round2_part" + str(i) + ".txt", 4)
        np.save("{}/X_train_round2_hsv_gray_diff_ch4_part{}".format(data_path, i), X_train)
        np.save("{}/y_train_round2_hsv_gray_diff_ch4_part{}".format(data_path, i), y_train)
```

Stats (`avg_line_length` → `hits`, schema column order): 40.901515 | 103 | 0.582145 | 794 | 5,399 | 3.707809 | 0.117128 | 0.057065 | 0.040761 | 0.040761 | 0.837636 | 0.825068 | 0.804008 | 0.802649 | 0.802649 | 0.790421 | 0 | 0.037484 | 0.27857 | 5,399 | 132 | 104 | 40.901515 | 0.718357 | 0 | 0 | 0.638889 | 0 | 0 | 0.103815 | 0.050674 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.055556 | null | null | 0.064815 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
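Notice that the AST-derived `qsc_codepython_*` signals come back null for this row, plausibly because the Python 2 `print` statements fail a Python 3 parse (an inference, not documented). A hypothetical Python 3 port of one of the functions, behaviour otherwise unchanged; `row`, `col`, and `data_path` stay module-level globals, as in the original script:

```python
# Hypothetical Python 3 port of make_color_data from the record above.
import numpy as np
import pandas as pd
from keras.preprocessing.image import img_to_array, load_img


def make_color_data(path):
    df = pd.read_csv(path)
    num_rows = df.shape[0]
    X = np.zeros((num_rows, row, col, 3), dtype=np.uint8)
    for i in range(num_rows):
        if i % 1000 == 0:
            print("Processed " + str(i) + " images...")
        img_path = df['fullpath'].iloc[i]  # renamed to avoid shadowing `path`
        img = load_img(data_path + img_path, target_size=(row, col))
        X[i] = np.array(img_to_array(img), dtype=np.uint8)
    return X, np.array(df["angle"])
```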
f4ef082e8eea9c25eefeaa40118243ca17252563
| 504
|
py
|
Python
|
src/make_allpopbamlists.py
|
silastittes/parv_local
|
67d0a804af320bc50024fd1e6e2190b0c55fb388
|
[
"MIT"
] | null | null | null |
src/make_allpopbamlists.py
|
silastittes/parv_local
|
67d0a804af320bc50024fd1e6e2190b0c55fb388
|
[
"MIT"
] | null | null | null |
src/make_allpopbamlists.py
|
silastittes/parv_local
|
67d0a804af320bc50024fd1e6e2190b0c55fb388
|
[
"MIT"
] | null | null | null |
grep Teo pop_key | awk '{print "data/bams/til11-alignments/deduped-bam/" $3 ".deduped.bam"}' > data/bamlist/til11-Teo-allBams.txt
grep LR pop_key | awk '{print "data/bams/til11-alignments/deduped-bam/" $3 ".deduped.bam"}' > data/bamlist/til11-LR-allBams.txt
grep Teo pop_key | awk '{print "data/bams/v5-alignments/deduped-bam/" $3 ".deduped.bam"}' > data/bamlist/v5-Teo-allBams.txt
grep LR pop_key | awk '{print "data/bams/v5-alignments/deduped-bam/" $3 ".deduped.bam"}' > data/bamlist/v5-LR-allBams.txt
```

Stats (`avg_line_length` → `hits`, schema column order): 100.8 | 129 | 0.722222 | 84 | 504 | 4.285714 | 0.214286 | 0.222222 | 0.1 | 0.155556 | 0.933333 | 0.933333 | 0.933333 | 0.933333 | 0.894444 | 0.894444 | 0 | 0.034783 | 0.087302 | 504 | 4 | 130 | 126 | 0.747826 | 0 | 0 | 0 | 0 | 1 | 0.519841 | 0.313492 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 1 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 12
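Each pipeline filters `pop_key` by a group label and rewrites the third column into a BAM path. A pure-Python equivalent of the first pipeline, as a sketch; it assumes `pop_key` is whitespace-delimited with the sample name in the third column, exactly as the `awk '{print ... $3 ...}'` call does:

```python
# Pure-Python equivalent of:
#   grep Teo pop_key | awk '{print "data/bams/til11-alignments/deduped-bam/" $3 ".deduped.bam"}'
with open("pop_key") as src, open("data/bamlist/til11-Teo-allBams.txt", "w") as dst:
    for line in src:
        if "Teo" in line:  # grep matches the substring anywhere on the line
            sample = line.split()[2]
            dst.write(f"data/bams/til11-alignments/deduped-bam/{sample}.deduped.bam\n")
```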
**Record 9 — `tests/test_models/test_alembic.py` in `mowangdk/huskar`**

- hexsha: `7607db8a86f5e59e45e253ec71556cf71c168f5d` | size: 140 | ext: `py` | lang: Python
- max_stars: repo `mowangdk/huskar` @ `7692fbc5672a5ae6e2a33616c493466a7137f8cd`, licenses `["MIT"]`, count 59, events 2019-10-31T10:50:10.000Z → 2021-11-26T04:32:25.000Z
- max_issues: same path, repo, head, and licenses, count 5, events 2019-10-31T10:37:30.000Z → 2020-03-02T06:45:46.000Z
- max_forks: same path, repo, head, and licenses, count 9, events 2019-10-31T10:35:00.000Z → 2019-12-01T14:13:58.000Z

`content`:

```python
from __future__ import absolute_import

from huskar_api.models.alembic import get_metadata


def test_metadata():
    assert get_metadata()
```
Stats (`avg_line_length` → `hits`, schema column order): 17.5 | 50 | 0.814286 | 19 | 140 | 5.526316 | 0.684211 | 0.209524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135714 | 140 | 7 | 51 | 20 | 0.867769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | true | 0 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7
**Record 10 — `problem-001/sol_3.py` in `memset0/naive-toys`**

- hexsha: `76115180dcde38d342a0b8fbe891e375efcf94d3` | size: 108 | ext: `py` | lang: Python
- max_stars: repo `memset0/naive-toys` @ `be12745d268722351935414187ea06635d95c81a`, licenses `["MIT"]`, count 4, events 2021-09-10T08:51:40.000Z → 2021-10-07T03:00:48.000Z
- max_issues: same path, repo, head, and licenses; count and event datetimes null
- max_forks: same path, repo, head, and licenses; count and event datetimes null

`content`:

```python
print('\n'.join(__import__('random').sample(open('bh.csv', 'r+').read().splitlines(), int(input('m = ')))))
```

Stats (`avg_line_length` → `hits`, schema column order): 54 | 107 | 0.601852 | 15 | 108 | 4.066667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.046296 | 108 | 1 | 108 | 108 | 0.592233 | 0 | 0 | 0 | 0 | 0 | 0.185185 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 7
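The one-liner reads `bh.csv`, splits it into lines, and prints `m` of them chosen at random. Unpacked into readable steps (same behaviour; the original's `'r+'` open mode is unnecessary for a pure read, so a default read-only open is used here):

```python
# The record's one-liner, step by step.
import random

m = int(input('m = '))
with open('bh.csv') as f:
    lines = f.read().splitlines()
print('\n'.join(random.sample(lines, m)))
```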
**Record 11 — `hata/discord/http/urls.py` in `m0nk3ybraindead/hata`**

- hexsha: `7624ae713b80d5111e6d79da9341c4d4ab0334c9` | size: 51,421 | ext: `py` | lang: Python
- max_stars: repo `m0nk3ybraindead/hata` @ `f87ed3d7009eeae31d6ea158772efd33775c7b1c`, licenses `["0BSD"]`, count 1, events 2022-03-02T03:59:57.000Z → 2022-03-02T03:59:57.000Z
- max_issues: same path, repo, head, and licenses; count and event datetimes null
- max_forks: same path, repo, head, and licenses; count and event datetimes null

`content` (this record is cut off by the end of the excerpt):

```python
__all__ = (
    'API_ENDPOINT', 'CDN_ENDPOINT', 'DISCORD_ENDPOINT', 'INVITE_URL_RP', 'MESSAGE_JUMP_URL_RP', 'STATUS_ENDPOINT',
    'VALID_ICON_FORMATS', 'VALID_ICON_FORMATS_EXTENDED', 'VALID_ICON_MEDIA_TYPES', 'VALID_ICON_MEDIA_TYPES_EXTENDED',
    'VALID_STICKER_IMAGE_MEDIA_TYPES', 'is_media_url'
)

import re

from scarletio import export, include

from ...env import (
    API_VERSION, CUSTOM_API_ENDPOINT, CUSTOM_CDN_ENDPOINT, CUSTOM_DISCORD_ENDPOINT, CUSTOM_STATUS_ENDPOINT
)

from ..bases import ICON_TYPE_NONE, ICON_TYPE_STATIC

ChannelGuildBase = include('ChannelGuildBase')
StickerFormat = include('StickerFormat')

API_ENDPOINT = f'https://discord.com/api/v{API_VERSION}' if (CUSTOM_API_ENDPOINT is None) else CUSTOM_API_ENDPOINT
CDN_ENDPOINT = 'https://cdn.discordapp.com' if (CUSTOM_CDN_ENDPOINT is None) else CUSTOM_CDN_ENDPOINT
DISCORD_ENDPOINT = 'https://discord.com' if (CUSTOM_DISCORD_ENDPOINT is None) else CUSTOM_DISCORD_ENDPOINT
STATUS_ENDPOINT = 'https://status.discord.com/api/v2' if (CUSTOM_STATUS_ENDPOINT is None) else CUSTOM_STATUS_ENDPOINT

del CUSTOM_API_ENDPOINT, CUSTOM_CDN_ENDPOINT, CUSTOM_DISCORD_ENDPOINT, CUSTOM_STATUS_ENDPOINT, API_VERSION

VALID_ICON_SIZES = frozenset((
    *( 1 << x for x in range(4, 13)),
    *((1 << x) * 3 for x in range(9, 11)),
    *((1 << x) * 5 for x in range(2, 9)),
))

VALID_ICON_FORMATS = frozenset(('jpg', 'jpeg', 'png', 'webp'))
VALID_ICON_FORMATS_EXTENDED = frozenset((*VALID_ICON_FORMATS, 'gif',))

VALID_ICON_MEDIA_TYPES = frozenset(('image/jpeg', 'image/png', 'image/webp'))
VALID_ICON_MEDIA_TYPES_EXTENDED = frozenset(('image/gif', *VALID_ICON_MEDIA_TYPES))

VALID_STICKER_IMAGE_MEDIA_TYPES = frozenset(('image/png', 'application/json'))

STYLE_PATTERN = re.compile('(^shield$)|(^banner[1-4]$)')

MESSAGE_JUMP_URL_RP = re.compile('(?:https://)?discord(?:app)?.com/channels/(?:(\d{7,21})|@me)/(\d{7,21})/(\d{7,21})')
export(MESSAGE_JUMP_URL_RP, 'MESSAGE_JUMP_URL_RP')

_try_get_guild_id = include('_try_get_guild_id')


# returns a URL that allows the client to jump to this message
# guild is guild's id, or @me if there is no guild
def message_jump_url(message):
    """
    Returns a jump url to the message. If the message's channel is a partial guild channel, returns `None`.

    This function is a shared property of ``Message``-s.

    Returns
    -------
    url : `None`, `str`
    """
    channel_id = message.channel_id
    guild_id = message.guild_id
    if guild_id:
        guild_id = str(guild_id)
    else:
        guild_id = '@me'
    return f'{DISCORD_ENDPOINT}/channels/{guild_id}/{channel_id}/{message.id}'


CDN_RP = re.compile(
    'https://(?:'
        'cdn\.discordapp\.com|'
        'discord\.com|'
        '(?:'
            'images-ext-\d+|'
            'media'
        ')\.discordapp\.net'
    ')/'
)


def is_cdn_url(url):
    """
    Returns whether the given url a Discord content delivery network url.

    Parameters
    ----------
    url : `str`
        The url to check.

    Returns
    -------
    is_cdn_url : `bool`

    Examples
    --------
    Icons: `https://cdn.discordapp.com/...`
    Assets: `https://discord.com/...`
    Proxy service: `https://images-ext-1.discordapp.net/...`
    Attachments: `https://media.discordapp.net/...`
    """
    return (CDN_RP.match(url) is not None)


def is_media_url(url):
    """
    Returns whether the given url uses the discord's media content delivery network.

    Parameters
    ----------
    url : `str`
        The url to check.

    Returns
    -------
    is_media_url : `bool`
    """
    return url.startswith('https://media.discordapp.net/')


def guild_icon_url(guild):
    """
    Returns the guild's icon's image's url. If the guild has no icon, then returns `None`.

    This function is a shared property of ``Guild``, ``GuildPreview``.

    Returns
    -------
    url : `None`, `str`
    """
    icon_type = guild.icon_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if icon_type is ICON_TYPE_STATIC:
        prefix = ''
        ext = 'png'
    else:
        prefix = 'a_'
        ext = 'gif'

    return f'{CDN_ENDPOINT}/icons/{guild.id}/{prefix}{guild.icon_hash:0>32x}.{ext}'


def guild_icon_url_as(guild, ext=None, size=None):
    """
    Returns the guild's icon's url. If the guild has no icon, then returns `None`.

    This function is a shared method of ``Guild``, ``GuildPreview``.

    Parameters
    ----------
    ext : `None`, `str` = `None`, Optional
        The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`. If the guild has
        animated icon, it can `'gif'` as well.
    size : `None`, `int` = `None`, Optional
        The preferred minimal size of the image's url.

    Returns
    -------
    url : `None`, `str`

    Raises
    ------
    ValueError
        If `ext`, `size` was not passed as any of the expected values.
    """
    icon_type = guild.icon_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if size is None:
        end = ''
    elif size in VALID_ICON_SIZES:
        end = f'?size={size}'
    else:
        raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')

    if ext is None:
        if icon_type is ICON_TYPE_STATIC:
            prefix = ''
            ext = 'png'
        else:
            prefix = 'a_'
            ext = 'gif'
    else:
        if icon_type is ICON_TYPE_STATIC:
            if ext not in VALID_ICON_FORMATS:
                raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
            prefix = ''
        else:
            if ext not in VALID_ICON_FORMATS_EXTENDED:
                raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
            prefix = 'a_'

    return f'{CDN_ENDPOINT}/icons/{guild.id}/{prefix}{guild.icon_hash:0>32x}.{ext}{end}'


def guild_invite_splash_url(guild):
    """
    Returns the guild's invite splash's image's url. If the guild has no invite splash, then returns `None`.

    This function is a shared property of ``Guild``, ``GuildPreview``.

    Returns
    -------
    url : `None`, `str`
    """
    icon_type = guild.invite_splash_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if icon_type is ICON_TYPE_STATIC:
        prefix = ''
        ext = 'png'
    else:
        prefix = 'a_'
        ext = 'gif'

    return f'{CDN_ENDPOINT}/splashes/{guild.id}/{prefix}{guild.invite_splash_hash:0>32x}.{ext}'


def guild_invite_splash_url_as(guild, ext=None, size=None):
    """
    Returns the guild's invite splash's image's url. If the guild has no invite splash, then returns `None`.

    This function is a shared method of ``Guild``, ``GuildPreview``.

    Parameters
    ----------
    ext : `None`, `str` = `None`, Optional
        The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
    size : `None`, `int` = `None`, Optional
        The preferred minimal size of the image's url.

    Returns
    -------
    url : `None`, `str`

    Raises
    ------
    ValueError
        If `ext`, `size` was not passed as any of the expected values.
    """
    icon_type = guild.invite_splash_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if size is None:
        end = ''
    elif size in VALID_ICON_SIZES:
        end = f'?size={size}'
    else:
        raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')

    if ext is None:
        if icon_type is ICON_TYPE_STATIC:
            prefix = ''
            ext = 'png'
        else:
            prefix = 'a_'
            ext = 'gif'
    else:
        if icon_type is ICON_TYPE_STATIC:
            if ext not in VALID_ICON_FORMATS:
                raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
            prefix = ''
        else:
            if ext not in VALID_ICON_FORMATS_EXTENDED:
                raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
            prefix = 'a_'

    return f'{CDN_ENDPOINT}/splashes/{guild.id}/{prefix}{guild.invite_splash_hash:0>32x}.{ext}{end}'


def guild_discovery_splash_url(guild):
    """
    Returns the guild's discovery splash's image's url. If the guild has no discovery splash, then returns `None`.

    This function is a shared property of ``Guild``-s.

    Returns
    -------
    url : `None`, `str`
    """
    icon_type = guild.discovery_splash_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if icon_type is ICON_TYPE_STATIC:
        prefix = ''
        ext = 'png'
    else:
        prefix = 'a_'
        ext = 'gif'

    return f'{CDN_ENDPOINT}/discovery-splashes/{guild.id}/{prefix}{guild.discovery_splash_hash:0>32x}.{ext}'


def guild_discovery_splash_url_as(guild, ext=None, size=None):
    """
    Returns the guild's discovery splash's image's url. If the guild has no discovery splash, then returns `None`.

    This function is a shared method of ``Guild``-s.

    Parameters
    ----------
    ext : `None`, `str` = `None`, Optional
        The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
    size : `None`, `int` = `None`, Optional
        The preferred minimal size of the image's url.

    Returns
    -------
    url : `None`, `str`

    Raises
    ------
    ValueError
        If `ext`, `size` was not passed as any of the expected values.
    """
    icon_type = guild.discovery_splash_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if size is None:
        end = ''
    elif size in VALID_ICON_SIZES:
        end = f'?size={size}'
    else:
        raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')

    if ext is None:
        if icon_type is ICON_TYPE_STATIC:
            prefix = ''
            ext = 'png'
        else:
            prefix = 'a_'
            ext = 'gif'
    else:
        if icon_type is ICON_TYPE_STATIC:
            if ext not in VALID_ICON_FORMATS:
                raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
            prefix = ''
        else:
            if ext not in VALID_ICON_FORMATS_EXTENDED:
                raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
            prefix = 'a_'

    return f'{CDN_ENDPOINT}/discovery-splashes/{guild.id}/{prefix}{guild.discovery_splash_hash:0>32x}.{ext}{end}'


def guild_banner_url(guild):
    """
    Returns the guild's banner's image's url. If the guild has no banner, then returns `None`.

    This function is a shared property of ``Guild``-s.

    Returns
    -------
    url : `None`, `str`
    """
    icon_type = guild.banner_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if icon_type is ICON_TYPE_STATIC:
        prefix = ''
        ext = 'png'
    else:
        prefix = 'a_'
        ext = 'gif'

    return f'{CDN_ENDPOINT}/banners/{guild.id}/{prefix}{guild.banner_hash:0>32x}.{ext}'


def guild_banner_url_as(guild, ext=None, size=None):
    """
    Returns the guild's banner's image's url. If the guild has no banner, then returns `None`.

    This function is a shared method of ``Guild``-s.

    Parameters
    ----------
    ext : `None`, `str` = `None`, Optional
        The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`, `'gif'`.
    size : `None`, `int` = `None`, Optional
        The preferred minimal size of the image's url.

    Returns
    -------
    url : `None`, `str`

    Raises
    ------
    ValueError
        If `ext`, `size` was not passed as any of the expected values.
    """
    icon_type = guild.banner_type
    if icon_type is ICON_TYPE_NONE:
        return None

    if size is None:
        end = ''
    elif size in VALID_ICON_SIZES:
        end = f'?size={size}'
    else:
        raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
```
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/banners/{guild.id}/{prefix}{guild.banner_hash:0>32x}.{ext}{end}'
def guild_widget_url(guild, style='shield'):
"""
Returns the guild's widget image's url in `.png` format.
This function is a shared method of ``Guild``, ``GuildPreview``.
Parameters
----------
style : `str` = `'shield'`, Optional
The widget image's style. Can be any of: `'shield'`, `'banner1'`, `'banner2'`, `'banner3'`, `'banner4'`.
Returns
-------
url : `str`
Raises
------
ValueError
If `style` was not passed as any of the expected values.
"""
if STYLE_PATTERN.match(style) is None:
raise ValueError(f'Invalid style: {style!r}')
return f'{API_ENDPOINT}/guilds/{guild.id}/widget.png?style={style}'
def guild_widget_json_url(guild):
"""
Returns a url to request a ``Guild``'s widget data.
This function is a shared property of ``Guild``, ``GuildPreview``.
Returns
-------
url : `str`
"""
return f'{API_ENDPOINT}/guilds/{guild.id}/widget.json'
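# Added illustrative sketch (not part of the library): the widget helpers
# build api (not cdn) urls; `style` must be one of the values listed above or
# ``ValueError`` is raised.
def _example_guild_widget_urls(guild):
    shield_url = guild_widget_url(guild)                   # defaults to 'shield'
    banner_url = guild_widget_url(guild, style='banner2')  # 'banner1' .. 'banner4'
    data_url = guild_widget_json_url(guild)                # widget data as json
    return shield_url, banner_url, data_url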
def channel_group_icon_url(channel):
"""
Returns the group channel's icon's image's url. If the channel has no icon, then returns `None`.
This function is a shared property of ``ChannelGroup``-s.
Returns
-------
url : `None`, `str`
"""
icon_type = channel.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/channel-icons/{channel.id}/{prefix}{channel.icon_hash:0>32x}.{ext}'
def channel_group_icon_url_as(channel, ext=None, size=None):
"""
Returns the group channel's icon's image's url. If the channel has no icon, then returns `None`.
This function is a shared method of ``ChannelGroup``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the image's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = channel.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/channel-icons/{channel.id}/{prefix}{channel.icon_hash:0>32x}.{ext}{end}'
def emoji_url(emoji):
"""
Returns the emoji's image's url. If the emoji is a unicode emoji, then returns `None` instead.
This function is a shared property of ``Emoji``-s.
Returns
-------
url : `None`, `str`
"""
if emoji.is_unicode_emoji():
return None
if emoji.animated:
ext = 'gif'
else:
ext = 'png'
return f'{CDN_ENDPOINT}/emojis/{emoji.id}.{ext}'
def emoji_url_as(emoji, ext=None, size=None):
"""
Returns the emoji's image's url. If the emoji is a unicode emoji, then returns `None` instead.
This function is a shared method of ``Emoji``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`. If the emoji is
animated, it can be `'gif'` as well.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the image's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
if emoji.is_unicode_emoji():
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if emoji.animated:
ext = 'gif'
else:
ext = 'png'
else:
if emoji.animated:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
else:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
return f'{CDN_ENDPOINT}/emojis/{emoji.id}.{ext}{end}'
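# Added illustrative sketch (not part of the library): unicode emojis have no
# cdn url, so the result must be checked for `None`; `128` is assumed to be
# within `VALID_ICON_SIZES`.
def _example_emoji_image_url(emoji):
    ext = 'gif' if emoji.animated else 'png'
    return emoji_url_as(emoji, ext=ext, size=128)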
def webhook_url(webhook):
"""
Returns the webhook's url.
This function is a shared property of ``Webhook``-s.
Returns
-------
url : `str`
"""
return f'{API_ENDPOINT}/webhooks/{webhook.id}/{webhook.token}'
WEBHOOK_URL_PATTERN = re.compile(
    # Raw string so `\d` is a regex digit class, and the domain dot escaped so
    # it no longer matches any character.
    r'(?:https://)?discord(?:app)?\.com/api/(?:v\d+/)?webhooks/([0-9]{17,21})/([a-zA-Z0-9\.\-\_%]{60,68})(?:/.*)?'
)
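# Added illustrative sketch (not part of the library): pulling a webhook's id
# and token back out of a url with the pattern above.
def _example_parse_webhook_url(url):
    matched = WEBHOOK_URL_PATTERN.fullmatch(url)
    if matched is None:
        return None
    webhook_id, webhook_token = matched.groups()
    return int(webhook_id), webhook_token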
def invite_url(invite):
"""
Returns the invite's url.
This function is a shared property of ``Invite``-s.
Returns
-------
url : `str`
"""
return f'http://discord.gg/{invite.code}'
INVITE_URL_RP = re.compile(r'(?:https?://)?discord(?:\.gg|(?:app)?\.com/invite)/([a-zA-Z0-9-]+)')
def activity_asset_image_large_url(activity):
"""
Returns the activity's large asset image's url. If the activity has no large asset image, then returns `None`.
This function is a shared property of ``ActivityRich``-s.
Returns
-------
url : `None`, `str`
"""
application_id = activity.application_id
if not application_id:
return None
assets = activity.assets
if assets is None:
return None
image_large = assets.image_large
if image_large is None:
return None
return f'{CDN_ENDPOINT}/app-assets/{application_id}/{image_large}.png'
def activity_asset_image_large_url_as(activity, ext=None, size=None):
"""
Returns the activity's large asset image's url. If the activity has no large asset image, then returns `None`.
This function is a shared method of ``ActivityRich``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the image's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
application_id = activity.application_id
if not application_id:
return None
assets = activity.assets
if assets is None:
return None
image_large = assets.image_large
if image_large is None:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
# `ext` defaults to png; previously the `None` default slipped straight into
# the membership check and raised.
if ext is None:
    ext = 'png'
elif ext not in VALID_ICON_FORMATS:
    raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
return f'{CDN_ENDPOINT}/app-assets/{application_id}/{image_large}.{ext}{end}'
def activity_asset_image_small_url(activity):
"""
Returns the activity's small asset image's url. If the activity has no small asset image, then returns `None`.
This function is a shared property of ``ActivityRich``-s.
Returns
-------
url : `None`, `str`
"""
application_id = activity.application_id
if not application_id:
return None
assets = activity.assets
if assets is None:
return None
image_small = assets.image_small
if image_small is None:
return None
return f'{CDN_ENDPOINT}/app-assets/{application_id}/{image_small}.png'
def activity_asset_image_small_url_as(activity, ext=None, size=None):
"""
Returns the activity's small asset image's url. If the activity has no small asset image, then returns `None`.
This function is a shared method of ``ActivityRich``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the image's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
application_id = activity.application_id
if not application_id:
return None
assets = activity.assets
if assets is None:
return None
image_small = assets.image_small
if image_small is None:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
# `ext` defaults to png; previously the `None` default slipped straight into
# the membership check and raised.
if ext is None:
    ext = 'png'
elif ext not in VALID_ICON_FORMATS:
    raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
return f'{CDN_ENDPOINT}/app-assets/{application_id}/{image_small}.{ext}{end}'
def user_avatar_url(user):
"""
Returns the user's avatar's url. If the user has no avatar, then returns their default avatar's url.
This function is a shared property of ``UserBase``-s.
Returns
-------
url : `None`, `str`
"""
icon_type = user.avatar_type
if icon_type is ICON_TYPE_NONE:
return user.default_avatar.url
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/avatars/{user.id}/{prefix}{user.avatar_hash:0>32x}.{ext}'
def user_avatar_url_as(user, ext=None, size=None):
"""
Returns the user's avatar's url. If the user has no avatar, then returns their default avatar's url.
This function is a shared method of ``UserBase``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`. If the user has an
animated avatar, it can be `'gif'` as well.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the avatar's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = user.avatar_type
if icon_type is ICON_TYPE_NONE:
return user.default_avatar.url
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/avatars/{user.id}/{prefix}{user.avatar_hash:0>32x}.{ext}{end}'
def user_banner_url(user):
"""
Returns the user's banner's url. If the user has no banner, then returns `None`.
This function is a shared property of ``UserBase``-s.
Returns
-------
url : `None`, `str`
"""
icon_type = user.banner_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/banners/{user.id}/{prefix}{user.banner_hash:0>32x}.{ext}'
def user_banner_url_as(user, ext=None, size=None):
"""
Returns the user's banner's url. If the user has no banner, then returns `None`.
This function is a shared method of ``UserBase``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`. If the user has an
animated banner, it can be `'gif'` as well.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the avatar's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = user.banner_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/banners/{user.id}/{prefix}{user.banner_hash:0>32x}.{ext}{end}'
def user_avatar_url_for(user, guild):
"""
Returns the user's guild specific avatar's url. If the user has no guild specific avatar, returns `None`.
This function is a shared method of ``UserBase``-s.
Parameters
----------
guild : ``Guild``, `int`
The respective guild or its identifier.
Returns
-------
url : `None`, `str`
"""
guild_id = _try_get_guild_id(guild)
try:
guild_profile = user.guild_profiles[guild_id]
except KeyError:
return None
icon_type = guild_profile.avatar_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/guilds/{guild_id}/users/{user.id}/avatars/{prefix}{guild_profile.avatar_hash:0>32x}.{ext}'
def user_avatar_url_for_as(user, guild, ext=None, size=None):
"""
Returns the user's guild specific avatar's url. If the user has no guild specific avatar, then returns `None`.
This function is a shared method of ``UserBase``-s.
Parameters
----------
guild : ``Guild``, `int`
The respective guild or its identifier.
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`. If the user has an
animated avatar, it can be `'gif'` as well.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the avatar's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
guild_id = _try_get_guild_id(guild)
try:
guild_profile = user.guild_profiles[guild_id]
except KeyError:
return None
icon_type = guild_profile.avatar_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/guilds/{guild_id}/users/{user.id}/avatars/{prefix}{guild_profile.avatar_hash:0>32x}.' \
f'{ext}{end}'
def user_avatar_url_at(user, guild):
"""
Returns the user's avatar's url at the guild, falling back to their global, then default avatar's url.
This function is a shared method of ``UserBase``-s.
Parameters
----------
guild : ``Guild``, `int`
The respective guild or its identifier.
Returns
-------
url : `None`, `str`
"""
avatar_url = user_avatar_url_for(user, guild)
if avatar_url is None:
avatar_url = user_avatar_url(user)
return avatar_url
def user_avatar_url_at_as(user, guild, ext=None, size=None):
"""
Returns the user's avatar's url at the guild. If the user has no avatar, then returns their default avatar's url.
This function is a shared method of ``UserBase``-s.
Parameters
----------
guild : ``Guild``, `int`
The respective guild or its identifier.
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`. If the user has an
animated avatar, it can be `'gif'` as well.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the avatar's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
avatar_url = user_avatar_url_for_as(user, guild, ext=ext, size=size)
if avatar_url is None:
avatar_url = user_avatar_url_as(user, ext=ext, size=size)
return avatar_url
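# Added illustrative sketch (not part of the library): the `_at` helpers form
# a fallback chain of guild avatar -> global avatar -> default avatar, so one
# call covers all three cases; `1024` is assumed to be within
# `VALID_ICON_SIZES`, and `'png'` is valid for both static and animated icons.
def _example_member_avatar_url(user, guild):
    return user_avatar_url_at_as(user, guild, ext='png', size=1024)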
def default_avatar_url(default_avatar):
"""
Returns the user's default avatar's url.
This function is a shared property of ``UserBase``-s.
Returns
-------
url : `str`
"""
return f'{CDN_ENDPOINT}/embed/avatars/{default_avatar.value}.png'
def application_icon_url(application):
"""
Returns the application's icon's url. If the application has no icon, then returns `None`.
This function is a shared property of ``Application``, ``MessageApplication``, ``IntegrationApplication``.
Returns
-------
url : `None`, `str`
"""
icon_type = application.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/app-icons/{application.id}/{prefix}{application.icon_hash:0>32x}.{ext}'
def application_icon_url_as(application, ext=None, size=None):
"""
Returns the application's icon's url. If the application has no icon, then returns `None`.
This function is a shared method of ``Application``, ``MessageApplication``, ``IntegrationApplication``.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the icon's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the icon's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = application.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/app-icons/{application.id}/{prefix}{application.icon_hash:0>32x}.{ext}{end}'
def application_cover_url(application):
"""
Returns the application's cover image's url. If the application has no cover image, then returns `None`.
This function is a shared property of ``Application``, ``MessageApplication``.
Returns
-------
url : `None`, `str`
"""
icon_type = application.cover_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/app-assets/{application.id}/store/{prefix}{application.cover_hash:0>32x}.{ext}'
def application_cover_url_as(application, ext=None, size=None):
"""
Returns the application's cover image's url. If the application has no cover image, then returns `None`.
This function is a shared method of ``Application``, ``MessageApplication``.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the cover's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the cover's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = application.cover_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/app-assets/{application.id}/store/{prefix}{application.cover_hash:0>32x}.{ext}{end}'
def team_icon_url(team):
"""
Returns the team's icon's url. If the team has no icon, then returns `None`.
This function is a shared property of ``Team``-s.
Returns
-------
url : `None`, `str`
"""
icon_type = team.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/team-icons/{team.id}/{prefix}{team.icon_hash:0>32x}.{ext}'
def team_icon_url_as(team, ext=None, size=None):
"""
Returns the team's icon's url. If the team has no icon, then returns `None`.
This function is a shared method of ``Team``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the icon's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the icon's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = team.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/team-icons/{team.id}/{prefix}{team.icon_hash:0>32x}.{ext}{end}'
def achievement_icon_url(achievement):
"""
Returns the achievement's icon's url. If the achievement has no icon, then returns `None`.
This function is a shared property of ``Achievement``-s.
Returns
-------
url : `None`, `str`
"""
icon_type = achievement.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/app-assets/{achievement.application_id}/achievements/{achievement.id}/icons/{prefix}' \
f'{achievement.icon_hash:0>32x}.{ext}'
def achievement_icon_url_as(achievement, ext=None, size=None):
"""
Returns the achievement's icon's url. If the achievement has no icon, then returns `None`.
This function is a shared method of ``Achievement``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the icon's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the icon's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = achievement.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/app-assets/{achievement.application_id}/achievements/{achievement.id}/icons/{prefix}' \
f'{achievement.icon_hash:0>32x}.{ext}{end}'
def sticker_url(sticker):
"""
Returns the sticker's url.
This function is a shared property of ``Sticker``-s.
Returns
-------
url : `None`, `str`
"""
format = sticker.format
if format is StickerFormat.none:
return None
return f'{CDN_ENDPOINT}/stickers/{sticker.id}.{format.extension}'
def sticker_url_as(sticker, size=None, preview=False):
"""
Returns the sticker's url.
This function is a shared method of ``Sticker``-s.
Parameters
----------
size : `None`, `int` = `None`, Optional
The preferred minimal size of the icon's url.
preview : `bool` = `False`, Optional
Whether a preview url should be generated.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `size` was not passed as any of the expected values.
"""
format = sticker.format
if format is StickerFormat.none:
return None
# Resolve size
if size is None:
end = ''
else:
if format is StickerFormat.lottie:
end = ''
else:
if size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
# Resolve preview
if preview:
if format is StickerFormat.apng:
end = f'{end}{"&" if end else "?"}passthrough=false'
return f'{CDN_ENDPOINT}/stickers/{sticker.id}.{format.extension}{end}'
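# Added illustrative sketch (not part of the library): lottie stickers ignore
# `size`, while apng stickers can request their still frame with
# `preview=True`; `128` is assumed to be within `VALID_ICON_SIZES`.
def _example_sticker_preview_url(sticker):
    return sticker_url_as(sticker, size=128, preview=True)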
def sticker_pack_banner(sticker_pack):
"""
Returns the sticker pack banner's url.
This function is a shared property of ``StickerPack``-s.
Returns
-------
url : `None`, `str`
"""
return f'{CDN_ENDPOINT}/app-assets/710982414301790216/store/{sticker_pack.banner_id}.png'
def sticker_pack_banner_as(sticker_pack, ext=None, size=None):
"""
Returns the sticker pack banner's url.
This function is a shared method of ``StickerPack``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the banner's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the banner's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
ext = 'png'
else:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
return f'{CDN_ENDPOINT}/app-assets/710982414301790216/store/{sticker_pack.banner_id}.{ext}{end}'
def role_icon_url(role):
"""
Returns the role's icon's image's url. If the role has no icon, then returns `None`.
This function is a shared property of ``Role``-s.
Returns
-------
url : `None`, `str`
"""
icon_type = role.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/role-icons/{role.id}/{prefix}{role.icon_hash:0>32x}.{ext}'
def role_icon_url_as(role, ext=None, size=None):
"""
Returns the role's icon's image's url. If the role has no icon, then returns `None`.
This function is a shared method of ``Role``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`, `'gif'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the image's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
icon_type = role.icon_type
if icon_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if icon_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if icon_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/role-icons/{role.id}/{prefix}{role.icon_hash:0>32x}.{ext}{end}'
def channel_banner_url(channel):
"""
Returns the channel's banner's url. If the channel has no banner, then returns `None`.
This function is a shared property of ``ChannelText``-s.
Returns
-------
url : `None`, `str`
"""
banner_type = channel.banner_type
if banner_type is ICON_TYPE_NONE:
return None
if banner_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/channel-banners/{channel.id}/{prefix}{channel.banner_hash:0>32x}.{ext}'
def channel_banner_url_as(channel, ext=None, size=None):
"""
Returns the channel's banner's url. If the channel has no banner, then returns `None`.
This function is a shared method of ``ChannelText``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the banner's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the banner's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
banner_type = channel.banner_type
if banner_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if banner_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if banner_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/channel-banners/{channel.id}/{prefix}{channel.banner_hash:0>32x}.{ext}{end}'
def scheduled_event_image_url(scheduled_event):
"""
Returns the scheduled event's image's url. If the scheduled event has no image, then returns `None`.
This function is a property of ``ScheduledEvent``-s.
Returns
-------
url : `None`, `str`
"""
image_type = scheduled_event.image_type
if image_type is ICON_TYPE_NONE:
return None
if image_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
return f'{CDN_ENDPOINT}/guild-events/{scheduled_event.id}/{prefix}{scheduled_event.image_hash:0>32x}.{ext}'
def scheduled_event_image_url_as(scheduled_event, ext=None, size=None):
"""
Returns the scheduled event's image's url. If the scheduled event has no image, then returns `None`.
This function is a method of ``ScheduledEvent``-s.
Parameters
----------
ext : `None`, `str` = `None`, Optional
The extension of the image's url. Can be any of: `'jpg'`, `'jpeg'`, `'png'`, `'webp'`.
size : `None`, `int` = `None`, Optional
The preferred minimal size of the image's url.
Returns
-------
url : `None`, `str`
Raises
------
ValueError
If `ext`, `size` was not passed as any of the expected values.
"""
image_type = scheduled_event.image_type
if image_type is ICON_TYPE_NONE:
return None
if size is None:
end = ''
elif size in VALID_ICON_SIZES:
end = f'?size={size}'
else:
raise ValueError(f'Size must be in {sorted(VALID_ICON_SIZES)!r}, got {size!r}.')
if ext is None:
if image_type is ICON_TYPE_STATIC:
prefix = ''
ext = 'png'
else:
prefix = 'a_'
ext = 'gif'
else:
if image_type is ICON_TYPE_STATIC:
if ext not in VALID_ICON_FORMATS:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS}, got {ext!r}.')
prefix = ''
else:
if ext not in VALID_ICON_FORMATS_EXTENDED:
raise ValueError(f'Extension must be one of {VALID_ICON_FORMATS_EXTENDED}, got {ext!r}.')
prefix = 'a_'
return f'{CDN_ENDPOINT}/guild-events/{scheduled_event.id}/{prefix}{scheduled_event.image_hash:0>32x}.{ext}{end}'
# --- 08_largest_product_in_a_series.py (repo: benjiyo/EulerProject, license: MIT) ---
number = """7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"""
largest_prod = 1
# Slide a 13 digit window across the big number. Valid start indices run up to
# len(number) - 13 inclusive, so range() needs len(number) - 12; the original
# range(len(number) - 13) skipped the final window.
for i in range(len(number) - 12):
    prod = 1
    for digit in number[i:i + 13]:
        prod *= int(digit)
    if prod > largest_prod:
        largest_prod = prod
print(largest_prod)  # the original used Python 2 print statement syntax
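# Cross-check with a more compact formulation (an added sketch, not part of
# the original solution): math.prod (Python 3.8+) over the same 13 digit
# windows.
from math import prod

largest_prod_check = max(
    prod(int(digit) for digit in number[i:i + 13])
    for i in range(len(number) - 12)
)
assert largest_prod_check == largest_prod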
# --- ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v2/queue/__init__.py (repo: Lube-Project/ProgettoLube, license: MIT) ---
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.queue namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.ops.data_flow_ops import FIFOQueue
from tensorflow.python.ops.data_flow_ops import PaddingFIFOQueue
from tensorflow.python.ops.data_flow_ops import PriorityQueue
from tensorflow.python.ops.data_flow_ops import QueueBase
from tensorflow.python.ops.data_flow_ops import RandomShuffleQueue
del _print_function
# --- tests/parser/true_negation.5.test.py (repo: veltri/DLV2, license: Apache-2.0) ---
input = """
x | -f(1).
x | f(1).
"""
output = """
x | -f(1).
x | f(1).
"""
# --- vulnerable_people_form/form_pages/accessibility_statement.py (repo: uk-gov-mirror/alphagov.govuk-shielded-vulnerable-people-service, license: MIT) ---
from .default import app_default
from .shared.render import render_template_with_title
@app_default.route("/accessibility-statement", methods=["GET"])
def get_accessibility_statement():
return render_template_with_title("accessibility.html")
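# Hypothetical usage sketch (added; assumes an application object `app` with
# the `app_default` blueprint registered, which is not created in this file):
#
#     with app.test_client() as client:
#         response = client.get("/accessibility-statement")
#         assert response.status_code == 200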
# --- unicorn_binance_rest_api/__init__.py (repo: flo-rian/unicorn-binance-rest-api, license: MIT) ---
from unicorn_binance_rest_api.unicorn_binance_rest_api_manager import BinanceRestApiManager
# --- tests/transactions/builder/test_delegate_resignation.py (repo: supaiku0/python-crypto, license: MIT) ---
import pytest
@pytest.mark.skip(reason='not implemented')
def test_delegate_resignation_transaction():
"""Test if delegate resignation transaction gets built
"""
pass
# --- detector.py (repo: mouhcineToumi/face-recognition, license: Apache-2.0) ---
import numpy as np
import cv2


# Face cropping from an image, using OpenCV's DNN based SSD face detector.
class FaceDetector:
    def __init__(self, confid=0.5, prototxt="deploy.prototxt.txt", model="res10_300x300_ssd_iter_140000.caffemodel"):
        # Bug fix: the original ignored the `confid` parameter and hard-coded 0.5.
        self.confid = confid
        self.net = cv2.dnn.readNetFromCaffe(prototxt, model)

    # Takes an image array; returns the crop of the most confident detection,
    # or None when nothing clears the confidence threshold (the original fell
    # through to an unbound `face` variable and crashed in that case).
    def get_face2(self, image):
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        (h, w) = image.shape[:2]
        blob = cv2.dnn.blobFromImage(cv2.resize(image, (300, 300)), 1.0, (300, 300), (104.0, 177.0, 123.0))
        # Pass the blob through the network and obtain the detections.
        self.net.setInput(blob)
        detections = self.net.forward()
        best_confidence = 0
        face = None
        # Loop over the detections, keeping only the most confident face.
        for i in range(0, detections.shape[2]):
            # Extract the confidence (i.e., probability) of this prediction.
            confidence = detections[0, 0, i, 2]
            # Filter out weak detections below the minimum confidence.
            if confidence > self.confid and confidence > best_confidence:
                # Compute the (x, y)-coordinates of the bounding box.
                box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
                (startX, startY, endX, endY) = box.astype("int")
                face = image[startY:endY, startX:endX]
                best_confidence = confidence
        if face is None:
            return None
        return cv2.cvtColor(face, cv2.COLOR_BGR2RGB)

    # Takes a path. The original duplicated get_face2 verbatim apart from the
    # imread call, so it now simply delegates.
    def get_face(self, image_path):
        # Load the input image (imread yields BGR) and crop the face from it.
        return self.get_face2(cv2.imread(image_path))
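# Hypothetical usage sketch (added for illustration; "person.jpg" and the
# default caffe model files are assumptions, not shipped with this file):
if __name__ == "__main__":
    detector = FaceDetector(confid=0.6)
    face = detector.get_face("person.jpg")
    if face is None:
        print("no face detected above the confidence threshold")
    else:
        cv2.imwrite("face.jpg", face)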
# --- resumen_declaracion_iva/reports/report_xlsx/report/__init__.py (repo: Chief0-0/Localizacion_ERP_V12, license: Apache-2.0) ---
from . import report_xlsx
from . import report_partner_xlsx
# --- opponents/nugi-chan/nugi_images.py (repo: beatea/SPNI-Prizes, license: MIT) ---
import sys
#colours:
#orange FF813E
#yellow FFDB39
#green 00D791
#test yellow FFE73C
#emotions:
#happy
#calm
#sad
#loss
#interested
#horny
#shocked
#awkward
#
#clothes = shoes, gloves, stockings, top, skirt, bra, panties
#9 total stages
#36**aa7.89.1.28.80.7.89.1.28.80_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10100000_ha89.89_hb49.1.44.99_hc0.59.39.0.59.39_hd0.1.49.49_ia_if2.55.55.55.1.0.0.0.0.0.0.0.0.0_ib0.60.55.0.0.0.0.1.29.55.0.29.55.0.1_id12.55.55.44.0.0.1.0.0.0.0.0.0.20_ic29.60.60.55.0_jc_ie_ja9.55.60.55_jb9.55.60.55_jd7.60.50.50_je7.60.50.50_jf_jg_ka4.61.61.61.0_kb4.61.61.61_kc2.41.0.20.0_kd2.41.0.20.0_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of3.8ACAD3.0.0.0_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og3.222222.56.0_oh3.222222.56.0_oo_op_oq_or_om_on_ok_ol_oi0.55.55.0.0_oj0.55.55.0.0_ad0.0.0.0.0.0.0.0.0.0
version_str = "36**"
def get_emotion_data():
em = dict()
happy = dict()
happy["pose"] = "aa28.43.1.42.47.28.43.1.42.47_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10100000_ha89.89_hb49.1.44.99_hc0.59.39.0.59.39_hd34.1.49.49"
happy["blush_mod"] = 0
em["happy"] = happy
calm = dict()
calm["pose"] = "aa7.91.1.28.51.7.91.1.28.51_ab_ac_ba50_bb5.1_bc188.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10100000_ha89.89_hb49.1.44.99_hc0.59.39.0.59.39_hd0.1.49.49"
calm["blush_mod"] = 0
em["calm"] = calm
sad = dict()
sad["pose"] = "aa6.100.1.16.47.6.100.1.16.47_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10000000_ha89.89_hb49.1.44.99_hc0.59.39.0.59.39_hd12.1.49.49"
sad["blush_mod"] = 0
em["sad"] = sad
loss = dict()
loss["pose"] = "aa6.100.1.16.47.6.100.1.16.47_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10000000_ha89.89_hb49.1.44.99_hc0.59.39.0.59.39_hd36.1.49.49"
loss["blush_mod"] = 0
em["loss"] = loss
intr = dict()
intr["pose"] = "aa31.69.0.16.47.31.69.0.16.47_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10000000_ha89.89_hb49.1.44.99_hc0.59.39.0.59.39_hd17.1.49.49"
intr["blush_mod"] = 1
em["interested"] = intr
horny = dict()
horny["pose"] = "aa9.90.1.16.47.9.90.1.16.47_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.39.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10000000_ha89.89_hb49.1.44.99_hc0.25.39.0.25.39_hd20.1.49.49"
horny["blush_mod"] = 2
em["horny"] = horny
shk = dict()
shk["pose"] = "aa17.94.1.0.47.17.94.1.0.47_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10000000_ha89.89_hb49.1.44.99_hc0.97.39.0.97.39_hd40.1.49.49"
shk["blush_mod"] = 0
em["shocked"] = shk
awk = dict()
awk["pose"] = "aa20.76.1.16.27.20.76.1.16.27_ab_ac_ba50_bb5.1_bc188.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc0.5.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh9.E2D4F2_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc14.0_ge0000000000_gh_gf_gg_gd10000000_ha89.89_hb0.1.0.99_hc0.97.39.0.97.39_hd39.1.49.49" #aa14.48.1.8.64.14.48.1.8.64_ab_ac_ba50_bb5.1_bc150.500.0.0.1_bd5_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.1.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc1.0_ge0000000000_gh_gf_gg_gd10000000_ha89.89_hb49.1.44.99_hc0.97.39.0.97.39_hd44.1.49.49
awk["blush_mod"] = 1
em["awkward"] = awk
return em
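# Each emotion entry pairs a full pose string with "blush_mod", a per-emotion
# offset that make_descriptions() adds to the clothing stage's base blush
# index before clamping it to the blush table.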
def get_image_data():
d = dict()
d["appearance"] = "aa13.97.1.28.40.13.97.1.28.40_ab_ac_ba50_bb17.1_bc150.500.8.0.1_bd17_be180_ca55.0.32.55.34.35.34.0.0_cb0_da1.0.0.100_db_dd9.0.20.50.50_dh1.30.50.50.4_di8_qa_qb_dc1.5.1.1.1_ea32.EAD9EE.EAD9EE.56.0.0_ec_ed2.44.1.1.EAD9EE.56_ef1.2.0.EAD9EE.EAD9EE.56_eg1.2.0.EAD9EE.EAD9EE.56_eh9.11_r061.EAD9EE.EAD9EE.56.0.0.100.100.10.692.536.1_fa24.50.50.50.50.65.56_fb11_fc1.35A0AA.55.1.35A0AA.55.55.103134.103134_fd16.0.26.EAD9EE.56_fe50.61_ff0000000000_fg0.50_t0_pa0.0.0.0.40.50.85.85.0.0_pb_pc_pd_pe_ga0_gb1_gc2.0_ge0000000000_gh_gf_gg_gd10100000_ha89.89_hb49.1.44.99_hc0.59.39.0.59.39_hd0.1.49.49"
d["vagina"] = "dc1.5.1.1.1_eh9.E2D4F2"
d["face"] = "dd9.0.20.50.50"
stages = list()
#lj = love juices
#fully clothed
s0 = {}
s0["blush"] = 0
s0["lj"] = 0
s0["clothes"] = "ia_if2.55.55.55.1.0.0.0.0.0.0.0.0.0_ib0.60.55.0.0.0.0.1.29.55.0.29.55.0.1_id12.55.55.44.0.0.1.0.0.0.0.0.0.20_ic29.60.60.55.0_jc_ie_ja9.55.60.55_jb9.55.60.55_jd7.60.50.50_je7.60.50.50_jf_jg_ka4.61.61.61.0_kb4.61.61.61_kc2.41.0.20.0_kd2.41.0.20.0_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of3.8ACAD3.0.0.0_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og3.222222.56.0_oh3.222222.56.0_oo_op_oq_or_om_on_ok_ol_oi0.55.55.0.0_oj0.55.55.0.0_ad0.0.0.0.0.0.0.0.0.0"
s0["other"] = ""
stages.append(s0)
#lost shoes
s1 = {}
s1["blush"] = 0
s1["lj"] = 0
s1["clothes"] = "ia_if2.55.55.55.1.0.0.0.0.0.0.0.0.0_ib0.60.55.0.0.0.0.1.29.55.0.29.55.0.1_id12.55.55.44.0.0.1.0.0.0.0.0.0.20_ic29.60.60.55.0_jc_ie_ja9.55.60.55_jb9.55.60.55_jd_je_jf_jg_ka4.61.61.61.0_kb4.61.61.61_kc2.41.0.20.0_kd2.41.0.20.0_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of3.8ACAD3.0.0.0_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og3.222222.56.0_oh3.222222.56.0_oo_op_oq_or_om_on_ok_ol_oi0.55.55.0.0_oj0.55.55.0.0_ad0.0.0.0.0.0.0.0.0.0"
s1["other"] = ""
stages.append(s1)
#lost gloves
s2 = {}
s2["blush"] = 0
s2["lj"] = 0
s2["clothes"] = "ia_if2.55.55.55.1.0.0.0.0.0.0.0.0.0_ib0.60.55.0.0.0.0.1.29.55.0.29.55.0.1_id12.55.55.44.0.0.1.0.0.0.0.0.0.20_ic29.60.60.55.0_jc_ie_ja9.55.60.55_jb9.55.60.55_jd_je_jf_jg_ka4.61.61.61.0_kb4.61.61.61_kc2.41.0.20.0_kd2.41.0.20.0_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of3.8ACAD3.0.0.0_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
s2["other"] = ""
stages.append(s2)
#lost stockings
s3 = {}
s3["blush"] = 0
s3["lj"] = 0
s3["clothes"] = "ia_if2.55.55.55.1.0.0.0.0.0.0.0.0.0_ib0.60.55.0.0.0.0.1.29.55.0.29.55.0.1_id12.55.55.44.0.0.1.0.0.0.0.0.0.20_ic29.60.60.55.0_jc_ie_ja_jb_jd_je_jf_jg_ka4.61.61.61.0_kb4.61.61.61_kc2.41.0.20.0_kd2.41.0.20.0_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of3.8ACAD3.0.0.0_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
s3["other"] = ""
stages.append(s3)
#lost top
s4 = {}
s4["blush"] = 1
s4["lj"] = 0
s4["clothes"] = "ia_if_ib_id_ic29.60.60.55.0_jc_ie_ja_jb_jd_je_jf_jg_ka4.61.61.61.0_kb4.61.61.61_kc2.41.0.20.0_kd2.41.0.20.0_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
s4["other"] = ""
stages.append(s4)
#lost skirt
s5 = {}
s5["blush"] = 1
s5["lj"] = 0
s5["clothes"] = "ia_if_ib_id_ic_jc_ie_ja_jb_jd_je_jf_jg_ka4.61.61.61.0_kb4.61.61.61_kc2.41.0.20.0_kd2.41.0.20.0_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
s5["other"] = ""
stages.append(s5)
#lost bra
s6 = {}
s6["blush"] = 2
s6["lj"] = 0
s6["clothes"] = "ia_if_ib_id_ic_jc_ie_ja_jb_jd_je_jf_jg_ka_kb4.61.61.61_kc_kd_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
s6["other"] = ""
stages.append(s6)
#lost panties/nude
    s7 = {}
    s7["blush"] = 3
    s7["lj"] = 0
    s7["clothes"] = "ia_if_ib_id_ic_jc_ie_ja_jb_jd_je_jf_jg_ka_kb_kc_kd_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
    s7["other"] = ""
    stages.append(s7)
#masturbating
    s8 = {}
    s8["blush"] = 6
    s8["lj"] = 50
    s8["clothes"] = "ia_if_ib_id_ic_jc_ie_ja_jb_jd_je_jf_jg_ka_kb_kc_kd_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
    s8["other"] = ""
    stages.append(s8)
#finished
    s9 = {}
    s9["blush"] = 4
    s9["lj"] = 150
    s9["clothes"] = "ia_if_ib_id_ic_jc_ie_ja_jb_jd_je_jf_jg_ka_kb_kc_kd_ke_kf_la4.55.55.0.1_lb_oa_os_ob_oc_od_oe_of_lc_m04.60.3.0.0.1.40.15.66.70.2.61_n0_s0_og_oh_oo_op_oq_or_om_on_ok_ol_oi_oj_ad0.0.0.0.0.0.0.0.0.0"
    s9["other"] = ""
    stages.append(s9)
d["stages"] = stages
blush = list()
blush.append(( 0, 9)) # no blush
blush.append((14, 9)) # lost top
blush.append((27, 0)) # lost bra
blush.append((50, 1)) # lost panties
blush.append((60, 10)) # finished
blush.append((70, 12)) # no blush
blush.append((80, 14)) # masturbating
#want to leave something for heavy masturbating & orgasm
d["blush"] = blush
return d
def make_descriptions(pd, ems, out_filename):
#pd = player data
#ems = emotion data
#get complete vagina description string
def get_v_str(desc, lj):
#desc = vagina description string, lj = love juice level
a, b = desc.split(".", 1)
return "dc" + ("%d." % lj) + b
    #get blush/blue face description string
def get_b_str(blush, blue):
return "gc%d.%d" % (blush, blue)
#get complete face description string
def get_face_str(desc, sticker_type):
a, b = desc.split(".", 1)
return "dd" + ("%d." % sticker_type) + b
with open(out_filename, "w") as f:
#write unique setup string - you need to zoom out a bit so that all of her hair fits in
f.write("nugi-setup=33***bc188.500.0.0.1_ga0*0*0*0*0*0*0*0*0#/]ua1.0.0.0_ub_uc6.0.12_ud7.0\n\n")
for ind, stage in enumerate(pd["stages"]):
if ind == len(pd["stages"]) - 2:
#skip the masturbation stage, all of those are custom images
continue
stage_desc = version_str + pd["appearance"] + "_" + stage["clothes"]
if "other" in stage and len(stage["other"]) > 0:
stage_desc += "_" + stage["other"]
#if ind == 8:
#setup scene for masturbation
#f.write("masturbation-setup=33***bc185.200.0.0.1_ga0*0*0*0*0*0*0*0*0#/]ua1.0.0.0_ub_uc7.0.30_ud7.0\n\n")
#if ind == 9:
#reset scene for finished stage
#f.write("finished-setup=33***bc185.500.0.0.1_ga0*0*0*0*0*0*0*0*0#/]ua1.0.0.0_ub_uc7.0.30_ud7.0\n\n")
for em_name, em in ems.iteritems():
blush_ind = stage["blush"] + em["blush_mod"]
if blush_ind < 0:
blush_ind = 0
if blush_ind >= len(pd["blush"]):
blush_ind = len(pd["blush"]) - 1
blush = pd["blush"][blush_ind]
em_desc = stage_desc + "_" + em["pose"]
em_desc += "_" + get_b_str(blush[0], 0)
#put in the strings that need to be replaced last, so that they don't get overwritten
em_desc += "_" + get_face_str(pd["face"], blush[1])
em_desc += "_" + get_v_str(pd["vagina"], stage["lj"])
image_name = "%d-%s" % (ind, em_name)
f.write("%s=%s\n\n" % (image_name, em_desc))
def write_descriptions(out_name):
character_data = get_image_data()
emotion_data = get_emotion_data()
make_descriptions(character_data, emotion_data, out_name)
if __name__ == "__main__":
write_descriptions(sys.argv[1])
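# Usage sketch (the script name here is hypothetical):
#   python make_poses.py descriptions.txt
# writes the setup record plus every stage/emotion description to the file.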
| 59.722449
| 1,180
| 0.730864
| 3,726
| 14,632
| 2.571122
| 0.098765
| 0.066388
| 0.056054
| 0.054697
| 0.710125
| 0.701253
| 0.697495
| 0.696033
| 0.69238
| 0.69238
| 0
| 0.319224
| 0.069984
| 14,632
| 245
| 1,181
| 59.722449
| 0.384774
| 0.188833
| 0
| 0.066667
| 0
| 0.133333
| 0.738722
| 0.694287
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046667
| false
| 0
| 0.006667
| 0.006667
| 0.086667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d95c957182531b9f37a1e53d02693feea73af0f
| 8,109
|
py
|
Python
|
p_tqdm/tests/tests.py
|
wassname/p_tqdm
|
542416388f276cce02d7ee5d4a1d6abff9520e05
|
[
"MIT"
] | null | null | null |
p_tqdm/tests/tests.py
|
wassname/p_tqdm
|
542416388f276cce02d7ee5d4a1d6abff9520e05
|
[
"MIT"
] | null | null | null |
p_tqdm/tests/tests.py
|
wassname/p_tqdm
|
542416388f276cce02d7ee5d4a1d6abff9520e05
|
[
"MIT"
] | null | null | null |
import unittest
import p_tqdm
def add_1(a):
return a + 1
def add_2(a, b):
return a + b
def add_3(a, b, c):
return a + b + c
def _test_one_list(self):
array = [1, 2, 3]
result = self.func(add_1, array)
if self.generator:
result = list(result)
correct_array = [2, 3, 4]
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
def _test_two_lists(self):
array_1 = [1, 2, 3]
array_2 = [10, 11, 12]
result = self.func(add_2, array_1, array_2)
if self.generator:
result = list(result)
correct_array = [11, 13, 15]
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
def _test_two_lists_and_one_single(self):
array_1 = [1, 2, 3]
array_2 = [10, 11, 12]
single = 5
result = self.func(add_3, array_1, single, array_2)
if self.generator:
result = list(result)
correct_array = [16, 18, 20]
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
def _test_one_list_and_two_singles(self):
array = [1, 2, 3]
single_1 = 5
single_2 = -2
result = self.func(add_3, single_1, array, single_2)
if self.generator:
result = list(result)
correct_array = [4, 5, 6]
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
def _test_one_single(self):
single = 5
result = self.func(add_1, single)
if self.generator:
result = list(result)
correct_array = [6]
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
def _test_one_single_with_num_iter(self):
single = 5
num_iter = 3
result = self.func(add_1, single, num_iter=num_iter)
if self.generator:
result = list(result)
correct_array = [6]*num_iter
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
def _test_two_singles(self):
single_1 = 5
single_2 = -2
result = self.func(add_2, single_1, single_2)
if self.generator:
result = list(result)
correct_array = [3]
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
def _test_two_singles_with_num_iter(self):
single_1 = 5
single_2 = -2
num_iter = 3
result = self.func(add_2, single_1, single_2, num_iter=num_iter)
if self.generator:
result = list(result)
correct_array = [3]*num_iter
if self.ordered:
self.assertEquals(correct_array, result)
else:
self.assertEquals(sorted(correct_array), sorted(result))
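# Each Test* class below binds one p_tqdm mapper and its traits in __init__
# (self.func, self.generator, self.ordered) and forwards its test_* methods to
# the shared helpers above: e.g. p_imap is a lazy, ordered iterator, while
# p_umap eagerly returns results in arbitrary order.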
class Testp_imap(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(Testp_imap, self).__init__(*args, **kwargs)
self.func = p_tqdm.p_imap
self.generator = True
self.ordered = True
def test_one_list(self):
_test_one_list(self)
def test_two_lists(self):
_test_two_lists(self)
def test_two_lists_and_one_single(self):
_test_two_lists_and_one_single(self)
def test_one_list_and_two_singles(self):
_test_one_list_and_two_singles(self)
def test_one_single(self):
_test_one_single(self)
def test_one_single_with_num_iter(self):
_test_one_single_with_num_iter(self)
def test_two_singles(self):
_test_two_singles(self)
def test_two_singles_with_num_iter(self):
_test_two_singles_with_num_iter(self)
class Testp_map(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(Testp_map, self).__init__(*args, **kwargs)
self.func = p_tqdm.p_map
self.generator = False
self.ordered = True
def test_one_list(self):
_test_one_list(self)
def test_two_lists(self):
_test_two_lists(self)
def test_two_lists_and_one_single(self):
_test_two_lists_and_one_single(self)
def test_one_list_and_two_singles(self):
_test_one_list_and_two_singles(self)
def test_one_single(self):
_test_one_single(self)
def test_one_single_with_num_iter(self):
_test_one_single_with_num_iter(self)
def test_two_singles(self):
_test_two_singles(self)
def test_two_singles_with_num_iter(self):
_test_two_singles_with_num_iter(self)
class Testp_uimap(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(Testp_uimap, self).__init__(*args, **kwargs)
self.func = p_tqdm.p_uimap
self.generator = True
self.ordered = False
def test_one_list(self):
_test_one_list(self)
def test_two_lists(self):
_test_two_lists(self)
def test_two_lists_and_one_single(self):
_test_two_lists_and_one_single(self)
def test_one_list_and_two_singles(self):
_test_one_list_and_two_singles(self)
def test_one_single(self):
_test_one_single(self)
def test_one_single_with_num_iter(self):
_test_one_single_with_num_iter(self)
def test_two_singles(self):
_test_two_singles(self)
def test_two_singles_with_num_iter(self):
_test_two_singles_with_num_iter(self)
class Testp_umap(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(Testp_umap, self).__init__(*args, **kwargs)
self.func = p_tqdm.p_umap
self.generator = False
self.ordered = False
def test_one_list(self):
_test_one_list(self)
def test_two_lists(self):
_test_two_lists(self)
def test_two_lists_and_one_single(self):
_test_two_lists_and_one_single(self)
def test_one_list_and_two_singles(self):
_test_one_list_and_two_singles(self)
def test_one_single(self):
_test_one_single(self)
def test_one_single_with_num_iter(self):
_test_one_single_with_num_iter(self)
def test_two_singles(self):
_test_two_singles(self)
def test_two_singles_with_num_iter(self):
_test_two_singles_with_num_iter(self)
class Testt_imap(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(Testt_imap, self).__init__(*args, **kwargs)
self.func = p_tqdm.t_imap
self.generator = True
self.ordered = True
def test_one_list(self):
_test_one_list(self)
def test_two_lists(self):
_test_two_lists(self)
def test_two_lists_and_one_single(self):
_test_two_lists_and_one_single(self)
def test_one_list_and_two_singles(self):
_test_one_list_and_two_singles(self)
def test_one_single(self):
_test_one_single(self)
def test_one_single_with_num_iter(self):
_test_one_single_with_num_iter(self)
def test_two_singles(self):
_test_two_singles(self)
def test_two_singles_with_num_iter(self):
_test_two_singles_with_num_iter(self)
class Testt_map(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(Testt_map, self).__init__(*args, **kwargs)
self.func = p_tqdm.t_map
self.generator = False
self.ordered = True
def test_one_list(self):
_test_one_list(self)
def test_two_lists(self):
_test_two_lists(self)
def test_two_lists_and_one_single(self):
_test_two_lists_and_one_single(self)
def test_one_list_and_two_singles(self):
_test_one_list_and_two_singles(self)
def test_one_single(self):
_test_one_single(self)
def test_one_single_with_num_iter(self):
_test_one_single_with_num_iter(self)
def test_two_singles(self):
_test_two_singles(self)
def test_two_singles_with_num_iter(self):
_test_two_singles_with_num_iter(self)
if __name__ == '__main__':
unittest.main()
| 26.413681
| 68
| 0.681095
| 1,180
| 8,109
| 4.202542
| 0.05339
| 0.079048
| 0.093164
| 0.078645
| 0.961887
| 0.942125
| 0.925791
| 0.915104
| 0.893729
| 0.775156
| 0
| 0.014186
| 0.226292
| 8,109
| 306
| 69
| 26.5
| 0.776219
| 0
| 0
| 0.774892
| 0
| 0
| 0.000987
| 0
| 0
| 0
| 0
| 0
| 0.069264
| 1
| 0.281385
| false
| 0
| 0.008658
| 0.012987
| 0.329004
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0d9688857a9e50f82e5a0cadff02217309032d7a
| 13,910
|
py
|
Python
|
NMF_Heat_PDE.py
|
Jmaihuire/MTH_9821
|
faf921ddbb2875962885ddfc75231889d0cceb3b
|
[
"FSFAP"
] | 9
|
2018-04-13T03:53:54.000Z
|
2022-03-26T17:15:23.000Z
|
NMF_Heat_PDE.py
|
noisyoscillator/MTH_9821
|
faf921ddbb2875962885ddfc75231889d0cceb3b
|
[
"FSFAP"
] | null | null | null |
NMF_Heat_PDE.py
|
noisyoscillator/MTH_9821
|
faf921ddbb2875962885ddfc75231889d0cceb3b
|
[
"FSFAP"
] | 7
|
2018-03-19T09:00:55.000Z
|
2022-01-19T07:26:11.000Z
|
from __future__ import division
import numpy as np
import NMF_linear_solve as lis
import NMF_Iter_solve as its
from Heat_PDE_settings import *
from time import *
np.set_printoptions(precision=12, linewidth=300)
def proj_SOR(alpha, A, b, x0, tol, omega, res_cri = 1, show_ic = False):
N_entry = len(x0)
x = x0.copy()
r = b - np.dot(A, x0)
nr0 = its.norm(r)
r_cri = 1
    while r_cri > tol:
        prevx = x.copy()
        for j in xrange(N_entry):
            # Guard the stencil at both ends: the original read x[j - 1] at
            # j == 0 (which wraps around to x[-1]) and x[j + 1] at the last
            # index (an IndexError); treat out-of-range neighbours as zero.
            left = x[j - 1] if j > 0 else 0.0
            right = x[j + 1] if j < N_entry - 1 else 0.0
            x[j] = (1 - omega) * x[j] + omega * alpha / (2 * (1 + alpha)) * (left + right) + omega / (1 + alpha) * b[j]
        r = b - np.dot(A, x)
        r_cri = its.norm(r) / nr0
        if res_cri != 1:
            r_cri = its.norm(x - prevx)
    return x
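# The sweep above is standard SOR for the Crank-Nicolson tridiagonal system
#   (1 + alpha) * x[j] - (alpha / 2) * (x[j-1] + x[j+1]) = b[j],
# i.e. x[j] <- (1 - omega) * x[j] + (omega / a_jj) * (b[j] - sum_{k != j} a_jk * x[k])
# with diagonal a_jj = 1 + alpha and off-diagonals -alpha / 2.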
def PDE_Forward_Euler(x_left, x_right, tau_final, f, g_left, g_right, M, N):
'''
Forward Euler solution for heat pde u_tau = u_xx
:param x_left: left boundary of the interval
:param x_right: right boundary of the interval
:param tau_final: upper boundary of the interval
:param f: lower boundary condition
:param g_left: left boundary condition
:param g_right: right boundary condition
:return: the discrete nodes x and solution at (x_i, tau_final) for each x_i in x
'''
# Discretization settings
dx = (x_right - x_left) / N
dtau = tau_final / M
alpha = dtau / (dx ** 2)
# print alpha
x = np.linspace(x_left, x_right, N+1)
tau = np.linspace(0, tau_final, M+1)
u_approx = np.zeros([M+1, N+1])
# Plug in boundary conditions
# u_approx[0, :] = np.apply_along_axis(f, 0, x)
for c in xrange(N+1):
u_approx[0, c] = f(x[c])
u_approx[:, 0] = np.apply_along_axis(g_left, 0, tau)
u_approx[:, N] = np.apply_along_axis(g_right, 0, tau)
# Execute Forward Euler
for m in xrange(1, M+1):
for n in xrange(1, N):
u_approx[m, n] = alpha * u_approx[m-1, n-1] - (2*alpha - 1) * u_approx[m-1, n] + alpha * u_approx[m-1, n+1]
return u_approx, x, tau
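# The inner update is the explicit FTCS stencil
#   u[m, n] = alpha * u[m-1, n-1] + (1 - 2 * alpha) * u[m-1, n] + alpha * u[m-1, n+1]
# (note -(2 * alpha - 1) == 1 - 2 * alpha); it is stable only for alpha <= 1/2,
# which is why the (commented) experiments in __main__ pick M from N and a
# target alpha.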
def PDE_Forward_Euler_Amer(x_left, x_right, tau_final, f, g_left, g_right, M, N, opt, r):
'''
Forward Euler solution for heat pde u_tau = u_xx, with the American early exercise premium
:param x_left: left boundary of the interval
:param x_right: right boundary of the interval
:param tau_final: upper boundary of the interval
:param f: lower boundary condition
:param g_left: left boundary condition
:param g_right: right boundary condition
:return: the discrete nodes x and solution at (x_i, tau_final) for each x_i in x
'''
# Discretization settings
dx = (x_right - x_left) / N
dtau = tau_final / M
alpha = dtau / (dx ** 2)
# print alpha
x = np.linspace(x_left, x_right, N+1)
tau = np.linspace(0, tau_final, M+1)
u_approx = np.zeros([M+1, N+1])
# Plug in boundary conditions
# u_approx[0, :] = np.apply_along_axis(f, 0, x)
for c in xrange(N+1):
u_approx[0, c] = f(x[c])
u_approx[:, 0] = np.apply_along_axis(g_left, 0, tau)
u_approx[:, N] = np.apply_along_axis(g_right, 0, tau)
# Execute Forward Euler
for m in xrange(1, M+1):
for n in xrange(1, N):
u_approx[m, n] = alpha * u_approx[m-1, n-1] - (2*alpha - 1) * u_approx[m-1, n] + alpha * u_approx[m-1, n+1]
S0, K, T, q, sigma = opt.spot, opt.strike, opt.maturity, opt.div_rate, opt.vol
a = (r - q) / sigma ** 2 - 1 / 2
b = ((r - q) / sigma ** 2 + 1 / 2) ** 2 + 2 * q / sigma ** 2
tau_m = m * dtau
x_n = x_left + n * dx
if opt.cp == "P":
early_ex_premium = K * np.exp(a * x_n + b * tau_m) * max(1 - np.exp(x_n), 0)
else:
early_ex_premium = K * np.exp(a * x_n + b * tau_m) * max(np.exp(x_n) - 1, 0)
u_approx[m, n] = np.maximum(u_approx[m, n], early_ex_premium)
return u_approx, x, tau
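# The obstacle K * exp(a * x + b * tau) * max(+/-(exp(x) - 1), 0) used above is
# the option payoff carried through the usual Black-Scholes -> heat-equation
# change of variables (with a and b as defined in the code); taking the
# pointwise maximum enforces the American early-exercise constraint.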
def PDE_Backward_Euler(x_left, x_right, tau_final, f, g_left, g_right, M, N, solver='LU'):
'''
    Backward Euler solution for heat pde u_tau = u_xx
:param x_left: left boundary of the interval
:param x_right: right boundary of the interval
:param tau_final: upper boundary of the interval
:param f: lower boundary condition
:param g_left: left boundary condition
:param g_right: right boundary condition
:return: the discrete nodes x and solution at (x_i, tau_final) for each x_i in x
'''
# Discretization settings
dx = (x_right - x_left) / N
dtau = tau_final / M
alpha = dtau / (dx ** 2)
x = np.linspace(x_left, x_right, N+1)
tau = np.linspace(0, tau_final, M+1)
u_approx = np.zeros([M+1, N+1])
# Plug in boundary conditions
# u_approx[0, :] = np.apply_along_axis(f, 0, x)
for c in xrange(N+1):
u_approx[0, c] = f(x[c])
u_approx[:, 0] = np.apply_along_axis(g_left, 0, tau)
u_approx[:, N] = np.apply_along_axis(g_right, 0, tau)
# Execute Backward Euler
# Initialize the tri-diagonal matrix A
A = np.zeros([N - 1, N - 1])
A[0, 0], A[0, 1] = 1 + 2 * alpha, - alpha
for i in xrange(1, N - 2):
A[i, i - 1], A[i, i], A[i, i + 1] = -alpha, 1 + 2 * alpha, -alpha
A[N - 2, N - 3], A[N - 2, N - 2] = -alpha, 1 + 2 * alpha
if solver == 'LU':
# LU decomposition
[L, U] = lis.lu_no_pivoting_banded(A, 2)
# Solve linear system
for m in xrange(1, M+1):
b = u_approx[m-1, 1:N].copy()
b[0] += u_approx[m, 0] * alpha
b[-1] += u_approx[m, -1] * alpha
b = np.reshape(b, (N-1,1))
y = lis.forward_subst_banded(L, 2, b)
u_approx[m, 1:N] = np.reshape(lis.backward_subst_banded(U, 2, y), N-1)
elif solver == "SOR":
for m in xrange(1, M+1):
b = u_approx[m - 1, 1:N].copy()
b[0] += u_approx[m, 0] * alpha
b[-1] += u_approx[m, -1] * alpha
b = np.reshape(b, (N - 1, 1))
u_approx[m, 1:N] = np.reshape(its.SOR_iter_banded(A, 2, b, np.reshape(u_approx[m-1, 1:N], (N-1, 1)), tol=10**(-6), omega=1.2), N - 1)
return u_approx, x, tau
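# Backward Euler solves the tridiagonal system A * u^m = u^{m-1} + alpha *
# (boundary terms) at every time step; being implicit, it is unconditionally
# stable, so alpha is not restricted as in the explicit scheme above.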
def PDE_Crank_Nicolson(x_left, x_right, tau_final, f, g_left, g_right, M, N, solver='LU'):
'''
    Crank-Nicolson solution for heat pde u_tau = u_xx
:param x_left: left boundary of the interval
:param x_right: right boundary of the interval
:param tau_final: upper boundary of the interval
:param f: lower boundary condition
:param g_left: left boundary condition
:param g_right: right boundary condition
:return: the discrete nodes x and solution at (x_i, tau_final) for each x_i in x
'''
# Discretization settings
dx = (x_right - x_left) / N
dtau = tau_final / M
alpha = dtau / (dx ** 2)
x = np.linspace(x_left, x_right, N+1)
tau = np.linspace(0, tau_final, M+1)
u_approx = np.zeros([M+1, N+1])
# Plug in boundary conditions
# u_approx[0, :] = np.apply_along_axis(f, 0, x)
for c in xrange(N+1):
u_approx[0, c] = f(x[c])
u_approx[:, 0] = np.apply_along_axis(g_left, 0, tau)
u_approx[:, N] = np.apply_along_axis(g_right, 0, tau)
# Execute Crank Nicolson
# Initialize the tri-diagonal matrix A
A = np.zeros([N-1, N-1])
A[0, 0], A[0, 1] = 1 + alpha, - alpha / 2
for i in xrange(1, N-2):
A[i, i - 1], A[i, i], A[i, i + 1] = -alpha / 2, 1 + alpha, -alpha / 2
A[N-2, N-3], A[N-2, N-2] = -alpha / 2, 1 + alpha
# Initialize tri-diagonal matrix B
B = np.zeros([N-1, N-1])
B[0, 0], B[0, 1] = 1 - alpha, alpha / 2
for i in xrange(1, N-2):
B[i, i-1], B[i, i], B[i, i+1] = alpha / 2, 1 - alpha, alpha / 2
B[N-2, N-3], B[N-2, N-2] = alpha / 2, 1 - alpha
# LU decomposition
if solver == 'LU':
[L, U] = lis.lu_no_pivoting_banded(A, 2)
# Solve linear system
for m in xrange(1, M+1):
b = np.dot(B, np.reshape(u_approx[m-1, 1:N], (N-1, 1)))
b[0] += (u_approx[m, 0] + u_approx[m-1, 0]) * alpha / 2
b[-1] += (u_approx[m, -1] + u_approx[m-1, -1]) * alpha / 2
y = lis.forward_subst_banded(L, 2, b)
u_approx[m, 1:N] = np.reshape(lis.backward_subst_banded(U, 2, y), N-1)
elif solver == 'SOR':
for m in xrange(1, M + 1):
b = np.dot(B, np.reshape(u_approx[m - 1, 1:N], (N - 1, 1)))
b[0] += (u_approx[m, 0] + u_approx[m - 1, 0]) * alpha / 2
b[-1] += (u_approx[m, -1] + u_approx[m - 1, -1]) * alpha / 2
u_approx[m, 1:N] = np.reshape(its.SOR_iter_banded(A, 2, b, np.reshape(u_approx[m-1, 1:N], (N-1, 1)), tol=10 ** (-8), res_cri=0, omega=1.2), N - 1)
# u_approx[m, 1:N] = np.reshape(its.SOR_iter(A, b, np.reshape(u_approx[m-1, 1:N], (N-1, 1)), tol=10 ** (-8), res_cri=0, omega=1.2), N - 1)
return u_approx, x, tau
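# Crank-Nicolson advances via A * u^m = B * u^{m-1} + (alpha / 2) * (boundary
# terms), averaging the explicit and implicit stencils; it is unconditionally
# stable and second-order accurate in both dtau and dx.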
def PDE_Crank_Nicolson_Amer(x_left, x_right, tau_final, f, g_left, g_right, M, N, opt, r, solver='SOR'):
'''
    Crank-Nicolson solution for heat pde u_tau = u_xx, with the American early exercise premium
:param x_left: left boundary of the interval
:param x_right: right boundary of the interval
:param tau_final: upper boundary of the interval
:param f: lower boundary condition
:param g_left: left boundary condition
:param g_right: right boundary condition
:return: the discrete nodes x and solution at (x_i, tau_final) for each x_i in x
'''
S0, K, T, q, sigma = opt.spot, opt.strike, opt.maturity, opt.div_rate, opt.vol
# Discretization settings
dx = (x_right - x_left) / N
dtau = tau_final / M
alpha = dtau / (dx ** 2)
x = np.linspace(x_left, x_right, N+1)
tau = np.linspace(0, tau_final, M+1)
u_approx = np.zeros([M+1, N+1])
# Plug in boundary conditions
# u_approx[0, :] = np.apply_along_axis(f, 0, x)
for c in xrange(N+1):
u_approx[0, c] = f(x[c])
u_approx[:, 0] = np.apply_along_axis(g_left, 0, tau)
u_approx[:, N] = np.apply_along_axis(g_right, 0, tau)
# Execute Crank Nicolson
# Initialize the tri-diagonal matrix A
A = np.zeros([N-1, N-1])
A[0, 0], A[0, 1] = 1 + alpha, - alpha / 2
for i in xrange(1, N-2):
A[i, i - 1], A[i, i], A[i, i + 1] = -alpha / 2, 1 + alpha, -alpha / 2
A[N-2, N-3], A[N-2, N-2] = -alpha / 2, 1 + alpha
# Initialize tri-diagonal matrix B
B = np.zeros([N-1, N-1])
B[0, 0], B[0, 1] = 1 - alpha, alpha / 2
for i in xrange(1, N-2):
B[i, i-1], B[i, i], B[i, i+1] = alpha / 2, 1 - alpha, alpha / 2
B[N-2, N-3], B[N-2, N-2] = alpha / 2, 1 - alpha
#Projected SOR
if solver == 'SOR':
for m in xrange(1, M + 1):
b_vec = np.dot(B, np.reshape(u_approx[m - 1, 1:N], (N - 1, 1)))
b_vec[0] += (u_approx[m, 0] + u_approx[m - 1, 0]) * alpha / 2
b_vec[-1] += (u_approx[m, -1] + u_approx[m - 1, -1]) * alpha / 2
# u_approx[m, 1:N] = np.reshape(its.SOR_iter_banded(A, 2, b_vec, np.reshape(u_approx[m-1, 1:N], (N-1, 1)), tol=10 ** (-8), res_cri=0, omega=1.2), N - 1)
# u_approx[m, 1:N] = np.reshape(its.SOR_iter(A, b_vec, np.reshape(u_approx[m-1, 1:N], (N-1, 1)), tol=10 ** (-8), res_cri=0, omega=1.2), N - 1)
# u_approx[m, 1:N] = np.reshape(proj_SOR(alpha, A, b_vec, np.reshape(u_approx[m-1, 1:N], (N-1, 1)), tol=10 ** (-8), res_cri=0, omega=1.2), N - 1)
tol = 10**(-8)
omega = 1.2
a = (r - q) / sigma ** 2 - 1 / 2
b = ((r - q) / sigma ** 2 + 1 / 2) ** 2 + 2 * q / sigma ** 2
tau_m = m * dtau
            res = 1
            # Warm start from the transformed early-exercise payoff at this
            # time level (the original always used the call form here, even
            # for puts; pick the branch that matches opt.cp).
            if opt.cp == "P":
                u_approx[m, 1:N] = np.array(
                    [K * np.exp(a * (x_left + n * dx) + b * tau_m) * max(1 - np.exp(x_left + n * dx), 0)
                     for n in xrange(1, N)])
            else:
                u_approx[m, 1:N] = np.array(
                    [K * np.exp(a * (x_left + n * dx) + b * tau_m) * max(np.exp(x_left + n * dx) - 1, 0)
                     for n in xrange(1, N)])
            while res > tol:
                # Snapshot the iterate *before* the sweep; the original copied
                # it afterwards, so res was always 0 and the projected SOR
                # loop stopped after a single iteration.
                u_old = u_approx[m, 1:N].copy()
                for n in xrange(1, N):
                    x_n = x_left + n * dx
                    if opt.cp == "P":
                        early_ex_premium = K * np.exp(a * x_n + b * tau_m) * max(1 - np.exp(x_n), 0)
                    else:
                        early_ex_premium = K * np.exp(a * x_n + b * tau_m) * max(np.exp(x_n) - 1, 0)
                    u_approx[m, n] = (1 - omega) * u_approx[m, n] + omega * alpha / (2 * (1 + alpha)) * (u_approx[m, n-1] + u_approx[m, n+1]) + omega / (1 + alpha) * b_vec[n - 1]
                    # Project onto the early-exercise obstacle.
                    u_approx[m, n] = np.maximum(u_approx[m, n], early_ex_premium)
                res = its.norm(u_approx[m, 1:N] - u_old)
return u_approx, x, tau
def max_pointwise_error(u_approx, u_exact):
N = u_approx.shape[0]
return max(abs(u_approx[1:N] - u_exact[1:N]))
def RMS_error(u_approx, u_exact):
r_e = abs(u_approx - u_exact) / abs(u_exact) # relative error
return np.sqrt(np.mean(r_e ** 2))
if __name__ == "__main__":
M, N = 8, 8
# print PDE_Forward_Euler(-2, 2, 1, f, g_left, g_right, M, N)[1]
print PDE_Backward_Euler(-2, 2, 1, f, g_left, g_right, M, N, solver="SOR")[0]
# print PDE_Crank_Nicolson(-2, 2, 1, f, g_left, g_right, M, N, solver='SOR')[0]
#
# alpha = 0.125
# if alpha == 0.125:
# N_lst = [4, 8, 16, 32]
# elif alpha == 0.5:
# N_lst = [8, 16, 32, 64]
# else:
# N_lst = [16, 32, 64, 128]
#
# start = time()
# for N in N_lst:
# M = int((N ** 2) / (16 * alpha))
# # u_approx, x_knot, tau_knot = PDE_Forward_Euler(-2, 2, 1, f, g_left, g_right, M, N)
# # u_approx, x_knot, tau_knot = PDE_Backward_Euler(-2, 2, 1, f, g_left, g_right, M, N, solver='SOR')
# u_approx, x_knot, tau_knot = PDE_Crank_Nicolson(-2, 2, 1, f, g_left, g_right, M, N, solver='SOR')
# u_approx = u_approx[-1, :]
# u_exa = np.apply_along_axis(u_exact_final, 0, x_knot)
# cur = time()
# print max_pointwise_error(u_approx, u_exa), RMS_error(u_approx, u_exa)
# start = cur
| 42.538226
| 178
| 0.554421
| 2,533
| 13,910
| 2.883143
| 0.069483
| 0.092017
| 0.059154
| 0.048062
| 0.874435
| 0.849788
| 0.832945
| 0.819252
| 0.819252
| 0.816103
| 0
| 0.047806
| 0.285694
| 13,910
| 326
| 179
| 42.668712
| 0.687198
| 0.166786
| 0
| 0.696809
| 0
| 0
| 0.003561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.031915
| null | null | 0.010638
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0db543aad0bc96cf969207dc8cdc599717eb6147
| 5,567
|
py
|
Python
|
mygraph.py
|
HongyiZhu/EHI
|
9fbbc6046546dd7fc6de5d831b4c941bc4404e02
|
[
"MIT"
] | null | null | null |
mygraph.py
|
HongyiZhu/EHI
|
9fbbc6046546dd7fc6de5d831b4c941bc4404e02
|
[
"MIT"
] | null | null | null |
mygraph.py
|
HongyiZhu/EHI
|
9fbbc6046546dd7fc6de5d831b4c941bc4404e02
|
[
"MIT"
] | null | null | null |
"""Graph utilities."""
# from time import time
import openne.graph
import networkx as nx
import numpy as np
class Graph_Int(openne.graph.Graph):
def __init__(self):
super(Graph_Int, self).__init__()
def encode_node(self):
super(Graph_Int, self).encode_node()
def read_g(self, g):
super(Graph_Int, self).read_g(g)
def read_adjlist(self, filename):
""" Read graph from adjacency file in which the edge must be unweighted
the format of each line: v1 n1 n2 n3 ... nk
:param filename: the filename of input file
"""
super(Graph_Int, self).read_adjlist(filename)
def read_edgelist(self, filename, weighted=False, directed=False):
self.G = nx.DiGraph()
if directed:
def read_unweighted(l):
_src, _dst = [x for x in l.strip().split()]
src = int(_src)
dst = int(_dst)
self.G.add_edge(src, dst)
self.G[src][dst]['weight'] = 1.0
def read_weighted(l):
_src, _dst, w = [x for x in l.strip().split()]
src = int(_src)
dst = int(_dst)
self.G.add_edge(src, dst)
self.G[src][dst]['weight'] = float(w)
else:
def read_unweighted(l):
_src, _dst = [x for x in l.strip().split()]
src = int(_src)
dst = int(_dst)
self.G.add_edge(src, dst)
self.G.add_edge(dst, src)
self.G[src][dst]['weight'] = 1.0
self.G[dst][src]['weight'] = 1.0
def read_weighted(l):
_src, _dst, w = [x for x in l.strip().split(" ")]
src = int(_src)
dst = int(_dst)
self.G.add_edge(src, dst)
self.G.add_edge(dst, src)
self.G[src][dst]['weight'] = float(w)
self.G[dst][src]['weight'] = float(w)
fin = open(filename, 'r')
func = read_unweighted
if weighted:
func = read_weighted
while 1:
l = fin.readline()
if l == '':
break
func(l)
fin.close()
def read_node_label(self, filename):
super(Graph_Int, self).read_node_label(filename)
def read_node_features(self, filename):
fin = open(filename, 'r')
for l in fin.readlines():
vec = l.split()
n = int(vec[0])
self.G.nodes[n]['feature'] = np.array(
[float(x) for x in vec[1:]])
fin.close()
def read_node_status(self, filename):
super(Graph_Int, self).read_node_status(filename)
def read_edge_label(self, filename):
super(Graph_Int, self).read_edge_label(filename)
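# Minimal usage sketch for Graph_Int ("edges.txt" is a hypothetical file with
# one "src dst [weight]" pair per line):
#   g = Graph_Int()
#   g.read_edgelist("edges.txt", weighted=False, directed=False)
#   g.encode_node()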
class Graph_Str(openne.graph.Graph):
def __init__(self):
super(Graph_Str, self).__init__()
def encode_node(self):
super(Graph_Str, self).encode_node()
def read_g(self, g):
super(Graph_Str, self).read_g(g)
def read_adjlist(self, filename):
""" Read graph from adjacency file in which the edge must be unweighted
the format of each line: v1 n1 n2 n3 ... nk
:param filename: the filename of input file
"""
super(Graph_Str, self).read_adjlist(filename)
def read_edgelist(self, filename, weighted=False, directed=False):
self.G = nx.DiGraph()
if directed:
def read_unweighted(l):
_src, _dst = [x for x in l.split()]
src = str(int(_src))
dst = str(int(_dst))
self.G.add_edge(src, dst)
self.G[src][dst]['weight'] = 1.0
def read_weighted(l):
_src, _dst, w = [x for x in l.split()]
src = str(int(_src))
dst = str(int(_dst))
self.G.add_edge(src, dst)
self.G[src][dst]['weight'] = float(w)
else:
def read_unweighted(l):
_src, _dst = [x for x in l.split()]
src = str(int(_src))
dst = str(int(_dst))
self.G.add_edge(src, dst)
self.G.add_edge(dst, src)
self.G[src][dst]['weight'] = 1.0
self.G[dst][src]['weight'] = 1.0
def read_weighted(l):
_src, _dst, w = [x for x in l.split()]
src = str(int(_src))
dst = str(int(_dst))
self.G.add_edge(src, dst)
self.G.add_edge(dst, src)
self.G[src][dst]['weight'] = float(w)
self.G[dst][src]['weight'] = float(w)
fin = open(filename, 'r')
func = read_unweighted
if weighted:
func = read_weighted
while 1:
l = fin.readline()
if l == '':
break
func(l)
fin.close()
def read_node_label(self, filename):
super(Graph_Str, self).read_node_label(filename)
def read_node_features(self, filename):
fin = open(filename, 'r')
for l in fin.readlines():
vec = l.split()
n = str(int(vec[0]))
self.G.nodes[n]['feature'] = np.array(
[float(x) for x in vec[1:]])
fin.close()
def read_node_status(self, filename):
super(Graph_Str, self).read_node_status(filename)
def read_edge_label(self, filename):
super(Graph_Str, self).read_edge_label(filename)
| 32.940828
| 79
| 0.508532
| 728
| 5,567
| 3.710165
| 0.111264
| 0.071085
| 0.04739
| 0.048871
| 0.958904
| 0.937431
| 0.937431
| 0.937431
| 0.871529
| 0.871529
| 0
| 0.007345
| 0.36411
| 5,567
| 169
| 80
| 32.940828
| 0.75565
| 0.06305
| 0
| 0.827068
| 0
| 0
| 0.017728
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.195489
| false
| 0
| 0.022556
| 0
| 0.233083
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21a1d3057c899cc6d153ba86b7087b6bb7247ef2
| 172
|
py
|
Python
|
muimock/ui/__init__.py
|
bcdev/multiply-uimock
|
3b2b41a71d9085ee2ee01a2ecbc53760e9035bf7
|
[
"MIT"
] | null | null | null |
muimock/ui/__init__.py
|
bcdev/multiply-uimock
|
3b2b41a71d9085ee2ee01a2ecbc53760e9035bf7
|
[
"MIT"
] | null | null | null |
muimock/ui/__init__.py
|
bcdev/multiply-uimock
|
3b2b41a71d9085ee2ee01a2ecbc53760e9035bf7
|
[
"MIT"
] | null | null | null |
# noinspection PyUnresolvedReferences
from .data import data_ui
# noinspection PyUnresolvedReferences
from .exec import exec_ui, job_monitor, Job, JobStatus, JobStatusList
| 34.4
| 69
| 0.848837
| 19
| 172
| 7.526316
| 0.578947
| 0.475524
| 0.531469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104651
| 172
| 4
| 70
| 43
| 0.928571
| 0.412791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
21bd1830d593fa9948b9d7f6b7d15e79dc963c39
| 19,896
|
py
|
Python
|
sitegeist/data/census/models.py
|
parthkhatri1998/sitegeist
|
231b18dfb3a5a0fce32c1c5e01227dcf9bb18010
|
[
"BSD-3-Clause"
] | 15
|
2015-01-29T12:51:14.000Z
|
2021-07-23T17:11:25.000Z
|
sitegeist/data/census/models.py
|
parthkhatri1998/sitegeist
|
231b18dfb3a5a0fce32c1c5e01227dcf9bb18010
|
[
"BSD-3-Clause"
] | 2
|
2015-04-22T17:30:51.000Z
|
2015-08-03T22:05:56.000Z
|
sitegeist/data/census/models.py
|
modern-resilience/sitegeist
|
a0922c61239fad8e5b9ed9c0e423a127a4d5b82c
|
[
"BSD-3-Clause"
] | 10
|
2015-04-10T19:47:03.000Z
|
2022-03-25T06:18:58.000Z
|
from django.db import models
class Tract(models.Model):
state = models.CharField(max_length=2)
county = models.CharField(max_length=3)
tract = models.CharField(max_length=12)
B01001_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_002E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_003E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_004E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_005E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_006E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_007E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_008E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_009E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_010E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_011E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_012E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_013E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_014E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_015E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_016E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_017E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_018E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_019E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_020E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_021E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_022E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_023E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_024E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_025E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_026E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_027E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_028E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_029E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_030E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_031E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_032E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_033E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_034E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_035E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_036E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_037E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_038E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_039E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_040E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_041E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_042E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_043E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_044E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_045E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_046E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_047E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_048E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01001_049E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01002_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B01003_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_002E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_003E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_004E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_005E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_006E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_007E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_008E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_009E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_010E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_011E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_012E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_013E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_014E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_015E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_016E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_017E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_018E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_019E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_020E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_021E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_022E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_023E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_024E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_025E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_026E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_027E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_028E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_029E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_030E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_031E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_032E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_033E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_034E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_035E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_036E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_037E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_038E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_039E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_040E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_041E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_042E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_043E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_044E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_045E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_046E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_047E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_048E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_049E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_050E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_051E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_052E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_053E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_054E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_055E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_056E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_057E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_058E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_059E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_060E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_061E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_062E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_063E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_064E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_065E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_066E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_067E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_068E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_069E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_070E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_071E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_072E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_073E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_074E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_075E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_076E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_077E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_078E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_079E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_080E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_081E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_082E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_083E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_084E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_085E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_086E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_087E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_088E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_089E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_090E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_091E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_092E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_093E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_094E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_095E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_096E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_097E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_098E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_099E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_100E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_101E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_102E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_103E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_104E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_105E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_106E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_107E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B04003_108E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B11005_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B11005_002E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_002E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_003E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_004E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_005E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_006E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_007E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_008E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_009E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_010E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_011E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_012E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_013E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_014E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_015E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_016E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19001_017E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B19013_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
# new as of 2012-12-03
B25058_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B25064_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B25077_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B25003_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B25003_002E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B25003_003E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
# new as of 2012-12-05
B08301_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_002E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_003E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_004E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_005E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_006E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_007E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_008E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_009E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_010E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_011E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_012E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_013E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_014E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_015E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_016E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_017E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_018E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_019E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_020E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
B08301_021E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
# new as of 2012-12-11
B25035_001E = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)
class Meta:
ordering = ('state', 'county', 'tract')
def __unicode__(self):
return u"%s:%s:%s" % (self.state, self.county, self.tract)
| 85.758621
| 93
| 0.773271
| 2,963
| 19,896
| 4.980425
| 0.053662
| 0.25249
| 0.294572
| 0.378736
| 0.964898
| 0.964898
| 0.964898
| 0.964898
| 0.964898
| 0.964898
| 0
| 0.130072
| 0.109318
| 19,896
| 231
| 94
| 86.12987
| 0.702669
| 0.003116
| 0
| 0
| 0
| 0
| 0.00121
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00463
| false
| 0
| 0.00463
| 0.00463
| 0.99537
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
21c248214b4f4f0b33523d42d35cdceb0df530eb
| 918
|
py
|
Python
|
exer009.py
|
fabionunesdeparis/Fundamentos-em-python3
|
b21d06b44d5b18e99419cd06b4e08363c1f7a7ce
|
[
"MIT"
] | null | null | null |
exer009.py
|
fabionunesdeparis/Fundamentos-em-python3
|
b21d06b44d5b18e99419cd06b4e08363c1f7a7ce
|
[
"MIT"
] | null | null | null |
exer009.py
|
fabionunesdeparis/Fundamentos-em-python3
|
b21d06b44d5b18e99419cd06b4e08363c1f7a7ce
|
[
"MIT"
] | null | null | null |
# @Fábio C. Nunes 04/05/2020
# Write a program that reads any integer and shows its multiplication table.
valor = int(input('Digite um valor inteiro: '))
print('-' * 12)
# The original repeated the same increment/print pair eleven times
# (tab = 0..10); this loop produces the identical output.
for tab in range(11):
    print('{} x {:2} = {}'.format(valor, tab, valor * tab))
print('-' * 12)
| 31.655172
| 89
| 0.577342
| 153
| 918
| 3.464052
| 0.215686
| 0.332075
| 0.145283
| 0.269811
| 0.733962
| 0.733962
| 0.733962
| 0.733962
| 0.733962
| 0.733962
| 0
| 0.042714
| 0.132898
| 918
| 28
| 90
| 32.785714
| 0.623116
| 0.124183
| 0
| 0.92
| 0
| 0
| 0.225968
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.52
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
21f834d92e9d8d468cf4ed672e2835cc974723b9
| 21,467
|
py
|
Python
|
backend/initiatives/admin/initiative_admin.py
|
danesjenovdan/izboljsajmo-maribor
|
cd2f388ceb89d7989952ab05154fd8e7341c2b2b
|
[
"CC0-1.0"
] | null | null | null |
backend/initiatives/admin/initiative_admin.py
|
danesjenovdan/izboljsajmo-maribor
|
cd2f388ceb89d7989952ab05154fd8e7341c2b2b
|
[
"CC0-1.0"
] | null | null | null |
backend/initiatives/admin/initiative_admin.py
|
danesjenovdan/izboljsajmo-maribor
|
cd2f388ceb89d7989952ab05154fd8e7341c2b2b
|
[
"CC0-1.0"
] | null | null | null |
from django.contrib import admin
from django import forms
from django.shortcuts import render
from django.contrib.admin import SimpleListFilter
from django.contrib.gis import admin as gis_admin
from django.db.models import Q
from django.utils.translation import gettext as _
from behaviors.models import Published
from initiatives.models import (
Initiative, BothersInitiativeSuper, BothersInitiativeArea, BothersInitiativeAppraiser, BothersInitiativeContractor,
IdeaInitiativeSuper, IdeaInitiativeArea, IdeaInitiativeAppraiser, IdeaInitiativeContractor,
InterestedInitiativeSuper, InterestedInitiativeArea, InterestedInitiativeAppraiser,
ArchivedInitiative, User, Reviwers
)
from initiatives.admin.admin import (DescriptionInline, FileInline, StatusInitiativeHearInline, StatusInitiativeEditingInline,
StatusInitiativeProgressInline, StatusInitiativeFinishedInline, StatusInitiativeDoneInline, StatusInitiativeRejectedInline,
StatusInitiativeHearAdminInline, StatusInitiativeEditingAdminInline, StatusInitiativeProgressAdminInline, StatusInitiativeFinishedAdminInline,
StatusInitiativeRejectedAdminInline, CommentInline
)
import logging
logger = logging.getLogger(__name__)
class PublicFilter(SimpleListFilter):
title = _('is public')
parameter_name = 'public'
def lookups(self, request, model_admin):
# Labels are Slovenian: 'Objavleno' = 'Published', 'Nepregledano' = 'Unreviewed'.
return [('Public', _('Objavleno')), ('Private', _('Nepregledano'))]
def queryset(self, request, queryset):
if self.value() == 'Public':
return queryset.exclude(Q(initiative_statuses__publication_status=Published.DRAFT) | Q(initiative_statuses__publication_status=None))
elif self.value() == 'Private':
return queryset.exclude(initiative_statuses__publication_status=Published.PUBLISHED)
else:
return queryset
class InitiativeAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['author', 'publisher', 'area', 'zone', 'reviewer_user']
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
date_hierarchy = 'created'
readonly_fields = ['status_history', 'created', 'images_preview', 'description']
list_display = [
'id',
'title',
'reviewer',
'author',
'type',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'_is_published',
'_needs_publish'
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeHearInline,
StatusInitiativeEditingInline,
StatusInitiativeProgressInline,
StatusInitiativeFinishedInline,
StatusInitiativeDoneInline,
StatusInitiativeRejectedInline,
CommentInline)
actions = ['printer']
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = _("Print initiatives")
# ---- ZANIMA ME -> interested in
class InterestedInitiativeSuperAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['author', 'publisher', 'area', 'zone', 'reviewer_user']
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
date_hierarchy = 'created'
readonly_fields = ['status_history', 'created', 'images_preview', 'phone_number', 'email', 'description']
exclude = ['is_draft']
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
'_is_published',
'_needs_publish'
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeHearAdminInline,
StatusInitiativeFinishedAdminInline,
StatusInitiativeRejectedAdminInline,
CommentInline)
actions = ['printer']
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
class InterestedAdminForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
idx = Reviwers.get_order().index(self.instance.reviewer)
self.fields['reviewer_user'].queryset = User.objects.filter(
role=Reviwers.get_order()[idx+1],
area=self.instance.area)
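# The form restricts reviewer_user to users holding the next role in
# Reviwers.get_order(), within the same area as the initiative.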
class InterestedInitiativeAreaAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
form = InterestedAdminForm
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['area', 'zone']
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
date_hierarchy = 'created'
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'phone_number', 'email', 'description']
modifiable = False
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeHearInline,
StatusInitiativeFinishedInline,
StatusInitiativeRejectedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
areas = request.user.area.all()
return qs.filter(area__in=areas)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
class InterestedInitiativeAppraiserAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'area', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'reviewer_user', 'reviewer', 'phone_number', 'email', 'description']
exclude = ['publisher', ]
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['area', 'zone']
date_hierarchy = 'created'
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
modifiable = False
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count'
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeFinishedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
areas = request.user.area.all()
return qs.filter(area__in=areas)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
# ---- IDEJA -> idea
class IdeaInitiativeSuperAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['author', 'publisher', 'area', 'zone', 'reviewer_user']
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
date_hierarchy = 'created'
readonly_fields = ['status_history', 'created', 'images_preview', 'phone_number', 'email', 'description']
exclude = ['is_draft']
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
'_is_published',
'_needs_publish'
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeHearAdminInline,
StatusInitiativeEditingAdminInline,
StatusInitiativeFinishedAdminInline,
StatusInitiativeRejectedAdminInline,
CommentInline)
actions = ['printer']
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
class IteaAdminForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
idx = Reviwers.get_order().index(self.instance.reviewer)
self.fields['reviewer_user'].queryset = User.objects.filter(
role=Reviwers.get_order()[idx+1],
area=self.instance.area)
class IdeaInitiativeAreaAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
form = IteaAdminForm
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['area', 'zone']
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
date_hierarchy = 'created'
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'phone_number', 'email', 'description']
modifiable = False
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeHearInline,
StatusInitiativeEditingAdminInline,
StatusInitiativeFinishedInline,
StatusInitiativeRejectedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
areas = request.user.area.all()
return qs.filter(area__in=areas)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = _("Print initiatives")
class IdeaInitiativeAppraiserAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
form = IteaAdminForm
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'area', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'phone_number', 'email', 'description']
exclude = ['publisher', ]
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['area', 'zone']
date_hierarchy = 'created'
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
modifiable = False
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeEditingAdminInline,
StatusInitiativeFinishedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
areas = request.user.area.all()
return qs.filter(area__in=areas)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
class IdeaInitiativeContractorAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'area', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'reviewer_user', 'reviewer', 'phone_number', 'email', 'description']
exclude = ['publisher', ]
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['area', 'zone']
date_hierarchy = 'created'
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
modifiable = False
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeFinishedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.filter(reviewer_user=request.user)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
# ---- MOTI ME -> bothers me
class BothersInitiativeSuperAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['author', 'publisher', 'area', 'zone', 'reviewer_user']
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
date_hierarchy = 'created'
readonly_fields = ['status_history', 'created', 'images_preview', 'phone_number', 'email', 'description']
exclude = ['is_draft']
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
'_is_published',
'_needs_publish'
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeHearAdminInline,
StatusInitiativeEditingAdminInline,
StatusInitiativeFinishedAdminInline,
StatusInitiativeRejectedAdminInline,
CommentInline)
actions = ['printer']
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
class BothersInitiativeForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
idx = Reviwers.get_order().index(self.instance.reviewer)
self.fields['reviewer_user'].queryset = User.objects.filter(
role__in=Reviwers.get_order()[idx+1:],
area=self.instance.area)
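# Unlike the Interested/Idea forms above, which offer only the single next role
# in Reviwers.get_order(), this form offers every later role ([idx+1:]).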
class BothersInitiativeAreaAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
form = BothersInitiativeForm
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['zone', 'area']
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
date_hierarchy = 'created'
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'phone_number', 'email', 'description']
modifiable = False
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeHearInline,
StatusInitiativeEditingInline,
StatusInitiativeFinishedInline,
StatusInitiativeRejectedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
areas = request.user.area.all()
return qs.filter(area__in=areas)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
class BothersInitiativeAppraiserAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
form = BothersInitiativeForm
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'area', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'phone_number', 'email', 'description']
exclude = ['publisher', ]
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['area', 'zone']
date_hierarchy = 'created'
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
modifiable = False
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeEditingInline,
StatusInitiativeFinishedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
areas = request.user.area.all()
return qs.filter(area__in=areas)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
class BothersInitiativeContractorAdmin(gis_admin.OSMGeoAdmin, admin.ModelAdmin):
readonly_fields = ['title', 'type', 'status_history', 'created', 'images_preview', 'author', 'modified', 'area', 'cover_image', 'archived', 'address', 'publisher', 'zone', 'reviewer_user', 'reviewer', 'phone_number', 'email', 'description']
modifiable = False
exclude = ['publisher', ]
search_fields = ['author__username', 'address', 'descriptions__content']
autocomplete_fields = ['area', 'zone']
date_hierarchy = 'created'
list_filter = ['statuses', 'zone__name', 'area__name', 'type', PublicFilter]
list_display = [
'id',
'title',
'author',
'publisher',
'status',
'area',
'zone',
'created',
'comment_count',
'vote_count',
'reviewer',
]
inlines = (
DescriptionInline,
FileInline,
StatusInitiativeFinishedInline,
CommentInline)
actions = ['printer']
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.filter(reviewer_user=request.user)
def printer(self, request, queryset):
return render(request, 'print/initiatives.html', {'initiatives': queryset})
def phone_number(self, obj):
return obj.author.phone_number
def email(self, obj):
return obj.author.email
printer.short_description = "Print initiatives"
admin.site.register(Initiative, InitiativeAdmin)
admin.site.register(ArchivedInitiative)
admin.site.register(InterestedInitiativeSuper, InterestedInitiativeSuperAdmin)
admin.site.register(InterestedInitiativeArea, InterestedInitiativeAreaAdmin)
admin.site.register(InterestedInitiativeAppraiser, InterestedInitiativeAppraiserAdmin)
admin.site.register(IdeaInitiativeSuper, IdeaInitiativeSuperAdmin)
admin.site.register(IdeaInitiativeArea, IdeaInitiativeAreaAdmin)
admin.site.register(IdeaInitiativeAppraiser, IdeaInitiativeAppraiserAdmin)
admin.site.register(IdeaInitiativeContractor, IdeaInitiativeContractorAdmin)
admin.site.register(BothersInitiativeSuper, BothersInitiativeSuperAdmin)
admin.site.register(BothersInitiativeArea, BothersInitiativeAreaAdmin)
admin.site.register(BothersInitiativeAppraiser, BothersInitiativeAppraiserAdmin)
admin.site.register(BothersInitiativeContractor, BothersInitiativeContractorAdmin)
| 32.975422
| 244
| 0.656869
| 1,904
| 21,467
| 7.192752
| 0.091387
| 0.028112
| 0.022782
| 0.028039
| 0.805549
| 0.794304
| 0.790434
| 0.77218
| 0.743264
| 0.734429
| 0
| 0.000179
| 0.219593
| 21,467
| 650
| 245
| 33.026154
| 0.817286
| 0.003214
| 0
| 0.84486
| 0
| 0
| 0.206787
| 0.025661
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091589
| false
| 0
| 0.020561
| 0.06729
| 0.454206
| 0.08972
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df3bd7a348ea03af52a7f5ec17c54b876be0a6c0
| 7,853
|
py
|
Python
|
tests/helpers/TwitterProjectTestEntities.py
|
AdamSwenson/TwitterProject
|
8c5dc7a57eac611b555058736d609f2f204cb836
|
[
"MIT"
] | null | null | null |
tests/helpers/TwitterProjectTestEntities.py
|
AdamSwenson/TwitterProject
|
8c5dc7a57eac611b555058736d609f2f204cb836
|
[
"MIT"
] | 6
|
2020-03-24T17:34:24.000Z
|
2021-12-13T20:14:34.000Z
|
tests/helpers/TwitterProjectTestEntities.py
|
AdamSwenson/TwitterProject
|
8c5dc7a57eac611b555058736d609f2f204cb836
|
[
"MIT"
] | null | null | null |
"""
This holds entities like sample tweets which are used throughout the test suite
"""
test_tweet = {
"_id": "401352093598253056",
"_rev": "2-64861d45d1696f848786975706ad3bf9",
"truncated": 'false',
"text": "I might have vulvodynia. Reminds me of Charlotte #satc #sexology #vulvodynia @ Ganzenplein http://t.co/C1E4pgiu2T",
"in_reply_to_status_id": 'null',
"id": 401352093598253060,
"favorite_count": 0,
"source": "<a href=\"http://instagram.com\" rel=\"nofollow\">Instagram</a>",
"retweeted": 'false',
"entities": {
"symbols": [],
"user_mentions": [],
"hashtags": [
{
"indices": [56, 61],
"text": "satc"
},
{
"indices": [62, 71],
"text": "sexology"
},
{
"indices": [72, 83],
"text": "vulvodynia"
}
],
"urls": [
{
"url": "http://t.co/C1E4pgiu2T",
"indices": [98, 120],
"expanded_url": "http://instagram.com/p/gvPYnCDEPm/",
"display_url": "instagram.com/p/gvPYnCDEPm/"
}
]
},
"in_reply_to_screen_name": 'null',
"in_reply_to_user_id": 'null',
"retweet_count": 0,
"id_str": "401352093598253056",
"favorited": 'false',
"user": {
"id": 319297935,
"verified": 'false',
"entities": {
"url": {
"urls": [
{
"url": "http://t.co/845k6L9q1k",
"indices": [0, 22],
"expanded_url": "http://www.facebook.com/niels.jacobus",
"display_url": "facebook.com/niels.jacobus"
}
]
},
"description": {"urls": []}
},
"followers_count": 69,
"id_str": "319297935",
"utc_offset": 7200,
"statuses_count": 137,
"description": "Student seksuologie @KU_Leuven | KSA",
"friends_count": 81,
"location": "Leuven",
"screen_name": "NielsJacobus",
"lang": "en",
"profile_background_tile": 'false',
"favourites_count": 19,
"name": "Niels ",
"url": "http://t.co/845k6L9q1k",
"created_at": "Fri Jun 17 21:54:43 +0000 2011",
"time_zone": "Athens"
},
"lang": "de",
"created_at": "Fri Nov 15 14:12:50 +0000 2013",
"place": 'null',
"metadata": {
"iso_language_code": "de",
"result_type": "recent"
}
}
EXPECTED_TAGS = ['satc', 'sexology', 'vulvodynia']
EXPECTED_TEXT = "I might have vulvodynia. Reminds me of Charlotte #satc #sexology #vulvodynia @ Ganzenplein http://t.co/C1E4pgiu2T"
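# A minimal sketch of how EXPECTED_TAGS maps onto the fixture above;
# extract_hashtags is a hypothetical helper, named here only for illustration.
def extract_hashtags(tweet):
    """Return hashtag texts from a tweet dict, in API order."""
    return [h['text'] for h in tweet.get('entities', {}).get('hashtags', [])]

assert extract_hashtags(test_tweet) == EXPECTED_TAGS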
"""The portion of the tweet object that will be used in saving user """
USER_DICT = {"user": {
"id": 319297935,
"verified": 'false',
"entities": {
"url": {
"urls": [
{
"url": "http://t.co/845k6L9q1k",
"indices": [0, 22],
"expanded_url": "http://www.facebook.com/niels.jacobus",
"display_url": "facebook.com/niels.jacobus"
}
]
},
"description": {"urls": []}
},
"followers_count": 69,
"id_str": "319297935",
"utc_offset": 7200,
"statuses_count": 137,
"description": "Student seksuologie @KU_Leuven | KSA",
"friends_count": 81,
"location": "Leuven",
"screen_name": "NielsJacobus",
"lang": "en",
"profile_background_tile": 'false',
"favourites_count": 19,
"name": "Niels ",
"url": "http://t.co/845k6L9q1k",
"created_at": "Fri Jun 17 21:54:43 +0000 2011",
"time_zone": "Athens"
}}
""" Has the html items """
test_tweet2 = {"_id": "401352093598253056",
"_rev": "2-64861d45d1696f848786975706ad3bf9",
"truncated": 'false',
"text": "\"I might have vulvodynia\". Reminds me of Charlotte 😷💔📺💊 #satc #sexology #vulvodynia @ Ganzenplein http://t.co/C1E4pgiu2T",
"in_reply_to_status_id": 'null',
"id": 401352093598253060,
"favorite_count": 0,
"source": "<a href=\"http://instagram.com\" rel=\"nofollow\">Instagram</a>",
"retweeted": 'false',
"entities": {
"symbols": [
],
"user_mentions": [
],
"hashtags": [
{
"indices": [
56,
61
],
"text": "satc"
},
{
"indices": [
62,
71
],
"text": "sexology"
},
{
"indices": [
72,
83
],
"text": "vulvodynia"
}
],
"urls": [
{
"url": "http://t.co/C1E4pgiu2T",
"indices": [
98,
120
],
"expanded_url": "http://instagram.com/p/gvPYnCDEPm/",
"display_url": "instagram.com/p/gvPYnCDEPm/"
}
]
},
"in_reply_to_screen_name": 'null',
"in_reply_to_user_id": 'null',
"retweet_count": 0,
"id_str": "401352093598253056",
"favorited": 'false',
"user": {
"id": 319297935,
"verified": 'false',
"entities": {
"url": {
"urls": [
{
"url": "http://t.co/845k6L9q1k",
"indices": [
0,
22
],
"expanded_url": "http://www.facebook.com/niels.jacobus",
"display_url": "facebook.com/niels.jacobus"
}
]
},
"description": {
"urls": [
]
}
},
"followers_count": 69,
"id_str": "319297935",
"utc_offset": 7200,
"statuses_count": 137,
"description": "Student seksuologie @KU_Leuven | KSA",
"friends_count": 81,
"location": "Leuven",
"screen_name": "NielsJacobus",
"lang": "en",
"profile_background_tile": 'false',
"favourites_count": 19,
"name": "Niels ☕",
"url": "http://t.co/845k6L9q1k",
"created_at": "Fri Jun 17 21:54:43 +0000 2011",
"time_zone": "Athens",
},
"lang": "de",
"created_at": "Fri Nov 15 14:12:50 +0000 2013",
"place": 'null',
"metadata": {
"iso_language_code": "de",
"result_type": "recent"
}
}
| 34.143478
| 148
| 0.386094
| 584
| 7,853
| 5.039384
| 0.263699
| 0.030921
| 0.026164
| 0.027183
| 0.928984
| 0.928984
| 0.928984
| 0.928984
| 0.928984
| 0.928984
| 0
| 0.10455
| 0.473832
| 7,853
| 230
| 149
| 34.143478
| 0.606244
| 0
| 0
| 0.660465
| 0
| 0.013953
| 0.374837
| 0.046545
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df6f1f4e5d2804439071ab3be7c52ceab9535434
| 2,596
|
py
|
Python
|
perf-analysis/get_ci_tlb.py
|
DSouzaM/mesh-testsuite
|
8fb4a0a97a81183f0920e55042ea96c4a994ed27
|
[
"Apache-2.0"
] | null | null | null |
perf-analysis/get_ci_tlb.py
|
DSouzaM/mesh-testsuite
|
8fb4a0a97a81183f0920e55042ea96c4a994ed27
|
[
"Apache-2.0"
] | null | null | null |
perf-analysis/get_ci_tlb.py
|
DSouzaM/mesh-testsuite
|
8fb4a0a97a81183f0920e55042ea96c4a994ed27
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
import numpy as np
import sys
from matplotlib import pyplot
import math
df = pd.read_csv(sys.argv[1])
print(df.head())
print('-'*30)
print("dTLB-load-misses statistics")
stats = df.groupby(['memlib'])['dTLB-load-misses'].agg(['mean', 'count', 'std'])
ci95_hi = []
ci95_lo = []
for i in stats.index:
m, c, s = stats.loc[i]
ci95_hi.append(m + 1.96*s/math.sqrt(c))
ci95_lo.append(m - 1.96*s/math.sqrt(c))
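# m +/- 1.96*s/sqrt(c) is the normal-approximation 95% confidence interval for the group mean.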
stats['ci95_hi'] = ci95_hi
stats['ci95_lo'] = ci95_lo
print(stats)
x_values = stats.index.tolist()
yaxis_points = []
yaxis_legend = []
for i in range(len(x_values)):
yaxis_points.append(2*i +1)
yaxis_legend.append(x_values[i])
print(yaxis_legend)
print(yaxis_points)
pyplot.figure()
pyplot.title("dTLB-load-misses")
mean_data = stats.loc[x_values, 'mean'].tolist()
ci95_hi_data = stats.loc[x_values, 'ci95_hi'].tolist()
ci95_lo_data = stats.loc[x_values, 'ci95_lo'].tolist()
# errorbar's xerr is the symmetric +/- extent, so plot half the CI width.
error_data = [(hi - lo) / 2 for (hi, lo) in zip(ci95_hi_data, ci95_lo_data)]
pyplot.errorbar(mean_data, yaxis_points, xerr=error_data, fmt='o', color='k')
yaxis_points.append(yaxis_points[-1] + 1)
yaxis_points.insert(0, 0)
yaxis_legend.append('')
yaxis_legend.insert(0, '')
pyplot.yticks(yaxis_points, yaxis_legend)
pyplot.savefig(f'{sys.argv[1]}-dTLB.png')
print('-'*30)
print("iTLB-load-misses statistics")
stats = df.groupby(['memlib'])['iTLB-load-misses'].agg(['mean', 'count', 'std'])
ci95_hi = []
ci95_lo = []
for i in stats.index:
m, c, s = stats.loc[i]
ci95_hi.append(m + 1.96*s/math.sqrt(c))
ci95_lo.append(m - 1.96*s/math.sqrt(c))
stats['ci95_hi'] = ci95_hi
stats['ci95_lo'] = ci95_lo
print(stats)
x_values = stats.index.tolist()
yaxis_points = []
yaxis_legend = []
for i in range(len(x_values)):
yaxis_points.append(2*i +1)
yaxis_legend.append(x_values[i])
# x_values = df.index.levels[0]
pyplot.figure()
pyplot.title("iTLB-load-misses")
mean_data = stats.loc[x_values, 'mean'].tolist()
ci95_hi_data = stats.loc[x_values, 'ci95_hi'].tolist()
ci95_lo_data = stats.loc[x_values, 'ci95_lo'].tolist()
# As above, xerr takes the half-width of the confidence interval.
error_data = [(hi - lo) / 2 for (hi, lo) in zip(ci95_hi_data, ci95_lo_data)]
pyplot.errorbar(mean_data, yaxis_points, xerr=error_data, fmt='o', color='k')
yaxis_points.append(yaxis_points[-1] + 1)
yaxis_points.insert(0, 0)
yaxis_legend.append('')
yaxis_legend.insert(0, '')
pyplot.yticks(yaxis_points, yaxis_legend)
pyplot.savefig(f'{sys.argv[1]}-iTLB.png')
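# A sketch of the duplicated dTLB/iTLB logic above factored into one helper; the
# name plot_metric_ci is hypothetical and assumes the same CSV layout as the input.
def plot_metric_ci(df, metric, outfile):
    stats = df.groupby(['memlib'])[metric].agg(['mean', 'count', 'std'])
    # Normal-approximation 95% CI half-width per group.
    stats['ci95'] = 1.96 * stats['std'] / np.sqrt(stats['count'])
    labels = stats.index.tolist()
    ys = [2 * i + 1 for i in range(len(labels))]
    pyplot.figure()
    pyplot.title(metric)
    pyplot.errorbar(stats['mean'], ys, xerr=stats['ci95'], fmt='o', color='k')
    pyplot.yticks([0] + ys + [ys[-1] + 1], [''] + labels + [''])
    pyplot.savefig(outfile)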
| 24.490566
| 80
| 0.703775
| 458
| 2,596
| 3.792576
| 0.161572
| 0.11399
| 0.041451
| 0.044905
| 0.864709
| 0.864709
| 0.799079
| 0.753022
| 0.753022
| 0.753022
| 0
| 0.042516
| 0.112096
| 2,596
| 105
| 81
| 24.72381
| 0.711063
| 0.011171
| 0
| 0.794521
| 0
| 0
| 0.104483
| 0.017154
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.068493
| 0
| 0.068493
| 0.150685
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df71ccdf4fab0fcb45599cda87b48ee29207e4d5
| 35,977
|
py
|
Python
|
sdk/python/pulumi_sumologic/cse_threshold_rule.py
|
pulumi/pulumi-sumologic
|
962fa056ee4b96e61a200e7bf2308bfad723c3af
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-10-13T03:50:41.000Z
|
2021-10-13T03:50:41.000Z
|
sdk/python/pulumi_sumologic/cse_threshold_rule.py
|
pulumi/pulumi-sumologic
|
962fa056ee4b96e61a200e7bf2308bfad723c3af
|
[
"ECL-2.0",
"Apache-2.0"
] | 28
|
2021-05-21T11:00:45.000Z
|
2022-03-31T15:47:13.000Z
|
sdk/python/pulumi_sumologic/cse_threshold_rule.py
|
pulumi/pulumi-sumologic
|
962fa056ee4b96e61a200e7bf2308bfad723c3af
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['CseThresholdRuleArgs', 'CseThresholdRule']
@pulumi.input_type
class CseThresholdRuleArgs:
def __init__(__self__, *,
description: pulumi.Input[str],
enabled: pulumi.Input[bool],
entity_selectors: pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]],
expression: pulumi.Input[str],
limit: pulumi.Input[int],
severity: pulumi.Input[int],
window_size: pulumi.Input[str],
count_distinct: Optional[pulumi.Input[bool]] = None,
count_field: Optional[pulumi.Input[str]] = None,
group_by_fields: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_prototype: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
summary_expression: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a CseThresholdRule resource.
:param pulumi.Input[str] description: The description of the generated Signals
:param pulumi.Input[bool] enabled: Whether the rule should generate Signals
:param pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]] entity_selectors: The entities to generate Signals on
:param pulumi.Input[str] expression: The expression for which records to match on
:param pulumi.Input[int] limit: A Signal will be fired when this many records/distinct field values are matched
:param pulumi.Input[int] severity: The severity of the generated Signals
:param pulumi.Input[str] window_size: How long of a window to aggregate records for. Current acceptable values are T05M, T10M, T30M, T60M, T24H, T12H, or T05D.
:param pulumi.Input[bool] count_distinct: Whether to count distinct values of a field, as opposed to just counting the number of records
:param pulumi.Input[str] count_field: The field to count if `count_distinct` is set to true
:param pulumi.Input[Sequence[pulumi.Input[str]]] group_by_fields: A list of fields to group records by
:param pulumi.Input[bool] is_prototype: Whether the generated Signals should be prototype Signals
:param pulumi.Input[str] name: The name of the Rule and the generated Signals
:param pulumi.Input[str] summary_expression: The summary of the generated Signals
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: The tags of the generated Signals
"""
pulumi.set(__self__, "description", description)
pulumi.set(__self__, "enabled", enabled)
pulumi.set(__self__, "entity_selectors", entity_selectors)
pulumi.set(__self__, "expression", expression)
pulumi.set(__self__, "limit", limit)
pulumi.set(__self__, "severity", severity)
pulumi.set(__self__, "window_size", window_size)
if count_distinct is not None:
pulumi.set(__self__, "count_distinct", count_distinct)
if count_field is not None:
pulumi.set(__self__, "count_field", count_field)
if group_by_fields is not None:
pulumi.set(__self__, "group_by_fields", group_by_fields)
if is_prototype is not None:
pulumi.set(__self__, "is_prototype", is_prototype)
if name is not None:
pulumi.set(__self__, "name", name)
if summary_expression is not None:
pulumi.set(__self__, "summary_expression", summary_expression)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def description(self) -> pulumi.Input[str]:
"""
The description of the generated Signals
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: pulumi.Input[str]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def enabled(self) -> pulumi.Input[bool]:
"""
Whether the rule should generate Signals
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: pulumi.Input[bool]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="entitySelectors")
def entity_selectors(self) -> pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]]:
"""
The entities to generate Signals on
"""
return pulumi.get(self, "entity_selectors")
@entity_selectors.setter
def entity_selectors(self, value: pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]]):
pulumi.set(self, "entity_selectors", value)
@property
@pulumi.getter
def expression(self) -> pulumi.Input[str]:
"""
The expression for which records to match on
"""
return pulumi.get(self, "expression")
@expression.setter
def expression(self, value: pulumi.Input[str]):
pulumi.set(self, "expression", value)
@property
@pulumi.getter
def limit(self) -> pulumi.Input[int]:
"""
A Signal will be fired when this many records/distinct field values are matched
"""
return pulumi.get(self, "limit")
@limit.setter
def limit(self, value: pulumi.Input[int]):
pulumi.set(self, "limit", value)
@property
@pulumi.getter
def severity(self) -> pulumi.Input[int]:
"""
The severity of the generated Signals
"""
return pulumi.get(self, "severity")
@severity.setter
def severity(self, value: pulumi.Input[int]):
pulumi.set(self, "severity", value)
@property
@pulumi.getter(name="windowSize")
def window_size(self) -> pulumi.Input[str]:
"""
How long of a window to aggregate records for. Current acceptable values are T05M, T10M, T30M, T60M, T24H, T12H, or T05D.
"""
return pulumi.get(self, "window_size")
@window_size.setter
def window_size(self, value: pulumi.Input[str]):
pulumi.set(self, "window_size", value)
@property
@pulumi.getter(name="countDistinct")
def count_distinct(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to count distinct values of a field, as opposed to just counting the number of records
"""
return pulumi.get(self, "count_distinct")
@count_distinct.setter
def count_distinct(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "count_distinct", value)
@property
@pulumi.getter(name="countField")
def count_field(self) -> Optional[pulumi.Input[str]]:
"""
The field to count if `count_distinct` is set to true
"""
return pulumi.get(self, "count_field")
@count_field.setter
def count_field(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "count_field", value)
@property
@pulumi.getter(name="groupByFields")
def group_by_fields(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of fields to group records by
"""
return pulumi.get(self, "group_by_fields")
@group_by_fields.setter
def group_by_fields(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "group_by_fields", value)
@property
@pulumi.getter(name="isPrototype")
def is_prototype(self) -> Optional[pulumi.Input[bool]]:
"""
Whether the generated Signals should be prototype Signals
"""
return pulumi.get(self, "is_prototype")
@is_prototype.setter
def is_prototype(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_prototype", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Rule and the generated Signals
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="summaryExpression")
def summary_expression(self) -> Optional[pulumi.Input[str]]:
"""
The summary of the generated Signals
"""
return pulumi.get(self, "summary_expression")
@summary_expression.setter
def summary_expression(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "summary_expression", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The tags of the generated Signals
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _CseThresholdRuleState:
def __init__(__self__, *,
count_distinct: Optional[pulumi.Input[bool]] = None,
count_field: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
entity_selectors: Optional[pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]]] = None,
expression: Optional[pulumi.Input[str]] = None,
group_by_fields: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_prototype: Optional[pulumi.Input[bool]] = None,
limit: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
severity: Optional[pulumi.Input[int]] = None,
summary_expression: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
window_size: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering CseThresholdRule resources.
:param pulumi.Input[bool] count_distinct: Whether to count distinct values of a field, as opposed to just counting the number of records
:param pulumi.Input[str] count_field: The field to count if `count_distinct` is set to true
:param pulumi.Input[str] description: The description of the generated Signals
:param pulumi.Input[bool] enabled: Whether the rule should generate Signals
:param pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]] entity_selectors: The entities to generate Signals on
:param pulumi.Input[str] expression: The expression for which records to match on
:param pulumi.Input[Sequence[pulumi.Input[str]]] group_by_fields: A list of fields to group records by
:param pulumi.Input[bool] is_prototype: Whether the generated Signals should be prototype Signals
:param pulumi.Input[int] limit: A Signal will be fired when this many records/distinct field values are matched
:param pulumi.Input[str] name: The name of the Rule and the generated Signals
:param pulumi.Input[int] severity: The severity of the generated Signals
:param pulumi.Input[str] summary_expression: The summary of the generated Signals
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: The tags of the generated Signals
:param pulumi.Input[str] window_size: How long of a window to aggregate records for. Current acceptable values are T05M, T10M, T30M, T60M, T24H, T12H, or T05D.
"""
if count_distinct is not None:
pulumi.set(__self__, "count_distinct", count_distinct)
if count_field is not None:
pulumi.set(__self__, "count_field", count_field)
if description is not None:
pulumi.set(__self__, "description", description)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if entity_selectors is not None:
pulumi.set(__self__, "entity_selectors", entity_selectors)
if expression is not None:
pulumi.set(__self__, "expression", expression)
if group_by_fields is not None:
pulumi.set(__self__, "group_by_fields", group_by_fields)
if is_prototype is not None:
pulumi.set(__self__, "is_prototype", is_prototype)
if limit is not None:
pulumi.set(__self__, "limit", limit)
if name is not None:
pulumi.set(__self__, "name", name)
if severity is not None:
pulumi.set(__self__, "severity", severity)
if summary_expression is not None:
pulumi.set(__self__, "summary_expression", summary_expression)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if window_size is not None:
pulumi.set(__self__, "window_size", window_size)
@property
@pulumi.getter(name="countDistinct")
def count_distinct(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to count distinct values of a field, as opposed to just counting the number of records
"""
return pulumi.get(self, "count_distinct")
@count_distinct.setter
def count_distinct(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "count_distinct", value)
@property
@pulumi.getter(name="countField")
def count_field(self) -> Optional[pulumi.Input[str]]:
"""
The field to count if `count_distinct` is set to true
"""
return pulumi.get(self, "count_field")
@count_field.setter
def count_field(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "count_field", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the generated Signals
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Whether the rule should generate Signals
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="entitySelectors")
def entity_selectors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]]]:
"""
The entities to generate Signals on
"""
return pulumi.get(self, "entity_selectors")
@entity_selectors.setter
def entity_selectors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['CseThresholdRuleEntitySelectorArgs']]]]):
pulumi.set(self, "entity_selectors", value)
@property
@pulumi.getter
def expression(self) -> Optional[pulumi.Input[str]]:
"""
The expression for which records to match on
"""
return pulumi.get(self, "expression")
@expression.setter
def expression(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "expression", value)
@property
@pulumi.getter(name="groupByFields")
def group_by_fields(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of fields to group records by
"""
return pulumi.get(self, "group_by_fields")
@group_by_fields.setter
def group_by_fields(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "group_by_fields", value)
@property
@pulumi.getter(name="isPrototype")
def is_prototype(self) -> Optional[pulumi.Input[bool]]:
"""
Whether the generated Signals should be prototype Signals
"""
return pulumi.get(self, "is_prototype")
@is_prototype.setter
def is_prototype(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_prototype", value)
@property
@pulumi.getter
def limit(self) -> Optional[pulumi.Input[int]]:
"""
A Signal will be fired when this many records/distinct field values are matched
"""
return pulumi.get(self, "limit")
@limit.setter
def limit(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "limit", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Rule and the generated Signals
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def severity(self) -> Optional[pulumi.Input[int]]:
"""
The severity of the generated Signals
"""
return pulumi.get(self, "severity")
@severity.setter
def severity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "severity", value)
@property
@pulumi.getter(name="summaryExpression")
def summary_expression(self) -> Optional[pulumi.Input[str]]:
"""
The summary of the generated Signals
"""
return pulumi.get(self, "summary_expression")
@summary_expression.setter
def summary_expression(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "summary_expression", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The tags of the generated Signals
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="windowSize")
def window_size(self) -> Optional[pulumi.Input[str]]:
"""
How long of a window to aggregate records for. Current acceptable values are T05M, T10M, T30M, T60M, T24H, T12H, or T05D.
"""
return pulumi.get(self, "window_size")
@window_size.setter
def window_size(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "window_size", value)
class CseThresholdRule(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
count_distinct: Optional[pulumi.Input[bool]] = None,
count_field: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
entity_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CseThresholdRuleEntitySelectorArgs']]]]] = None,
expression: Optional[pulumi.Input[str]] = None,
group_by_fields: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_prototype: Optional[pulumi.Input[bool]] = None,
limit: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
severity: Optional[pulumi.Input[int]] = None,
summary_expression: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
window_size: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a Sumo Logic CSE [Threshold Rule](https://help.sumologic.com/Cloud_SIEM_Enterprise/CSE_Rules/05_Write_a_Threshold_Rule).
## Example Usage
```python
import pulumi
import pulumi_sumologic as sumologic
threshold_rule = sumologic.CseThresholdRule("thresholdRule",
count_distinct=True,
count_field="dstDevice_hostname",
description="Signal description",
enabled=True,
entity_selectors=[sumologic.CseThresholdRuleEntitySelectorArgs(
entity_type="_ip",
expression="srcDevice_ip",
)],
expression="objectType = \"Network\"",
group_by_fields=["dstDevice_hostname"],
is_prototype=False,
limit=1000,
severity=5,
summary_expression="Signal summary",
tags=["_mitreAttackTactic:TA0009"],
window_size="T30M")
```
## Import
Threshold Rules can be imported using the field id, e.g.:
```sh
$ pulumi import sumologic:index/cseThresholdRule:CseThresholdRule threshold_rule id
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] count_distinct: Whether to count distinct values of a field, as opposed to just counting the number of records
:param pulumi.Input[str] count_field: The field to count if `count_distinct` is set to true
:param pulumi.Input[str] description: The description of the generated Signals
:param pulumi.Input[bool] enabled: Whether the rule should generate Signals
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CseThresholdRuleEntitySelectorArgs']]]] entity_selectors: The entities to generate Signals on
:param pulumi.Input[str] expression: The expression for which records to match on
:param pulumi.Input[Sequence[pulumi.Input[str]]] group_by_fields: A list of fields to group records by
:param pulumi.Input[bool] is_prototype: Whether the generated Signals should be prototype Signals
:param pulumi.Input[int] limit: A Signal will be fired when this many records/distinct field values are matched
:param pulumi.Input[str] name: The name of the Rule and the generated Signals
:param pulumi.Input[int] severity: The severity of the generated Signals
:param pulumi.Input[str] summary_expression: The summary of the generated Signals
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: The tags of the generated Signals
:param pulumi.Input[str] window_size: How long of a window to aggregate records for. Current acceptable values are T05M, T10M, T30M, T60M, T24H, T12H, or T05D.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: CseThresholdRuleArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Sumo Logic CSE [Threshold Rule](https://help.sumologic.com/Cloud_SIEM_Enterprise/CSE_Rules/05_Write_a_Threshold_Rule).
## Example Usage
```python
import pulumi
import pulumi_sumologic as sumologic
threshold_rule = sumologic.CseThresholdRule("thresholdRule",
count_distinct=True,
count_field="dstDevice_hostname",
description="Signal description",
enabled=True,
entity_selectors=[sumologic.CseThresholdRuleEntitySelectorArgs(
entity_type="_ip",
expression="srcDevice_ip",
)],
expression="objectType = \"Network\"",
group_by_fields=["dstDevice_hostname"],
is_prototype=False,
limit=1000,
severity=5,
summary_expression="Signal summary",
tags=["_mitreAttackTactic:TA0009"],
window_size="T30M")
```
## Import
Threshold Rules can be imported using the field id, e.g.:
```sh
$ pulumi import sumologic:index/cseThresholdRule:CseThresholdRule threshold_rule id
```
:param str resource_name: The name of the resource.
:param CseThresholdRuleArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(CseThresholdRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
count_distinct: Optional[pulumi.Input[bool]] = None,
count_field: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
entity_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CseThresholdRuleEntitySelectorArgs']]]]] = None,
expression: Optional[pulumi.Input[str]] = None,
group_by_fields: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_prototype: Optional[pulumi.Input[bool]] = None,
limit: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
severity: Optional[pulumi.Input[int]] = None,
summary_expression: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
window_size: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = CseThresholdRuleArgs.__new__(CseThresholdRuleArgs)
__props__.__dict__["count_distinct"] = count_distinct
__props__.__dict__["count_field"] = count_field
if description is None and not opts.urn:
raise TypeError("Missing required property 'description'")
__props__.__dict__["description"] = description
if enabled is None and not opts.urn:
raise TypeError("Missing required property 'enabled'")
__props__.__dict__["enabled"] = enabled
if entity_selectors is None and not opts.urn:
raise TypeError("Missing required property 'entity_selectors'")
__props__.__dict__["entity_selectors"] = entity_selectors
if expression is None and not opts.urn:
raise TypeError("Missing required property 'expression'")
__props__.__dict__["expression"] = expression
__props__.__dict__["group_by_fields"] = group_by_fields
__props__.__dict__["is_prototype"] = is_prototype
if limit is None and not opts.urn:
raise TypeError("Missing required property 'limit'")
__props__.__dict__["limit"] = limit
__props__.__dict__["name"] = name
if severity is None and not opts.urn:
raise TypeError("Missing required property 'severity'")
__props__.__dict__["severity"] = severity
__props__.__dict__["summary_expression"] = summary_expression
__props__.__dict__["tags"] = tags
if window_size is None and not opts.urn:
raise TypeError("Missing required property 'window_size'")
__props__.__dict__["window_size"] = window_size
super(CseThresholdRule, __self__).__init__(
'sumologic:index/cseThresholdRule:CseThresholdRule',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
count_distinct: Optional[pulumi.Input[bool]] = None,
count_field: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
entity_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CseThresholdRuleEntitySelectorArgs']]]]] = None,
expression: Optional[pulumi.Input[str]] = None,
group_by_fields: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_prototype: Optional[pulumi.Input[bool]] = None,
limit: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
severity: Optional[pulumi.Input[int]] = None,
summary_expression: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
window_size: Optional[pulumi.Input[str]] = None) -> 'CseThresholdRule':
"""
Get an existing CseThresholdRule resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] count_distinct: Whether to count distinct values of a field, as opposed to just counting the number of records
:param pulumi.Input[str] count_field: The field to count if `count_distinct` is set to true
:param pulumi.Input[str] description: The description of the generated Signals
:param pulumi.Input[bool] enabled: Whether the rule should generate Signals
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CseThresholdRuleEntitySelectorArgs']]]] entity_selectors: The entities to generate Signals on
:param pulumi.Input[str] expression: The expression for which records to match on
:param pulumi.Input[Sequence[pulumi.Input[str]]] group_by_fields: A list of fields to group records by
:param pulumi.Input[bool] is_prototype: Whether the generated Signals should be prototype Signals
:param pulumi.Input[int] limit: A Signal will be fired when this many records/distinct field values are matched
:param pulumi.Input[str] name: The name of the Rule and the generated Signals
:param pulumi.Input[int] severity: The severity of the generated Signals
:param pulumi.Input[str] summary_expression: The summary of the generated Signals
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: The tags of the generated Signals
:param pulumi.Input[str] window_size: How long of a window to aggregate records for. Current acceptable values are T05M, T10M, T30M, T60M, T24H, T12H, or T05D.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _CseThresholdRuleState.__new__(_CseThresholdRuleState)
__props__.__dict__["count_distinct"] = count_distinct
__props__.__dict__["count_field"] = count_field
__props__.__dict__["description"] = description
__props__.__dict__["enabled"] = enabled
__props__.__dict__["entity_selectors"] = entity_selectors
__props__.__dict__["expression"] = expression
__props__.__dict__["group_by_fields"] = group_by_fields
__props__.__dict__["is_prototype"] = is_prototype
__props__.__dict__["limit"] = limit
__props__.__dict__["name"] = name
__props__.__dict__["severity"] = severity
__props__.__dict__["summary_expression"] = summary_expression
__props__.__dict__["tags"] = tags
__props__.__dict__["window_size"] = window_size
return CseThresholdRule(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="countDistinct")
def count_distinct(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to count distinct values of a field, as opposed to just counting the number of records
"""
return pulumi.get(self, "count_distinct")
@property
@pulumi.getter(name="countField")
def count_field(self) -> pulumi.Output[Optional[str]]:
"""
The field to count if `count_distinct` is set to true
"""
return pulumi.get(self, "count_field")
@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
"""
The description of the generated Signals
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[bool]:
"""
Whether the rule should generate Signals
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="entitySelectors")
def entity_selectors(self) -> pulumi.Output[Sequence['outputs.CseThresholdRuleEntitySelector']]:
"""
The entities to generate Signals on
"""
return pulumi.get(self, "entity_selectors")
@property
@pulumi.getter
def expression(self) -> pulumi.Output[str]:
"""
The expression for which records to match on
"""
return pulumi.get(self, "expression")
@property
@pulumi.getter(name="groupByFields")
def group_by_fields(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of fields to group records by
"""
return pulumi.get(self, "group_by_fields")
@property
@pulumi.getter(name="isPrototype")
def is_prototype(self) -> pulumi.Output[Optional[bool]]:
"""
Whether the generated Signals should be prototype Signals
"""
return pulumi.get(self, "is_prototype")
@property
@pulumi.getter
def limit(self) -> pulumi.Output[int]:
"""
A Signal will be fired when this many records/distinct field values are matched
"""
return pulumi.get(self, "limit")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the Rule and the generated Signals
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def severity(self) -> pulumi.Output[int]:
"""
The severity of the generated Signals
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter(name="summaryExpression")
def summary_expression(self) -> pulumi.Output[Optional[str]]:
"""
The summary of the generated Signals
"""
return pulumi.get(self, "summary_expression")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The tags of the generated Signals
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="windowSize")
def window_size(self) -> pulumi.Output[str]:
"""
The length of the window over which records are aggregated. Current acceptable values are T05M, T10M, T30M, T60M, T12H, T24H, or T05D.
"""
return pulumi.get(self, "window_size")
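A minimal usage sketch, assuming the `pulumi_sumologic` Python package and a configured Sumo Logic provider; the rule name, query expression, and entity selector values below are illustrative only, not taken from any real environment:

import pulumi_sumologic as sumologic

# Declare a threshold rule; argument names mirror the properties above.
rule = sumologic.CseThresholdRule(
    "loginFailures",
    name="Excessive failed logins",
    description="Fires when failed logins exceed the limit",
    enabled=True,
    entity_selectors=[sumologic.CseThresholdRuleEntitySelectorArgs(
        entity_type="_username",   # illustrative selector
        expression="user_username",
    )],
    expression='objectType="Authentication" AND success=false',  # illustrative query
    limit=5,
    severity=5,
    window_size="T30M",
)

# The `get` method shown above rehydrates state for an existing rule by ID.
existing = sumologic.CseThresholdRule.get("existing-rule", id=rule.id)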
| 43.13789
| 167
| 0.645663
| 4,128
| 35,977
| 5.436531
| 0.05499
| 0.110284
| 0.066126
| 0.044114
| 0.908163
| 0.88731
| 0.861376
| 0.830675
| 0.824837
| 0.809242
| 0
| 0.004634
| 0.250271
| 35,977
| 833
| 168
| 43.189676
| 0.827383
| 0.300359
| 0
| 0.756356
| 1
| 0
| 0.105793
| 0.016991
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163136
| false
| 0.002119
| 0.014831
| 0
| 0.275424
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8016bc92a806838fbaee4c2824f17eeb199d742d
| 171
|
py
|
Python
|
winter/core/utils/__init__.py
|
maximluzinqs/winter
|
4fc6d23e60849400f148618e4a90d1d549144dcf
|
[
"MIT"
] | null | null | null |
winter/core/utils/__init__.py
|
maximluzinqs/winter
|
4fc6d23e60849400f148618e4a90d1d549144dcf
|
[
"MIT"
] | null | null | null |
winter/core/utils/__init__.py
|
maximluzinqs/winter
|
4fc6d23e60849400f148618e4a90d1d549144dcf
|
[
"MIT"
] | null | null | null |
from .cached_property import cached_property
from .nested_types import TypeWrapper
from .nested_types import has_nested_type
from .positive_integer import PositiveInteger
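These lines simply re-export helpers so they can be imported from `winter.core.utils` directly. A small illustrative sketch of using the re-exported `cached_property` (the `Point` class below is hypothetical, not part of winter):

from winter.core.utils import cached_property

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    @cached_property
    def norm(self):
        # Computed on first access, then cached on the instance.
        return (self.x ** 2 + self.y ** 2) ** 0.5

p = Point(3, 4)
assert p.norm == 5.0  # subsequent accesses reuse the cached value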
| 34.2
| 45
| 0.883041
| 23
| 171
| 6.26087
| 0.521739
| 0.194444
| 0.208333
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093567
| 171
| 4
| 46
| 42.75
| 0.929032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
33febde5f89ed205e8fa6af1b2fffe78c63332d5
| 317
|
py
|
Python
|
platform/core/polyaxon/tracker/events/experiment_job.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/tracker/events/experiment_job.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/tracker/events/experiment_job.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
import tracker
from events.registry import experiment_job
tracker.subscribe(experiment_job.ExperimentJobViewedEvent)
tracker.subscribe(experiment_job.ExperimentJobResourcesViewedEvent)
tracker.subscribe(experiment_job.ExperimentJobLogsViewedEvent)
tracker.subscribe(experiment_job.ExperimentJobStatusesViewedEvent)
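This module's only job is to register event classes with the tracker at import time; subscribing makes each event type known so it can later be recorded. A minimal sketch of the pattern with a hypothetical stand-in tracker (none of these names come from polyaxon itself):

_subscribed = {}

def subscribe(event_cls):
    # Index event classes by identifier so later record() calls can dispatch them.
    _subscribed[event_cls.event_type] = event_cls

class ExperimentJobViewedEvent:  # stand-in for an events.registry class
    event_type = "experiment_job.viewed"

subscribe(ExperimentJobViewedEvent)
assert "experiment_job.viewed" in _subscribed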
| 35.222222
| 67
| 0.908517
| 28
| 317
| 10.107143
| 0.428571
| 0.229682
| 0.367491
| 0.409894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037855
| 317
| 8
| 68
| 39.625
| 0.927869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
1d66aaa11c2ea883fdcdc6246ab4a53c849ae4fc
| 26,936
|
py
|
Python
|
darling_ansible/python_venv/lib/python3.7/site-packages/oci/dns/dns_client_composite_operations.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/dns/dns_client_composite_operations.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/dns/dns_client_composite_operations.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | 1
|
2020-06-25T03:12:58.000Z
|
2020-06-25T03:12:58.000Z
|
# coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
import oci # noqa: F401
from oci.util import WAIT_RESOURCE_NOT_FOUND # noqa: F401
class DnsClientCompositeOperations(object):
"""
This class provides a wrapper around :py:class:`~oci.dns.DnsClient` and offers convenience methods
for operations that would otherwise need to be chained together. For example, instead of performing an action
on a resource (e.g. launching an instance, creating a load balancer) and then using a waiter to wait for the resource
to enter a given state, you can call a single method in this class to accomplish the same functionality.
"""
def __init__(self, client, **kwargs):
"""
Creates a new DnsClientCompositeOperations object
:param DnsClient client:
The service client which will be wrapped by this object
"""
self.client = client
def create_steering_policy_and_wait_for_state(self, create_steering_policy_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.create_steering_policy` and waits for the :py:class:`~oci.dns.models.SteeringPolicy` acted upon
to enter the given state(s).
:param CreateSteeringPolicyDetails create_steering_policy_details: (required)
Details for creating a new steering policy.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.SteeringPolicy.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.create_steering_policy`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_steering_policy(create_steering_policy_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_steering_policy(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_steering_policy_attachment_and_wait_for_state(self, create_steering_policy_attachment_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.create_steering_policy_attachment` and waits for the :py:class:`~oci.dns.models.SteeringPolicyAttachment` acted upon
to enter the given state(s).
:param CreateSteeringPolicyAttachmentDetails create_steering_policy_attachment_details: (required)
Details for creating a new steering policy attachment.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.SteeringPolicyAttachment.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.create_steering_policy_attachment`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_steering_policy_attachment(create_steering_policy_attachment_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_steering_policy_attachment(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_tsig_key_and_wait_for_state(self, create_tsig_key_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.create_tsig_key` and waits for the :py:class:`~oci.dns.models.TsigKey` acted upon
to enter the given state(s).
:param CreateTsigKeyDetails create_tsig_key_details: (required)
Details for creating a new TSIG key.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.TsigKey.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.create_tsig_key`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_tsig_key(create_tsig_key_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_tsig_key(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_zone_and_wait_for_state(self, create_zone_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.create_zone` and waits for the :py:class:`~oci.dns.models.Zone` acted upon
to enter the given state(s).
:param CreateZoneBaseDetails create_zone_details: (required)
Details for creating a new zone.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.Zone.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.create_zone`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_zone(create_zone_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_zone(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_steering_policy_and_wait_for_state(self, steering_policy_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.delete_steering_policy` and waits for the :py:class:`~oci.dns.models.SteeringPolicy` acted upon
to enter the given state(s).
:param str steering_policy_id: (required)
The OCID of the target steering policy.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.SteeringPolicy.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.delete_steering_policy`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_steering_policy(steering_policy_id)
operation_result = None
try:
operation_result = self.client.delete_steering_policy(steering_policy_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_steering_policy_attachment_and_wait_for_state(self, steering_policy_attachment_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.delete_steering_policy_attachment` and waits for the :py:class:`~oci.dns.models.SteeringPolicyAttachment` acted upon
to enter the given state(s).
:param str steering_policy_attachment_id: (required)
The OCID of the target steering policy attachment.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.SteeringPolicyAttachment.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.delete_steering_policy_attachment`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_steering_policy_attachment(steering_policy_attachment_id)
operation_result = None
try:
operation_result = self.client.delete_steering_policy_attachment(steering_policy_attachment_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_tsig_key_and_wait_for_state(self, tsig_key_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.delete_tsig_key` and waits for the :py:class:`~oci.dns.models.TsigKey` acted upon
to enter the given state(s).
:param str tsig_key_id: (required)
The OCID of the target TSIG key.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.TsigKey.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.delete_tsig_key`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_tsig_key(tsig_key_id)
operation_result = None
try:
operation_result = self.client.delete_tsig_key(tsig_key_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_zone_and_wait_for_state(self, zone_name_or_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.delete_zone` and waits for the :py:class:`~oci.dns.models.Zone` acted upon
to enter the given state(s).
:param str zone_name_or_id: (required)
The name or OCID of the target zone.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.Zone.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.delete_zone`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_zone(zone_name_or_id)
operation_result = None
try:
operation_result = self.client.delete_zone(zone_name_or_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_steering_policy_and_wait_for_state(self, steering_policy_id, update_steering_policy_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.update_steering_policy` and waits for the :py:class:`~oci.dns.models.SteeringPolicy` acted upon
to enter the given state(s).
:param str steering_policy_id: (required)
The OCID of the target steering policy.
:param UpdateSteeringPolicyDetails update_steering_policy_details: (required)
New data for the steering policy.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.SteeringPolicy.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.update_steering_policy`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_steering_policy(steering_policy_id, update_steering_policy_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_steering_policy(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_steering_policy_attachment_and_wait_for_state(self, steering_policy_attachment_id, update_steering_policy_attachment_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.update_steering_policy_attachment` and waits for the :py:class:`~oci.dns.models.SteeringPolicyAttachment` acted upon
to enter the given state(s).
:param str steering_policy_attachment_id: (required)
The OCID of the target steering policy attachment.
:param UpdateSteeringPolicyAttachmentDetails update_steering_policy_attachment_details: (required)
New data for the steering policy attachment.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.SteeringPolicyAttachment.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.update_steering_policy_attachment`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_steering_policy_attachment(steering_policy_attachment_id, update_steering_policy_attachment_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_steering_policy_attachment(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_tsig_key_and_wait_for_state(self, tsig_key_id, update_tsig_key_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.update_tsig_key` and waits for the :py:class:`~oci.dns.models.TsigKey` acted upon
to enter the given state(s).
:param str tsig_key_id: (required)
The OCID of the target TSIG key.
:param UpdateTsigKeyDetails update_tsig_key_details: (required)
New data for the TSIG key.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.TsigKey.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.update_tsig_key`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_tsig_key(tsig_key_id, update_tsig_key_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_tsig_key(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_zone_and_wait_for_state(self, zone_name_or_id, update_zone_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.dns.DnsClient.update_zone` and waits for the :py:class:`~oci.dns.models.Zone` acted upon
to enter the given state(s).
:param str zone_name_or_id: (required)
The name or OCID of the target zone.
:param UpdateZoneDetails update_zone_details: (required)
New data for the zone.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.dns.models.Zone.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.dns.DnsClient.update_zone`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_zone(zone_name_or_id, update_zone_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_zone(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
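A short usage sketch of the wrapper: assuming a valid ~/.oci/config profile and a real compartment OCID (the one below is a placeholder), this creates a zone and blocks until it reaches ACTIVE:

import oci

config = oci.config.from_file()  # reads ~/.oci/config
composite = DnsClientCompositeOperations(oci.dns.DnsClient(config))

details = oci.dns.models.CreateZoneDetails(
    name="example.com",
    zone_type="PRIMARY",
    compartment_id="ocid1.compartment.oc1..placeholder",  # placeholder OCID
)

# Returns only once lifecycle_state is ACTIVE, or raises if the waiter fails.
result = composite.create_zone_and_wait_for_state(
    details,
    wait_for_states=["ACTIVE"],
    waiter_kwargs={"max_wait_seconds": 600},
)
print(result.data.id)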
| 50.918715
| 245
| 0.682841
| 3,542
| 26,936
| 4.939582
| 0.060136
| 0.040409
| 0.053498
| 0.016461
| 0.931527
| 0.927126
| 0.917124
| 0.905864
| 0.894547
| 0.888146
| 0
| 0.001619
| 0.243429
| 26,936
| 528
| 246
| 51.015152
| 0.856912
| 0.441305
| 0
| 0.813278
| 0
| 0
| 0.02632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053942
| false
| 0
| 0.008299
| 0
| 0.182573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d82895ac7b9702219cbb8bdc4d8802f09854cf7
| 8,182
|
py
|
Python
|
tests/test_ignore_file_entry.py
|
ozyildirim/rulecheck
|
6c3a8ef79abe0e4a6c2af66e60e88794c96e78b1
|
[
"MIT"
] | 1
|
2021-01-30T17:16:51.000Z
|
2021-01-30T17:16:51.000Z
|
tests/test_ignore_file_entry.py
|
ozyildirim/rulecheck
|
6c3a8ef79abe0e4a6c2af66e60e88794c96e78b1
|
[
"MIT"
] | 23
|
2020-06-27T20:07:10.000Z
|
2020-11-28T21:09:31.000Z
|
tests/test_ignore_file_entry.py
|
ozyildirim/rulecheck
|
6c3a8ef79abe0e4a6c2af66e60e88794c96e78b1
|
[
"MIT"
] | 1
|
2021-04-26T20:04:39.000Z
|
2021-04-26T20:04:39.000Z
|
import pytest
from rulecheck.ignore import IgnoreFileEntry
from rulecheck.rule import LogType
def test_line_with_all_parts():
""" Test parsing of line with all elements """
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c:2:4: ERROR: example_rules.file_based_rule: Visited return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "./../rulecheck/return-256.c"
assert entry.get_line_num() == 2
assert entry.get_col_num() == 4
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Visited return-256.c"
assert entry.is_valid()
def test_line_with_no_col_num():
""" Test parsing of line with all elements except col number """
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c:2: ERROR: example_rules.file_based_rule: Visited return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "./../rulecheck/return-256.c"
assert entry.get_line_num() == 2
assert entry.get_col_num() == -1
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Visited return-256.c"
assert entry.is_valid()
def test_line_with_no_line_num():
""" Test parsing of line with all elements except line and col numbers"""
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c: ERROR: example_rules.file_based_rule: Visited return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "./../rulecheck/return-256.c"
assert entry.get_line_num() == -1
assert entry.get_col_num() == -1
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Visited return-256.c"
assert entry.is_valid()
def test_line_with_all_parts_and_warning_level():
""" Test parsing of line with all elements and WARNING level """
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c:2:4: WARNING: example_rules.file_based_rule: Visited return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "./../rulecheck/return-256.c"
assert entry.get_line_num() == 2
assert entry.get_col_num() == 4
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.WARNING
assert entry.get_message() == "Visited return-256.c"
assert entry.is_valid()
def test_line_with_rule_level_error():
""" Test parsing of line resulting from a rule-level error """
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: rulecheck: ERROR: example_rules.file_based_rule: Rule threw exception")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "rulecheck"
assert entry.get_line_num() == -1
assert entry.get_col_num() == -1
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Rule threw exception"
assert entry.is_valid()
def test_line_with_colons_in_message():
""" Test parsing of line where the message field has ':' character(s) """
# Entry has all possible fields and a message with ':'
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c:2:4: ERROR: example_rules.file_based_rule: Visited a file: return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "./../rulecheck/return-256.c"
assert entry.get_line_num() == 2
assert entry.get_col_num() == 4
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Visited a file: return-256.c"
assert entry.is_valid()
# Entry has fewer than the maximum number of fields and has a message with ':'
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: rulecheck: ERROR: example_rules.file_based_rule: Rule threw exception:KeyError")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "rulecheck"
assert entry.get_line_num() == -1
assert entry.get_col_num() == -1
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Rule threw exception:KeyError"
assert entry.is_valid()
def test_line_with_colons_in_filename():
""" Test parsing of line where the filename field has ':' character """
# Entry has all possible fields and a filename with ':'
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: C:/project/rulecheck/return-256.c:2:4: ERROR: example_rules.file_based_rule: Visited a file: return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "C:/project/rulecheck/return-256.c"
assert entry.get_line_num() == 2
assert entry.get_col_num() == 4
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Visited a file: return-256.c"
assert entry.is_valid()
# Entry has all possible fields except col number and has a filename with ':'
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: C:/project/rulecheck/return-256.c:2: ERROR: example_rules.file_based_rule: Visited a file: return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "C:/project/rulecheck/return-256.c"
assert entry.get_line_num() == 2
assert entry.get_col_num() == -1
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Visited a file: return-256.c"
assert entry.is_valid()
# Entry has all possible fields except col and line number and has a filename with ':'
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: C:/project/rulecheck/return-256.c: ERROR: example_rules.file_based_rule: Visited a file: return-256.c")
assert entry.get_hash() == "b0b91dbc35617b55b5620613f8e79bee"
assert entry.get_file_name() == "C:/project/rulecheck/return-256.c"
assert entry.get_line_num() == -1
assert entry.get_col_num() == -1
assert entry.get_rule_name() == "example_rules.file_based_rule"
assert entry.get_log_level() == LogType.ERROR
assert entry.get_message() == "Visited a file: return-256.c"
assert entry.is_valid()
def test_lines_with_bad_hashes():
""" Test parsing of line with invalid hash values """
# Too short
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79be: rulecheck: ERROR: example_rules.file_based_rule: Rule threw exception")
assert not entry.is_valid()
# Too long
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79beea: ./../rulecheck/return-256.c:2:4: ERROR: example_rules.file_based_rule: Visited return-256.c")
assert not entry.is_valid()
# Invalid character (3rd)
entry = IgnoreFileEntry("b0+91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c:2: ERROR: example_rules.file_based_rule: Visited return-256.c")
assert not entry.is_valid()
def test_lines_with_bad_log_level():
""" Test parsing of line with invalid log level """
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c:2: ERRORA: example_rules.file_based_rule: Visited return-256.c")
assert not entry.is_valid()
def test_lines_missing_too_many_fields():
""" Test parsing of line with invalid column number """
entry = IgnoreFileEntry("b0b91dbc35617b55b5620613f8e79bee: ERROR :example_rules.file_based_rule: Visited return-256.c")
assert not entry.is_valid()
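The tests above pin down the entry format: a 32-character lowercase hex hash, a file name that may itself contain ':', optional line and column numbers, an ERROR or WARNING level, a rule name, and a free-form message. A rough illustrative parser for the happy path (not rulecheck's actual implementation) could look like:

import re

ENTRY_RE = re.compile(
    r"^(?P<hash>[0-9a-f]{32}): "                # 32 hex characters
    r"(?P<file>.+?)"                            # file name, may contain ':'
    r"(?::(?P<line>\d+))?(?::(?P<col>\d+))?: "  # optional line/col numbers
    r"(?P<level>ERROR|WARNING): "
    r"(?P<rule>[\w.]+): "
    r"(?P<message>.*)$"
)

m = ENTRY_RE.match(
    "b0b91dbc35617b55b5620613f8e79bee: ./../rulecheck/return-256.c:2:4:"
    " ERROR: example_rules.file_based_rule: Visited return-256.c"
)
assert m and m.group("line") == "2" and m.group("col") == "4"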
| 50.196319
| 170
| 0.73405
| 1,094
| 8,182
| 5.252285
| 0.078611
| 0.15315
| 0.170553
| 0.077967
| 0.892099
| 0.889488
| 0.856248
| 0.822137
| 0.809781
| 0.767316
| 0
| 0.093263
| 0.145563
| 8,182
| 162
| 171
| 50.506173
| 0.728651
| 0.114886
| 0
| 0.725664
| 0
| 0.097345
| 0.407722
| 0.298857
| 0
| 0
| 0
| 0
| 0.752212
| 1
| 0.088496
| false
| 0
| 0.026549
| 0
| 0.115044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1d86ac153d56fa1300d3eaf394ffd22a42c7baf8
| 6,234
|
py
|
Python
|
components/migrations/0001_initial.py
|
cfarm/wagtail-design-system
|
529ae1498f8434adca3aa6ceebb7c7939efba76a
|
[
"CC0-1.0"
] | 1
|
2019-05-30T16:19:14.000Z
|
2019-05-30T16:19:14.000Z
|
components/migrations/0001_initial.py
|
cfarm/wagtail-design-system
|
529ae1498f8434adca3aa6ceebb7c7939efba76a
|
[
"CC0-1.0"
] | 2
|
2019-06-04T20:30:51.000Z
|
2019-06-07T18:26:05.000Z
|
components/migrations/0001_initial.py
|
cfarm/wagtail-design-system
|
529ae1498f8434adca3aa6ceebb7c7939efba76a
|
[
"CC0-1.0"
] | 3
|
2019-05-17T14:41:40.000Z
|
2019-06-04T16:00:01.000Z
|
# Generated by Django 2.2.1 on 2019-05-15 16:13
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0041_group_collection_permissions_verbose_name_plural'),
('wagtailimages', '0001_squashed_0021'),
]
operations = [
migrations.CreateModel(
name='ComponentIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
],
options={
'verbose_name': 'Component Index Page',
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='ComponentPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('h1', models.CharField(blank=True, help_text='If you want the main heading on the page to say something other than the above title field, enter it here.', max_length=100)),
('overview', wagtail.core.fields.StreamField([('rich_text', wagtail.core.blocks.RichTextBlock(icon='pilcrow', label='Rich Text')), ('code', wagtail.core.blocks.StructBlock([('language', wagtail.core.blocks.ChoiceBlock(choices=[('bash', 'Bash/Shell'), ('css', 'CSS'), ('diff', 'diff'), ('html', 'HTML'), ('javascript', 'Javascript'), ('json', 'JSON'), ('python', 'Python'), ('scss', 'SCSS'), ('yaml', 'YAML')], help_text='Coding language', identifier='language', label='Language')), ('code', wagtail.core.blocks.TextBlock(identifier='code', label='Code'))], help_text='For displaying code to the reader of a page.', icon='code')), ('raw_html', wagtail.core.blocks.RawHTMLBlock(help_text='HTML that will be rendered on the page. Use as a temporary solution or last resort for something fancy that has not been enabled in a proper Wagtail block yet.', icon='placeholder', label='Raw HTML'))], blank=True)),
('usage', wagtail.core.fields.StreamField([('rich_text', wagtail.core.blocks.RichTextBlock(icon='pilcrow', label='Rich Text')), ('code', wagtail.core.blocks.StructBlock([('language', wagtail.core.blocks.ChoiceBlock(choices=[('bash', 'Bash/Shell'), ('css', 'CSS'), ('diff', 'diff'), ('html', 'HTML'), ('javascript', 'Javascript'), ('json', 'JSON'), ('python', 'Python'), ('scss', 'SCSS'), ('yaml', 'YAML')], help_text='Coding language', identifier='language', label='Language')), ('code', wagtail.core.blocks.TextBlock(identifier='code', label='Code'))], help_text='For displaying code to the reader of a page.', icon='code')), ('raw_html', wagtail.core.blocks.RawHTMLBlock(help_text='HTML that will be rendered on the page. Use as a temporary solution or last resort for something fancy that has not been enabled in a proper Wagtail block yet.', icon='placeholder', label='Raw HTML'))], blank=True)),
('design', wagtail.core.fields.StreamField([('rich_text', wagtail.core.blocks.RichTextBlock(icon='pilcrow', label='Rich Text')), ('code', wagtail.core.blocks.StructBlock([('language', wagtail.core.blocks.ChoiceBlock(choices=[('bash', 'Bash/Shell'), ('css', 'CSS'), ('diff', 'diff'), ('html', 'HTML'), ('javascript', 'Javascript'), ('json', 'JSON'), ('python', 'Python'), ('scss', 'SCSS'), ('yaml', 'YAML')], help_text='Coding language', identifier='language', label='Language')), ('code', wagtail.core.blocks.TextBlock(identifier='code', label='Code'))], help_text='For displaying code to the reader of a page.', icon='code')), ('raw_html', wagtail.core.blocks.RawHTMLBlock(help_text='HTML that will be rendered on the page. Use as a temporary solution or last resort for something fancy that has not been enabled in a proper Wagtail block yet.', icon='placeholder', label='Raw HTML'))], blank=True)),
('code', wagtail.core.fields.StreamField([('rich_text', wagtail.core.blocks.RichTextBlock(icon='pilcrow', label='Rich Text')), ('code', wagtail.core.blocks.StructBlock([('language', wagtail.core.blocks.ChoiceBlock(choices=[('bash', 'Bash/Shell'), ('css', 'CSS'), ('diff', 'diff'), ('html', 'HTML'), ('javascript', 'Javascript'), ('json', 'JSON'), ('python', 'Python'), ('scss', 'SCSS'), ('yaml', 'YAML')], help_text='Coding language', identifier='language', label='Language')), ('code', wagtail.core.blocks.TextBlock(identifier='code', label='Code'))], help_text='For displaying code to the reader of a page.', icon='code')), ('raw_html', wagtail.core.blocks.RawHTMLBlock(help_text='HTML that will be rendered on the page. Use as a temporary solution or last resort for something fancy that has not been enabled in a proper Wagtail block yet.', icon='placeholder', label='Raw HTML'))], blank=True)),
('accessibility', wagtail.core.fields.StreamField([('rich_text', wagtail.core.blocks.RichTextBlock(icon='pilcrow', label='Rich Text')), ('code', wagtail.core.blocks.StructBlock([('language', wagtail.core.blocks.ChoiceBlock(choices=[('bash', 'Bash/Shell'), ('css', 'CSS'), ('diff', 'diff'), ('html', 'HTML'), ('javascript', 'Javascript'), ('json', 'JSON'), ('python', 'Python'), ('scss', 'SCSS'), ('yaml', 'YAML')], help_text='Coding language', identifier='language', label='Language')), ('code', wagtail.core.blocks.TextBlock(identifier='code', label='Code'))], help_text='For displaying code to the reader of a page.', icon='code')), ('raw_html', wagtail.core.blocks.RawHTMLBlock(help_text='HTML that will be rendered on the page. Use as a temporary solution or last resort for something fancy that has not been enabled in a proper Wagtail block yet.', icon='placeholder', label='Raw HTML'))], blank=True)),
('thumbnail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
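A hedged sketch of model definitions consistent with this generated migration; the StreamField block list is abbreviated to the rich_text block only, and field options are trimmed:

from django.db import models
from wagtail.core import blocks
from wagtail.core.fields import StreamField
from wagtail.core.models import Page

class ComponentIndexPage(Page):
    class Meta:
        verbose_name = "Component Index Page"

class ComponentPage(Page):
    h1 = models.CharField(blank=True, max_length=100)
    overview = StreamField(
        [("rich_text", blocks.RichTextBlock(icon="pilcrow", label="Rich Text"))],
        blank=True,
    )
    thumbnail = models.ForeignKey(
        "wagtailimages.Image",
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="+",
    )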
| 132.638298
| 924
| 0.6718
| 782
| 6,234
| 5.292839
| 0.18798
| 0.085045
| 0.106789
| 0.050737
| 0.827495
| 0.827495
| 0.827495
| 0.81928
| 0.81928
| 0.81928
| 0
| 0.005791
| 0.141322
| 6,234
| 46
| 925
| 135.521739
| 0.76742
| 0.007218
| 0
| 0.358974
| 1
| 0.153846
| 0.39583
| 0.008566
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.102564
| 0
| 0.205128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1d95a44139d71cdf1751f37ba44b567831d1e48f
| 14,472
|
py
|
Python
|
manual_tests/manual_test_intervals.py
|
virocon-organization/viroconcom
|
186d768a7f39788b827173467febb038044199c7
|
[
"MIT"
] | 7
|
2018-08-03T17:43:45.000Z
|
2021-03-07T16:51:37.000Z
|
manual_tests/manual_test_intervals.py
|
virocon-organization/viroconcom
|
186d768a7f39788b827173467febb038044199c7
|
[
"MIT"
] | 65
|
2018-10-09T16:22:14.000Z
|
2021-05-21T15:06:26.000Z
|
manual_tests/manual_test_intervals.py
|
virocon-organization/viroconcom
|
186d768a7f39788b827173467febb038044199c7
|
[
"MIT"
] | 7
|
2018-10-16T19:23:26.000Z
|
2021-03-16T11:50:22.000Z
|
import numpy as np
from virocon import (
WidthOfIntervalSlicer,
NumberOfIntervalsSlicer,
PointsPerIntervalSlicer,
)
test_data = np.array([1.2, 1.5, 2.4, 2.5, 2.6, 3.1, 3.5, 3.6, 4.0, 5.0])
# %% test PointsPerIntervalSlicer()
# n_points=2
ref_intervals = [[1.2, 1.5], [2.4, 2.5], [2.6, 3.1], [3.5, 3.6], [4.0, 5.0]]
ref_references = [np.median(inter) for inter in ref_intervals]
ref_boundaries = [[1.2, 1.95], [1.95, 2.55], [2.55, 3.3], [3.3, 3.8], [3.8, 5.0]]
n_points_slicer = PointsPerIntervalSlicer(2, min_n_points=1)
my_slices, my_references, my_boundaries = n_points_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# n_points=3, last_full=True
ref_intervals = [[1.2], [1.5, 2.4, 2.5], [2.6, 3.1, 3.5], [3.6, 4.0, 5.0]]
ref_references = [np.median(inter) for inter in ref_intervals]
ref_boundaries = [[1.2, 1.35], [1.35, 2.55], [2.55, 3.55], [3.55, 5.0]]
n_points_slicer = PointsPerIntervalSlicer(3, last_full=True, min_n_points=1)
my_slices, my_references, my_boundaries = n_points_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# n_points=3, last_full=False
ref_intervals = [[1.2, 1.5, 2.4], [2.5, 2.6, 3.1], [3.5, 3.6, 4.0], [5.0]]
ref_references = [np.median(inter) for inter in ref_intervals]
ref_boundaries = [[1.2, 2.45], [2.45, 3.3], [3.3, 4.5], [4.5, 5.0]]
n_points_slicer = PointsPerIntervalSlicer(3, last_full=False, min_n_points=1)
my_slices, my_references, my_boundaries = n_points_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# n_points=3, last_full=True, min_n_points=3
ref_intervals = [[1.5, 2.4, 2.5], [2.6, 3.1, 3.5], [3.6, 4.0, 5.0]]
ref_references = [np.median(inter) for inter in ref_intervals]
ref_boundaries = [[1.5, 2.55], [2.55, 3.55], [3.55, 5.0]]
n_points_slicer = PointsPerIntervalSlicer(3, last_full=True, min_n_points=3)
my_slices, my_references, my_boundaries = n_points_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# n_points=3, last_full=True, reference=np.mean
ref_intervals = [[1.2], [1.5, 2.4, 2.5], [2.6, 3.1, 3.5], [3.6, 4.0, 5.0]]
ref_references = [np.mean(inter) for inter in ref_intervals]
ref_boundaries = [[1.2, 1.35], [1.35, 2.55], [2.55, 3.55], [3.55, 5.0]]
n_points_slicer = PointsPerIntervalSlicer(3, reference=np.mean, min_n_points=1)
my_slices, my_references, my_boundaries = n_points_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# %% test NumberOfIntervalsSlicer()
# n=2 include_max=True
ref_intervals = [[1.2, 1.5, 2.4, 2.5, 2.6], [3.1, 3.5, 3.6, 4.0, 5.0]]
ref_references = [2.15, 4.05]
ref_boundaries = [[1.2, 3.1], [3.1, 5.0]]
number_slicer = NumberOfIntervalsSlicer(2, min_n_points=1)
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_almost_equal(my_boundaries[i], ref_boundaries[i])
# n=2, include_max=False
ref_intervals = [[1.2, 1.5, 2.4, 2.5, 2.6], [3.1, 3.5, 3.6, 4.0]]
ref_references = [2.15, 4.05]
ref_boundaries = [[1.2, 3.1], [3.1, 5.0]]
number_slicer = NumberOfIntervalsSlicer(2, include_max=False, min_n_points=1)
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_almost_equal(my_boundaries[i], ref_boundaries[i])
# n=3 include_max=True
ref_intervals = [[1.2, 1.5, 2.4], [2.5, 2.6, 3.1, 3.5, 3.6], [4.0, 5.0]]
ref_width = (5 - 1.2) / 3
ref_references = [1.2 + ref_width / 2, 1.2 + 3 * ref_width / 2, 1.2 + 5 * ref_width / 2]
ref_boundaries = [(c - ref_width / 2, c + ref_width / 2) for c in ref_references]
number_slicer = NumberOfIntervalsSlicer(3, min_n_points=1)
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_almost_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_almost_equal(my_boundaries[i], ref_boundaries[i])
# n=3 include_max=True, min_n_points=3, min_n_intervals=2
ref_intervals = [[1.2, 1.5, 2.4], [2.5, 2.6, 3.1, 3.5, 3.6]]
ref_width = (5 - 1.2) / 3
ref_references = [1.2 + ref_width / 2, 1.2 + 3 * ref_width / 2]
ref_boundaries = [(c - ref_width / 2, c + ref_width / 2) for c in ref_references]
number_slicer = NumberOfIntervalsSlicer(3, min_n_points=3, min_n_intervals=2)
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_almost_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_almost_equal(my_boundaries[i], ref_boundaries[i])
# n=3 include_max=False
ref_intervals = [[1.2, 1.5, 2.4], [2.5, 2.6, 3.1, 3.5, 3.6], [4.0]]
ref_width = (5 - 1.2) / 3
ref_references = [1.2 + ref_width / 2, 1.2 + 3 * ref_width / 2, 1.2 + 5 * ref_width / 2]
ref_boundaries = [(c - ref_width / 2, c + ref_width / 2) for c in ref_references]
number_slicer = NumberOfIntervalsSlicer(3, include_max=False, min_n_points=1)
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_almost_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_almost_equal(my_boundaries[i], ref_boundaries[i])
# n=2 include_max=True, value_range=(0, 5)
ref_intervals = [[1.2, 1.5, 2.4], [2.5, 2.6, 3.1, 3.5, 3.6, 4.0, 5.0]]
ref_references = [1.25, 3.75]
ref_boundaries = [[0, 2.5], [2.5, 5.0]]
number_slicer = NumberOfIntervalsSlicer(2, value_range=(0, 5), min_n_points=1)
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# n=2 include_max=True, reference=np.median
ref_intervals = [[1.2, 1.5, 2.4, 2.5, 2.6], [3.1, 3.5, 3.6, 4.0, 5.0]]
ref_references = [2.4, 3.6]
ref_boundaries = [[1.2, 3.1], [3.1, 5.0]]
number_slicer = NumberOfIntervalsSlicer(2, reference=np.median, min_n_points=1)
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_almost_equal(my_boundaries[i], ref_boundaries[i])
# n=3 include_max=True, min_n_points=3, min_n_intervals=3
ref_intervals = [[1.2, 1.5, 2.4], [2.5, 2.6, 3.1, 3.5, 3.6]]
ref_width = (5 - 1.2) / 3
ref_references = [1.2 + ref_width / 2, 1.2 + 3 * ref_width / 2]
ref_boundaries = [(c - ref_width / 2, c + ref_width / 2) for c in ref_references]
number_slicer = NumberOfIntervalsSlicer(3, min_n_points=3, min_n_intervals=3)
try:
my_slices, my_references, my_boundaries = number_slicer.slice_(test_data)
except RuntimeError:
pass  # a RuntimeError is expected when fewer than min_n_intervals intervals remain
# %% test WidthOfIntervalSlicer()
# reference="left", right_open,
ref_references = [1, 2, 3, 4, 5]
ref_intervals = [[1.2], [1.5, 2.4], [2.5, 2.6, 3.1], [3.5, 3.6, 4.0], [5.0]]
ref_boundaries = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5], [3.5, 4.5], [4.5, 5.5]]
width_slicer = WidthOfIntervalSlicer(width=1, reference="left", min_n_points=1)
my_slices, my_references, my_boundaries = width_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# reference="left", left_open
ref_references = [1, 2, 3, 4, 5]
ref_intervals = [[1.2, 1.5], [2.4, 2.5], [2.6, 3.1, 3.5], [3.6, 4.0], [5.0]]
ref_boundaries = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5], [3.5, 4.5], [4.5, 5.5]]
width_slicer = WidthOfIntervalSlicer(
width=1, reference="left", right_open=False, min_n_points=1
)
my_slices, my_references, my_boundaries = width_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# reference="center", right_open
ref_references = [1.5, 2.5, 3.5, 4.5, 5.5]
ref_intervals = [[1.2, 1.5], [2.4, 2.5, 2.6], [3.1, 3.5, 3.6], [4.0], [5.0]]
ref_boundaries = [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6]]
width_slicer = WidthOfIntervalSlicer(width=1, min_n_points=1)
my_slices, my_references, my_boundaries = width_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# reference="center", left_open
ref_references = [1.5, 2.5, 3.5, 4.5]
ref_intervals = [[1.2, 1.5], [2.4, 2.5, 2.6], [3.1, 3.5, 3.6, 4.0], [5.0]]
ref_boundaries = [[1, 2], [2, 3], [3, 4], [4, 5]]
width_slicer = WidthOfIntervalSlicer(width=1, right_open=False, min_n_points=1)
my_slices, my_references, my_boundaries = width_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# reference=np.median, right_open
ref_intervals = [[1.2], [1.5, 2.4], [2.5, 2.6, 3.1], [3.5, 3.6, 4.0], [5.0]]
ref_references = [np.median(x) for x in ref_intervals]
ref_boundaries = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5], [3.5, 4.5], [4.5, 5.5]]
width_slicer = WidthOfIntervalSlicer(width=1, reference=np.median, min_n_points=1)
my_slices, my_references, my_boundaries = width_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
# reference="left", right_open, min_n_points=2
ref_references = [2, 3, 4]
ref_intervals = [[1.5, 2.4], [2.5, 2.6, 3.1], [3.5, 3.6, 4.0]]
ref_boundaries = [[1.5, 2.5], [2.5, 3.5], [3.5, 4.5]]
width_slicer = WidthOfIntervalSlicer(width=1, reference="left", min_n_points=2)
my_slices, my_references, my_boundaries = width_slicer.slice_(test_data)
my_intervals = [test_data[slice_] for slice_ in my_slices]
np.testing.assert_array_equal(my_references, ref_references)
assert len(my_intervals) == len(ref_intervals)
for i in range(len(my_intervals)):
np.testing.assert_array_equal(my_intervals[i], ref_intervals[i])
for i in range(len(my_boundaries)):
np.testing.assert_array_equal(my_boundaries[i], ref_boundaries[i])
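# --- Illustrative sketch (not part of the original tests) ---
# A minimal reimplementation of the width-based slicing semantics exercised
# above: data are binned into intervals of fixed width on a grid with
# boundaries like [0.5, 1.5), and right_open controls which edge is open.
# simple_width_slices is a hypothetical helper for illustration, not the
# library API; mask.any() plays the role of min_n_points=1 (drop empty bins).
import numpy as np

def simple_width_slices(data, width=1.0, right_open=True):
    data = np.asarray(data)
    centers = np.arange(np.round(data.min() / width),
                        np.round(data.max() / width) + 1) * width
    slices, boundaries = [], []
    for c in centers:
        lo, hi = c - width / 2, c + width / 2
        if right_open:
            mask = (data >= lo) & (data < hi)
        else:
            mask = (data > lo) & (data <= hi)
        if mask.any():
            slices.append(np.nonzero(mask)[0])
            boundaries.append((lo, hi))
    return slices, boundaries

# Example: reproduces the right_open grouping [[1.2], [1.5, 2.4], ...] used above.
_demo = np.array([1.2, 1.5, 2.4, 2.5, 2.6, 3.1, 3.5, 3.6, 4.0, 5.0])
for idx, (lo, hi) in zip(*simple_width_slices(_demo)):
    print(list(_demo[idx]), (lo, hi))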
| 49.561644
| 88
| 0.733347
| 2,627
| 14,472
| 3.773887
| 0.030072
| 0.079887
| 0.081703
| 0.090781
| 0.951684
| 0.942001
| 0.934335
| 0.930401
| 0.928888
| 0.924148
| 0
| 0.060557
| 0.108831
| 14,472
| 291
| 89
| 49.731959
| 0.708149
| 0.055832
| 0
| 0.763713
| 0
| 0
| 0.00088
| 0
| 0
| 0
| 0
| 0
| 0.303797
| 1
| 0
| false
| 0.004219
| 0.008439
| 0
| 0.008439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1da0d37d4a8dd1053cdc3b0ffe5d4fc99117b821
| 14,691
|
py
|
Python
|
asv_bench/benchmarks/join_merge.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | 2
|
2017-05-07T02:08:12.000Z
|
2017-12-14T01:47:47.000Z
|
asv_bench/benchmarks/join_merge.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
asv_bench/benchmarks/join_merge.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2020-12-09T12:02:39.000Z
|
2020-12-09T12:02:39.000Z
|
from .pandas_vb_common import *
class append_frame_single_homogenous(object):
goal_time = 0.2
def setup(self):
self.df1 = pd.DataFrame(np.random.randn(10000, 4), columns=['A', 'B', 'C', 'D'])
self.df2 = self.df1.copy()
self.df2.index = np.arange(10000, 20000)
self.mdf1 = self.df1.copy()
self.mdf1['obj1'] = 'bar'
self.mdf1['obj2'] = 'bar'
self.mdf1['int1'] = 5
try:
self.mdf1.consolidate(inplace=True)
except:
pass
self.mdf2 = self.mdf1.copy()
self.mdf2.index = self.df2.index
def time_append_frame_single_homogenous(self):
self.df1.append(self.df2)
class append_frame_single_mixed(object):
goal_time = 0.2
def setup(self):
self.df1 = pd.DataFrame(np.random.randn(10000, 4), columns=['A', 'B', 'C', 'D'])
self.df2 = self.df1.copy()
self.df2.index = np.arange(10000, 20000)
self.mdf1 = self.df1.copy()
self.mdf1['obj1'] = 'bar'
self.mdf1['obj2'] = 'bar'
self.mdf1['int1'] = 5
try:
self.mdf1.consolidate(inplace=True)
except:
pass
self.mdf2 = self.mdf1.copy()
self.mdf2.index = self.df2.index
def time_append_frame_single_mixed(self):
self.mdf1.append(self.mdf2)
class concat_empty_frames1(object):
goal_time = 0.2
def setup(self):
self.df = pd.DataFrame(dict(A=range(10000)), index=date_range('20130101', periods=10000, freq='s'))
self.empty = pd.DataFrame()
def time_concat_empty_frames1(self):
concat([self.df, self.empty])
class concat_empty_frames2(object):
goal_time = 0.2
def setup(self):
self.df = pd.DataFrame(dict(A=range(10000)), index=date_range('20130101', periods=10000, freq='s'))
self.empty = pd.DataFrame()
def time_concat_empty_frames2(self):
concat([self.empty, self.df])
class concat_series_axis1(object):
goal_time = 0.2
def setup(self):
self.n = 1000
self.indices = tm.makeStringIndex(1000)
self.s = Series(self.n, index=self.indices)
self.pieces = [self.s[i:(- i)] for i in range(1, 10)]
self.pieces = (self.pieces * 50)
def time_concat_series_axis1(self):
concat(self.pieces, axis=1)
class concat_small_frames(object):
goal_time = 0.2
def setup(self):
self.df = pd.DataFrame(randn(5, 4))
def time_concat_small_frames(self):
concat(([self.df] * 1000))
class i8merge(object):
goal_time = 0.2
def setup(self):
(low, high, n) = (((-1) << 10), (1 << 10), (1 << 20))  # low=-1024, high=1024, n=2**20 rows
self.left = pd.DataFrame(np.random.randint(low, high, (n, 7)), columns=list('ABCDEFG'))
self.left['left'] = self.left.sum(axis=1)
self.i = np.random.permutation(len(self.left))
self.right = self.left.iloc[self.i].copy()
self.right.columns = (self.right.columns[:(-1)].tolist() + ['right'])
self.right.index = np.arange(len(self.right))
self.right['right'] *= (-1)
def time_i8merge(self):
merge(self.left, self.right, how='outer')
class join_dataframe_index_multi(object):
goal_time = 0.2
def setup(self):
self.level1 = tm.makeStringIndex(10).values
self.level2 = tm.makeStringIndex(1000).values
self.label1 = np.arange(10).repeat(1000)
self.label2 = np.tile(np.arange(1000), 10)
self.key1 = np.tile(self.level1.take(self.label1), 10)
self.key2 = np.tile(self.level2.take(self.label2), 10)
self.shuf = np.arange(100000)
random.shuffle(self.shuf)
try:
self.index2 = MultiIndex(levels=[self.level1, self.level2], labels=[self.label1, self.label2])
self.index3 = MultiIndex(levels=[np.arange(10), np.arange(100), np.arange(100)], labels=[np.arange(10).repeat(10000), np.tile(np.arange(100).repeat(100), 10), np.tile(np.tile(np.arange(100), 100), 10)])
self.df_multi = DataFrame(np.random.randn(len(self.index2), 4), index=self.index2, columns=['A', 'B', 'C', 'D'])
except:
pass
try:
self.DataFrame = DataMatrix
except:
pass
self.df = pd.DataFrame({'data1': np.random.randn(100000), 'data2': np.random.randn(100000), 'key1': self.key1, 'key2': self.key2, })
self.df_key1 = pd.DataFrame(np.random.randn(len(self.level1), 4), index=self.level1, columns=['A', 'B', 'C', 'D'])
self.df_key2 = pd.DataFrame(np.random.randn(len(self.level2), 4), index=self.level2, columns=['A', 'B', 'C', 'D'])
self.df_shuf = self.df.reindex(self.df.index[self.shuf])
def time_join_dataframe_index_multi(self):
self.df.join(self.df_multi, on=['key1', 'key2'])
class join_dataframe_index_single_key_bigger(object):
goal_time = 0.2
def setup(self):
self.level1 = tm.makeStringIndex(10).values
self.level2 = tm.makeStringIndex(1000).values
self.label1 = np.arange(10).repeat(1000)
self.label2 = np.tile(np.arange(1000), 10)
self.key1 = np.tile(self.level1.take(self.label1), 10)
self.key2 = np.tile(self.level2.take(self.label2), 10)
self.shuf = np.arange(100000)
random.shuffle(self.shuf)
try:
self.index2 = MultiIndex(levels=[self.level1, self.level2], labels=[self.label1, self.label2])
self.index3 = MultiIndex(levels=[np.arange(10), np.arange(100), np.arange(100)], labels=[np.arange(10).repeat(10000), np.tile(np.arange(100).repeat(100), 10), np.tile(np.tile(np.arange(100), 100), 10)])
self.df_multi = DataFrame(np.random.randn(len(self.index2), 4), index=self.index2, columns=['A', 'B', 'C', 'D'])
except:
pass
try:
self.DataFrame = DataMatrix
except:
pass
self.df = pd.DataFrame({'data1': np.random.randn(100000), 'data2': np.random.randn(100000), 'key1': self.key1, 'key2': self.key2, })
self.df_key1 = pd.DataFrame(np.random.randn(len(self.level1), 4), index=self.level1, columns=['A', 'B', 'C', 'D'])
self.df_key2 = pd.DataFrame(np.random.randn(len(self.level2), 4), index=self.level2, columns=['A', 'B', 'C', 'D'])
self.df_shuf = self.df.reindex(self.df.index[self.shuf])
def time_join_dataframe_index_single_key_bigger(self):
self.df.join(self.df_key2, on='key2')
class join_dataframe_index_single_key_bigger_sort(object):
goal_time = 0.2
def setup(self):
self.level1 = tm.makeStringIndex(10).values
self.level2 = tm.makeStringIndex(1000).values
self.label1 = np.arange(10).repeat(1000)
self.label2 = np.tile(np.arange(1000), 10)
self.key1 = np.tile(self.level1.take(self.label1), 10)
self.key2 = np.tile(self.level2.take(self.label2), 10)
self.shuf = np.arange(100000)
random.shuffle(self.shuf)
try:
self.index2 = MultiIndex(levels=[self.level1, self.level2], labels=[self.label1, self.label2])
self.index3 = MultiIndex(levels=[np.arange(10), np.arange(100), np.arange(100)], labels=[np.arange(10).repeat(10000), np.tile(np.arange(100).repeat(100), 10), np.tile(np.tile(np.arange(100), 100), 10)])
self.df_multi = DataFrame(np.random.randn(len(self.index2), 4), index=self.index2, columns=['A', 'B', 'C', 'D'])
except:
pass
try:
self.DataFrame = DataMatrix
except:
pass
self.df = pd.DataFrame({'data1': np.random.randn(100000), 'data2': np.random.randn(100000), 'key1': self.key1, 'key2': self.key2, })
self.df_key1 = pd.DataFrame(np.random.randn(len(self.level1), 4), index=self.level1, columns=['A', 'B', 'C', 'D'])
self.df_key2 = pd.DataFrame(np.random.randn(len(self.level2), 4), index=self.level2, columns=['A', 'B', 'C', 'D'])
self.df_shuf = self.df.reindex(self.df.index[self.shuf])
def time_join_dataframe_index_single_key_bigger_sort(self):
self.df_shuf.join(self.df_key2, on='key2', sort=True)
class join_dataframe_index_single_key_small(object):
goal_time = 0.2
def setup(self):
self.level1 = tm.makeStringIndex(10).values
self.level2 = tm.makeStringIndex(1000).values
self.label1 = np.arange(10).repeat(1000)
self.label2 = np.tile(np.arange(1000), 10)
self.key1 = np.tile(self.level1.take(self.label1), 10)
self.key2 = np.tile(self.level2.take(self.label2), 10)
self.shuf = np.arange(100000)
random.shuffle(self.shuf)
try:
self.index2 = MultiIndex(levels=[self.level1, self.level2], labels=[self.label1, self.label2])
self.index3 = MultiIndex(levels=[np.arange(10), np.arange(100), np.arange(100)], labels=[np.arange(10).repeat(10000), np.tile(np.arange(100).repeat(100), 10), np.tile(np.tile(np.arange(100), 100), 10)])
self.df_multi = DataFrame(np.random.randn(len(self.index2), 4), index=self.index2, columns=['A', 'B', 'C', 'D'])
except:
pass
try:
self.DataFrame = DataMatrix
except:
pass
self.df = pd.DataFrame({'data1': np.random.randn(100000), 'data2': np.random.randn(100000), 'key1': self.key1, 'key2': self.key2, })
self.df_key1 = pd.DataFrame(np.random.randn(len(self.level1), 4), index=self.level1, columns=['A', 'B', 'C', 'D'])
self.df_key2 = pd.DataFrame(np.random.randn(len(self.level2), 4), index=self.level2, columns=['A', 'B', 'C', 'D'])
self.df_shuf = self.df.reindex(self.df.index[self.shuf])
def time_join_dataframe_index_single_key_small(self):
self.df.join(self.df_key1, on='key1')
class join_dataframe_integer_2key(object):
goal_time = 0.2
def setup(self):
self.df = pd.DataFrame({'key1': np.tile(np.arange(500).repeat(10), 2), 'key2': np.tile(np.arange(250).repeat(10), 4), 'value': np.random.randn(10000), })
self.df2 = pd.DataFrame({'key1': np.arange(500), 'value2': randn(500), })
self.df3 = self.df[:5000]
def time_join_dataframe_integer_2key(self):
merge(self.df, self.df3)
class join_dataframe_integer_key(object):
goal_time = 0.2
def setup(self):
self.df = pd.DataFrame({'key1': np.tile(np.arange(500).repeat(10), 2), 'key2': np.tile(np.arange(250).repeat(10), 4), 'value': np.random.randn(10000), })
self.df2 = pd.DataFrame({'key1': np.arange(500), 'value2': randn(500), })
self.df3 = self.df[:5000]
def time_join_dataframe_integer_key(self):
merge(self.df, self.df2, on='key1')
class join_non_unique_equal(object):
goal_time = 0.2
def setup(self):
self.date_index = date_range('01-Jan-2013', '23-Jan-2013', freq='T')
self.daily_dates = self.date_index.to_period('D').to_timestamp('S', 'S')
self.fracofday = (self.date_index.view(np.ndarray) - self.daily_dates.view(np.ndarray))
self.fracofday = (self.fracofday.astype('timedelta64[ns]').astype(np.float64) / 86400000000000.0)
self.fracofday = TimeSeries(self.fracofday, self.daily_dates)
self.index = date_range(self.date_index.min().to_period('A').to_timestamp('D', 'S'), self.date_index.max().to_period('A').to_timestamp('D', 'E'), freq='D')
self.temp = TimeSeries(1.0, self.index)
def time_join_non_unique_equal(self):
(self.fracofday * self.temp[self.fracofday.index])
class left_outer_join_index(object):
goal_time = 0.2
def setup(self):
np.random.seed(2718281)
self.n = 50000
self.left = pd.DataFrame(np.random.randint(1, (self.n / 500), (self.n, 2)), columns=['jim', 'joe'])
self.right = pd.DataFrame(np.random.randint(1, (self.n / 500), (self.n, 2)), columns=['jolie', 'jolia']).set_index('jolie')
def time_left_outer_join_index(self):
self.left.join(self.right, on='jim')
class merge_2intkey_nosort(object):
goal_time = 0.2
def setup(self):
self.N = 10000
self.indices = tm.makeStringIndex(self.N).values
self.indices2 = tm.makeStringIndex(self.N).values
self.key = np.tile(self.indices[:8000], 10)
self.key2 = np.tile(self.indices2[:8000], 10)
self.left = pd.DataFrame({'key': self.key, 'key2': self.key2, 'value': np.random.randn(80000), })
self.right = pd.DataFrame({'key': self.indices[2000:], 'key2': self.indices2[2000:], 'value2': np.random.randn(8000), })
def time_merge_2intkey_nosort(self):
merge(self.left, self.right, sort=False)
class merge_2intkey_sort(object):
goal_time = 0.2
def setup(self):
self.N = 10000
self.indices = tm.makeStringIndex(self.N).values
self.indices2 = tm.makeStringIndex(self.N).values
self.key = np.tile(self.indices[:8000], 10)
self.key2 = np.tile(self.indices2[:8000], 10)
self.left = pd.DataFrame({'key': self.key, 'key2': self.key2, 'value': np.random.randn(80000), })
self.right = pd.DataFrame({'key': self.indices[2000:], 'key2': self.indices2[2000:], 'value2': np.random.randn(8000), })
def time_merge_2intkey_sort(self):
merge(self.left, self.right, sort=True)
class series_align_int64_index(object):
goal_time = 0.2
def setup(self):
self.n = 1000000
self.sz = 500000
self.rng = np.arange(0, 10000000000000, 10000000)
self.stamps = (np.datetime64(datetime.now()).view('i8') + self.rng)
self.idx1 = np.sort(self.sample(self.stamps, self.sz))
self.idx2 = np.sort(self.sample(self.stamps, self.sz))
self.ts1 = Series(np.random.randn(self.sz), self.idx1)
self.ts2 = Series(np.random.randn(self.sz), self.idx2)
def time_series_align_int64_index(self):
(self.ts1 + self.ts2)
def sample(self, values, k):
self.sampler = np.random.permutation(len(values))
return values.take(self.sampler[:k])
class series_align_left_monotonic(object):
goal_time = 0.2
def setup(self):
self.n = 1000000
self.sz = 500000
self.rng = np.arange(0, 10000000000000, 10000000)
self.stamps = (np.datetime64(datetime.now()).view('i8') + self.rng)
self.idx1 = np.sort(self.sample(self.stamps, self.sz))
self.idx2 = np.sort(self.sample(self.stamps, self.sz))
self.ts1 = Series(np.random.randn(self.sz), self.idx1)
self.ts2 = Series(np.random.randn(self.sz), self.idx2)
def time_series_align_left_monotonic(self):
self.ts1.align(self.ts2, join='left')
def sample(self, values, k):
self.sampler = np.random.permutation(len(values))
return values.take(self.sampler[:k])
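# --- Illustrative sketch (not part of the original benchmark file) ---
# The classes above follow the airspeed velocity (asv) convention: setup() is
# called first, then every method whose name starts with "time_" is timed
# repeatedly. A minimal stand-in harness using only the stdlib looks roughly
# like this (run_benchmark is a hypothetical helper, not an asv API):
import timeit

def run_benchmark(bench_cls, repeat=3, number=1):
    bench = bench_cls()
    bench.setup()
    for name in dir(bench):
        if name.startswith('time_'):
            method = getattr(bench, name)
            best = min(timeit.repeat(method, repeat=repeat, number=number))
            print(f'{bench_cls.__name__}.{name}: {best:.4f}s (best of {repeat})')

# Example usage (assuming the classes defined above are importable):
# run_benchmark(concat_small_frames)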
| 40.922006
| 214
| 0.621809
| 2,109
| 14,691
| 4.241347
| 0.093409
| 0.032197
| 0.046506
| 0.031861
| 0.807937
| 0.803689
| 0.781778
| 0.769927
| 0.757406
| 0.749581
| 0
| 0.079227
| 0.206997
| 14,691
| 359
| 215
| 40.922006
| 0.688584
| 0
| 0
| 0.704626
| 0
| 0
| 0.02777
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142349
| false
| 0.035587
| 0.003559
| 0
| 0.288256
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d561fce0935ed105033c0ec4df8f6c71b875dbb3
| 99
|
py
|
Python
|
grayscale/math/pow.py
|
KennethanCeyer/grayscale
|
646a11ea47f2120f317e554c736d8054aa55c4c4
|
[
"MIT"
] | null | null | null |
grayscale/math/pow.py
|
KennethanCeyer/grayscale
|
646a11ea47f2120f317e554c736d8054aa55c4c4
|
[
"MIT"
] | null | null | null |
grayscale/math/pow.py
|
KennethanCeyer/grayscale
|
646a11ea47f2120f317e554c736d8054aa55c4c4
|
[
"MIT"
] | null | null | null |
from math import pow as math_pow
def pow(x: float, y: float) -> float:
return math_pow(x, y)
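# Usage sketch (an addition for illustration): the wrapper simply delegates to
# math.pow, so a float is returned even for integral results.
if __name__ == '__main__':
    assert pow(2.0, 10.0) == 1024.0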
| 16.5
| 37
| 0.666667
| 19
| 99
| 3.368421
| 0.526316
| 0.21875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 99
| 5
| 38
| 19.8
| 0.831169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d5aa1cdb2a0164d7440cb8dc29f430a3155f1dce
| 72,128
|
py
|
Python
|
exampleLaplaceRun.py
|
eugenewickett/logistigateanalysis
|
5174f40db5f79bfd12491850cef53edde825b71b
|
[
"MIT"
] | null | null | null |
exampleLaplaceRun.py
|
eugenewickett/logistigateanalysis
|
5174f40db5f79bfd12491850cef53edde825b71b
|
[
"MIT"
] | null | null | null |
exampleLaplaceRun.py
|
eugenewickett/logistigateanalysis
|
5174f40db5f79bfd12491850cef53edde825b71b
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.optimize as spo
import scipy.special as sps
# Workaround for the 'methods' file not being able to locate the 'mcmcsamplers' folder for importing
import sys
import os
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, 'logistigate','logistigate')))
import utilities as util # Pull from the submodule "develop" branch
import methods as methods # Pull from the submodule "develop" branch
def examiningLaplaceApprox():
'''
This script gives a detailed, step-by-step breakdown of the Laplace
approximation process. The goal is to understand why negative Hessian
diagonal values are so common.
'''
# First generate a random system using a fixed seed
newSysDict = util.generateRandDataDict(numImp=3, numOut=10, numSamples=50 * 20,
diagSens=0.9, diagSpec=0.99,
dataType='Tracked', randSeed=5)
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=3)}) # Set prior variance here
#import inspect
#lines = inspect.getsource(methods.prior_normal)
#print(lines)
# Form Laplace approximation estimates
outDict = methods.FormEstimates(newSysDict, retOptStatus=True)
print(np.diag(outDict['hess'])) # Negative diagonals present
print(np.diag(outDict['hessinv']))
soln = np.append(outDict['impEst'],outDict['outEst'])
soln_trans = sps.logit(soln) # Transformed solution
# Check Jacobian + Hessian at this solution point
soln_jac = methods.Tracked_LogPost_Grad(soln_trans,newSysDict['N'], newSysDict['Y'],
newSysDict['diagSens'], newSysDict['diagSpec'],
prior=newSysDict['prior'])
soln_hess = methods.Tracked_LogPost_Hess(soln_trans, newSysDict['N'], newSysDict['Y'],
newSysDict['diagSens'], newSysDict['diagSpec'],
prior=newSysDict['prior'])
print(soln_jac) # Gradient seems within tolerance of 0
print(np.diag(soln_hess))
# Check 2nd-order derivatives at this point
(nOut, nImp) = newSysDict['N'].shape
# Use a non-default prior
# prior = methods.prior_normal(mu=1, var=2)
# Grab the likelihood and gradient at beta0
dL0 = methods.Tracked_LogPost_Grad(soln_trans, newSysDict['N'], newSysDict['Y'],
newSysDict['diagSens'], newSysDict['diagSpec'],
prior=newSysDict['prior'])
ddL0 = methods.Tracked_LogPost_Hess(soln_trans, newSysDict['N'], newSysDict['Y'],
newSysDict['diagSens'], newSysDict['diagSpec'],
prior=newSysDict['prior'])
print(np.diag(ddL0))
# Perturb each coordinate by 1e-5 and compare the finite-difference change in the gradient against the corresponding Hessian row
for k in range(nImp + nOut):
beta1 = 1 * soln_trans[:]
beta1[k] = beta1[k] + 10 ** (-5)
dL1 = methods.Tracked_LogPost_Grad(beta1, newSysDict['N'], newSysDict['Y'],
newSysDict['diagSens'], newSysDict['diagSpec'],
prior=newSysDict['prior'])
print((dL1 - dL0) * (10 ** (5)))
print(ddL0[k])
#print((dL1 - dL0) * (10 ** (5)) - ddL0[k])
print(np.linalg.norm((dL1 - dL0) * (10 ** (5)) - ddL0[k]))
# Do it line by line here (from methods.FormEstimates)
N, Y = newSysDict['N'], newSysDict['Y']
Sens, Spec = newSysDict['diagSens'], newSysDict['diagSpec']
prior = newSysDict['prior']
(numOut, numImp) = N.shape
beta0_List = []
for sampNum in range(10): # Generate 10 random samples via the prior
beta0_List.append(prior.rand(numImp + numOut))
# Loop through each random initial point and store the optimized objective (negative log-posterior) values
likelihoodsList = []
solsList = []
OptStatusList = []
bds = spo.Bounds(np.zeros(numImp + numOut) - 8, np.zeros(numImp + numOut) + 8)
for curr_beta0 in beta0_List:
opVal = spo.minimize(methods.Tracked_NegLogPost, curr_beta0,
args=(N, Y, Sens, Spec, prior), method='L-BFGS-B',
jac=methods.Tracked_NegLogPost_Grad,
options={'disp': False}, bounds=bds)
likelihoodsList.append(opVal.fun)
solsList.append(opVal.x)
OptStatusList.append(opVal.status) # 0 means convergence; alternatively, use opVal.message
print(likelihoodsList) # Check that our restarts are giving similar solutions
best_x = solsList[np.argmin(likelihoodsList)]
jac = methods.Tracked_LogPost_Grad(best_x, N, Y, Sens, Spec, prior)
hess = methods.Tracked_NegLogPost_Hess(best_x, N, Y, Sens, Spec, prior)
print(jac)
print(np.diag(hess))
print(np.diag(soln_hess))
# Check 2nd-order derivatives at this point
(nOut, nImp) = N.shape
# Use a non-default prior
#prior = methods.prior_normal(mu=1, var=2)
# Grab the likelihood and gradient at beta0
dL0 = methods.Tracked_LogPost_Grad(best_x, N, Y, Sens, Spec, prior)
ddL0 = methods.Tracked_LogPost_Hess(best_x, N, Y, Sens, Spec, prior)
print(np.diag(ddL0))
# Perturb each coordinate by 1e-5 and compare the finite-difference change in the gradient against the corresponding Hessian row
for k in range(nImp + nOut):
beta1 = 1 * best_x[:]
beta1[k] = beta1[k] + 10 ** (-5)
dL1 = methods.Tracked_LogPost_Grad(beta1, N, Y, Sens, Spec, prior)
print((dL1 - dL0) * (10 ** (5)) - ddL0[k])
print(np.linalg.norm((dL1 - dL0) * (10 ** (5)) - ddL0[k]))
return
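# --- Illustrative sketch (not part of the original script) ---
# The coordinate-perturbation loops above amount to a forward-difference check
# of an analytic Hessian against the gradient. A generic version of that
# check, assuming only NumPy (check_hessian is a hypothetical helper):
import numpy as np

def check_hessian(grad_fn, hess_fn, x, eps=1e-5):
    """Return the max row-wise deviation between hess_fn(x) and a
    forward-difference Hessian built from grad_fn."""
    x = np.asarray(x, dtype=float)
    H = np.asarray(hess_fn(x))
    g0 = np.asarray(grad_fn(x))
    worst = 0.0
    for k in range(x.size):
        x1 = x.copy()
        x1[k] += eps
        fd_row = (np.asarray(grad_fn(x1)) - g0) / eps  # approximates H[k, :]
        worst = max(worst, np.linalg.norm(fd_row - H[k]))
    return worst

# Example with f(x) = 0.5 x'Ax, whose gradient is Ax and whose Hessian is A:
A = np.array([[2.0, 0.5], [0.5, 1.0]])
print(check_hessian(lambda x: A @ x, lambda x: A, np.ones(2)))  # ~0 up to floating error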
def laplaceTests():
'''
A script for checking how well the Laplace approximation produces credible intervals.
'''
# Check L-BFGS exit flags for some random systems
for randSys in range(10):
newSysDict = util.generateRandDataDict(numImp=10, numOut=100, numSamples=100 * 20,
dataType='Tracked')
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal()})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True)
print(np.sum(outDict['optStatus']))
# Ran for 100 systems of size 10/100; no instances of an unsuccessful optimizer exit
# Check the generated Hessian diagonals WRT the prior variance; try 4 different system sizes
priorVarList = [1,5,9]
numSystems = 100
resultsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion of Hessian diagonals that are negative
avgdevsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for SIZE of negative diagonals
percOutMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion of negative diagonals that are outlets
percNegEigMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion negative eigenvalues
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 5, 20
newSysDict = util.generateRandDataDict(numImp=5, numOut=20, numSamples=20 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i<0]
resultsMat_5_20[currVarInd, randSysInd] = len(negDiags)/totalEnts
# Guard the empty case: np.average([]) is nan and len(negDiags) may be zero
avgdevsMat_5_20[currVarInd, randSysInd] = np.average(negDiags) if negDiags else 0
percOutMat_5_20[currVarInd, randSysInd] = (len([i for i in currHessDiags[5:] if i < 0]) / len(negDiags)) if negDiags else 0
percNegEigMat_5_20[currVarInd, randSysInd] = len([i for i in np.linalg.eigvals(outDict['hess']) if i<0])/totalEnts
resultsMat_10_40 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_10_40 = np.zeros((len(priorVarList), numSystems))
percOutMat_10_40 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_10_40 = np.zeros((len(priorVarList), numSystems))
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 10, 40
newSysDict = util.generateRandDataDict(numImp=10, numOut=40, numSamples=40 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
resultsMat_10_40[currVarInd, randSysInd] = len(negDiags) / totalEnts
# Guard the empty case: np.average([]) is nan and len(negDiags) may be zero
avgdevsMat_10_40[currVarInd, randSysInd] = np.average(negDiags) if negDiags else 0
percOutMat_10_40[currVarInd, randSysInd] = (len([i for i in currHessDiags[10:] if i < 0]) / len(negDiags)) if negDiags else 0
percNegEigMat_10_40[currVarInd, randSysInd] = len([i for i in np.linalg.eigvals(outDict['hess']) if i < 0]) / totalEnts
resultsMat_15_60 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_15_60 = np.zeros((len(priorVarList), numSystems))
percOutMat_15_60 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_15_60 = np.zeros((len(priorVarList), numSystems))
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 15, 60
newSysDict = util.generateRandDataDict(numImp=15, numOut=60, numSamples=60 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
resultsMat_15_60[currVarInd, randSysInd] = len(negDiags) / totalEnts
# Guard the empty case: np.average([]) is nan and len(negDiags) may be zero
avgdevsMat_15_60[currVarInd, randSysInd] = np.average(negDiags) if negDiags else 0
percOutMat_15_60[currVarInd, randSysInd] = (len([i for i in currHessDiags[15:] if i < 0]) / len(negDiags)) if negDiags else 0
percNegEigMat_15_60[currVarInd, randSysInd] = len([i for i in np.linalg.eigvals(outDict['hess']) if i < 0]) / totalEnts
resultsMat_15_100 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_15_100 = np.zeros((len(priorVarList), numSystems))
percOutMat_15_100 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_15_100 = np.zeros((len(priorVarList), numSystems))
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 15, 100
newSysDict = util.generateRandDataDict(numImp=15, numOut=100, numSamples=100 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
resultsMat_15_100[currVarInd, randSysInd] = len(negDiags) / totalEnts
# Guard the empty case: np.average([]) is nan and len(negDiags) may be zero
avgdevsMat_15_100[currVarInd, randSysInd] = np.average(negDiags) if negDiags else 0
percOutMat_15_100[currVarInd, randSysInd] = (len([i for i in currHessDiags[15:] if i < 0]) / len(negDiags)) if negDiags else 0
percNegEigMat_15_100[currVarInd, randSysInd] = len([i for i in np.linalg.eigvals(outDict['hess']) if i < 0]) / totalEnts
resultsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
resultsSummaryMat[currVarInd, 0] = np.quantile(resultsMat_5_20[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 1] = np.quantile(resultsMat_5_20[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 2] = np.quantile(resultsMat_10_40[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 3] = np.quantile(resultsMat_10_40[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 4] = np.quantile(resultsMat_15_60[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 5] = np.quantile(resultsMat_15_60[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 6] = np.quantile(resultsMat_15_100[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 7] = np.quantile(resultsMat_15_100[currVarInd, :], 0.95)
avgdevsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
avgdevsSummaryMat[currVarInd, 0] = np.quantile(avgdevsMat_5_20[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 1] = np.quantile(avgdevsMat_5_20[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 2] = np.quantile(avgdevsMat_10_40[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 3] = np.quantile(avgdevsMat_10_40[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 4] = np.quantile(avgdevsMat_15_60[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 5] = np.quantile(avgdevsMat_15_60[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 6] = np.quantile(avgdevsMat_15_100[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 7] = np.quantile(avgdevsMat_15_100[currVarInd, :], 0.95)
percOutSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
percOutSummaryMat[currVarInd, 0] = np.quantile(percOutMat_5_20[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 1] = np.quantile(percOutMat_5_20[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 2] = np.quantile(percOutMat_10_40[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 3] = np.quantile(percOutMat_10_40[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 4] = np.quantile(percOutMat_15_60[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 5] = np.quantile(percOutMat_15_60[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 6] = np.quantile(percOutMat_15_100[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 7] = np.quantile(percOutMat_15_100[currVarInd, :], 0.95)
percNegEigSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
percNegEigSummaryMat[currVarInd, 0] = np.quantile(percNegEigMat_5_20[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 1] = np.quantile(percNegEigMat_5_20[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 2] = np.quantile(percNegEigMat_10_40[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 3] = np.quantile(percNegEigMat_10_40[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 4] = np.quantile(percNegEigMat_15_60[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 5] = np.quantile(percNegEigMat_15_60[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 6] = np.quantile(percNegEigMat_15_100[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 7] = np.quantile(percNegEigMat_15_100[currVarInd, :], 0.95)
import matplotlib.pyplot as plt
#from matplotlib.lines import Line2D
#zippedList1 = zip(resultsSummaryMat[:, 0], resultsSummaryMat[:, 1], priorVarList)
#zippedList2 = zip(resultsSummaryMat[:, 2], resultsSummaryMat[:, 3], priorVarList)
#zippedList3 = zip(resultsSummaryMat[:, 4], resultsSummaryMat[:, 5], priorVarList)
#custom_lines = [Line2D([0], [0], color='orange', lw=4),
# Line2D([0], [0], color='red', lw=4),
# Line2D([0], [0], color='purple', lw=4)]
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
#for lower, upper, name in zippedList1:
# plt.plot((name, name), (lower, upper), 'o-', color='orange')
#for lower, upper, name in zippedList2:
# plt.plot((name, name), (lower, upper), 'o-', color='red')
#for lower, upper, name in zippedList3:
# plt.plot((name, name), (lower, upper), 'o-', color='purple')
plt.plot(priorVarList, resultsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title('90% Intervals on PERCENTAGE of Neg. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
#ax.legend(custom_lines, ['5 importers, 20 outlets', '10 importers, 40 outlets', '15 importers, 60 outlets'])
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Size of deviations below 0
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, avgdevsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([-3, 0])
plt.title('90% Intervals on SIZE of Neg. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Percentage of negative diagonals that are outlets
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, percOutSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title('90% Intervals on % THAT ARE OUTLETS of Neg. Hessian Diag. Vals.\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Percentage of eigenvalues that are negative
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, percNegEigSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on % NEG. EIGENVALUES of Hessian\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
return
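# --- Illustrative sketch (not part of the original script) ---
# The summary statistics above reduce to two diagnostics on a (symmetric)
# Hessian: the share of negative diagonal entries and the share of negative
# eigenvalues. A compact version, assuming NumPy only (hess_negativity is a
# hypothetical helper):
import numpy as np

def hess_negativity(H):
    d = np.diag(H)
    eig = np.linalg.eigvalsh(H)  # eigvalsh: H assumed symmetric
    return {'frac_neg_diag': np.mean(d < 0),
            'frac_neg_eig': np.mean(eig < 0),
            'avg_neg_diag': d[d < 0].mean() if (d < 0).any() else 0.0}

# Example: an indefinite matrix can have negative eigenvalues even though
# every diagonal entry is positive, which is why both diagnostics are tracked.
print(hess_negativity(np.array([[1.0, 3.0], [3.0, 1.0]])))  # eigenvalues 4 and -2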
def testtoberemovedlaterforhessanalysis():
for randSys in range(10):
newSysDict = util.generateRandDataDict(numImp=10, numOut=100, numSamples=100 * 20,
dataType='Tracked')
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal()})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True)
print(np.sum(outDict['optStatus']))
# Ran for 100 systems of size 10/100; no instances of an unsuccessful optimizer exit
# Check the generated Hessian diagonals WRT the prior variance; try 4 different system sizes
priorVarList = [0.1,1,3,5,7]
numSystems = 100
resultsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion of Hessian diagonals that are negative
avgdevsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for SIZE of negative diagonals
avgPosdevsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion of negative diagonals that are outlets
percNegEigMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion negative eigenvalues
avgEigMat_5_20 = np.zeros((len(priorVarList), numSystems, 5+20)) # for average eigenvalue size
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 5, 20
newSysDict = util.generateRandDataDict(numImp=5, numOut=20, numSamples=20 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_5_20[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_5_20[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_5_20[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_5_20[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_5_20[currVarInd, randSysInd] = 0
#percOutMat_5_20[currVarInd, randSysInd] = len([i for i in currHessDiags[5:] if i < 0]) / len(negDiags)
percNegEigMat_5_20[currVarInd, randSysInd] = len(
[i for i in np.linalg.eigvals(outDict['hess']) if i < 0]) / totalEnts
avgEigMat_5_20[currVarInd, randSysInd, :] = np.linalg.eigvals(outDict['hess'])
resultsMat_10_40 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_10_40 = np.zeros((len(priorVarList), numSystems))
avgPosdevsMat_10_40 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_10_40 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_10_40 = np.zeros((len(priorVarList), numSystems))
avgEigMat_10_40 = np.zeros((len(priorVarList), numSystems, 10+40)) # for average eigenvalue size
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 10, 40
newSysDict = util.generateRandDataDict(numImp=10, numOut=40, numSamples=40 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_10_40[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_10_40[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_10_40[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_10_40[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_10_40[currVarInd, randSysInd] = 0
#percOutMat_10_40[currVarInd, randSysInd] = len([i for i in currHessDiags[10:] if i < 0]) / len(negDiags)
percNegEigMat_10_40[currVarInd, randSysInd] = len(
[i for i in np.linalg.eigvals(outDict['hess']) if i < 0]) / totalEnts
avgEigMat_10_40[currVarInd, randSysInd, :] = np.linalg.eigvals(outDict['hess'])
resultsMat_15_60 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_15_60 = np.zeros((len(priorVarList), numSystems))
avgPosdevsMat_15_60 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_15_60 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_15_60 = np.zeros((len(priorVarList), numSystems))
avgEigMat_15_60 = np.zeros((len(priorVarList), numSystems, 15+60)) # for average eigenvalue size
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 15, 60
newSysDict = util.generateRandDataDict(numImp=15, numOut=60, numSamples=60 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_15_60[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_15_60[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_15_60[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_15_60[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_15_60[currVarInd, randSysInd] = 0
#percOutMat_15_60[currVarInd, randSysInd] = len([i for i in currHessDiags[15:] if i < 0]) / len(negDiags)
percNegEigMat_15_60[currVarInd, randSysInd] = len(
[i for i in np.linalg.eigvals(outDict['hess']) if i < 0]) / totalEnts
avgEigMat_15_60[currVarInd, randSysInd, :] = np.linalg.eigvals(outDict['hess'])
resultsMat_15_100 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_15_100 = np.zeros((len(priorVarList), numSystems))
avgPosdevsMat_15_100 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_15_100 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_15_100 = np.zeros((len(priorVarList), numSystems))
avgEigMat_15_100 = np.zeros((len(priorVarList), numSystems, 15+100)) # for average eigenvalue size
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 15, 100
newSysDict = util.generateRandDataDict(numImp=15, numOut=100, numSamples=100 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_15_100[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_15_100[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_15_100[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_15_100[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_15_100[currVarInd, randSysInd] = 0
#percOutMat_15_100[currVarInd, randSysInd] = len([i for i in currHessDiags[15:] if i < 0]) / len(negDiags)
percNegEigMat_15_100[currVarInd, randSysInd] = len(
[i for i in np.linalg.eigvals(outDict['hess']) if i < 0]) / totalEnts
avgEigMat_15_100[currVarInd, randSysInd, :] = np.linalg.eigvals(outDict['hess'])
resultsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
resultsSummaryMat[currVarInd, 0] = np.quantile(resultsMat_5_20[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 1] = np.quantile(resultsMat_5_20[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 2] = np.quantile(resultsMat_10_40[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 3] = np.quantile(resultsMat_10_40[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 4] = np.quantile(resultsMat_15_60[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 5] = np.quantile(resultsMat_15_60[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 6] = np.quantile(resultsMat_15_100[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 7] = np.quantile(resultsMat_15_100[currVarInd, :], 0.95)
avgdevsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
avgdevsSummaryMat[currVarInd, 0] = np.quantile(avgdevsMat_5_20[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 1] = np.quantile(avgdevsMat_5_20[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 2] = np.quantile(avgdevsMat_10_40[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 3] = np.quantile(avgdevsMat_10_40[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 4] = np.quantile(avgdevsMat_15_60[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 5] = np.quantile(avgdevsMat_15_60[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 6] = np.quantile(avgdevsMat_15_100[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 7] = np.quantile(avgdevsMat_15_100[currVarInd, :], 0.95)
avgPosdevsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
avgPosdevsSummaryMat[currVarInd, 0] = np.quantile(avgPosdevsMat_5_20[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 1] = np.quantile(avgPosdevsMat_5_20[currVarInd, :], 0.95)
avgPosdevsSummaryMat[currVarInd, 2] = np.quantile(avgPosdevsMat_10_40[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 3] = np.quantile(avgPosdevsMat_10_40[currVarInd, :], 0.95)
avgPosdevsSummaryMat[currVarInd, 4] = np.quantile(avgPosdevsMat_15_60[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 5] = np.quantile(avgPosdevsMat_15_60[currVarInd, :], 0.95)
avgPosdevsSummaryMat[currVarInd, 6] = np.quantile(avgPosdevsMat_15_100[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 7] = np.quantile(avgPosdevsMat_15_100[currVarInd, :], 0.95)
'''
percOutSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
percOutSummaryMat[currVarInd, 0] = np.quantile(percOutMat_5_20[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 1] = np.quantile(percOutMat_5_20[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 2] = np.quantile(percOutMat_10_40[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 3] = np.quantile(percOutMat_10_40[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 4] = np.quantile(percOutMat_15_60[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 5] = np.quantile(percOutMat_15_60[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 6] = np.quantile(percOutMat_15_100[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 7] = np.quantile(percOutMat_15_100[currVarInd, :], 0.95)
'''
percNegEigSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
percNegEigSummaryMat[currVarInd, 0] = np.quantile(percNegEigMat_5_20[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 1] = np.quantile(percNegEigMat_5_20[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 2] = np.quantile(percNegEigMat_10_40[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 3] = np.quantile(percNegEigMat_10_40[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 4] = np.quantile(percNegEigMat_15_60[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 5] = np.quantile(percNegEigMat_15_60[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 6] = np.quantile(percNegEigMat_15_100[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 7] = np.quantile(percNegEigMat_15_100[currVarInd, :], 0.95)
avgEigSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
avgEigSummaryMat[currVarInd, 0] = np.quantile(avgEigMat_5_20[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 1] = np.quantile(avgEigMat_5_20[currVarInd, :, :], 0.95)
avgEigSummaryMat[currVarInd, 2] = np.quantile(avgEigMat_10_40[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 3] = np.quantile(avgEigMat_10_40[currVarInd, :, :], 0.95)
avgEigSummaryMat[currVarInd, 4] = np.quantile(avgEigMat_15_60[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 5] = np.quantile(avgEigMat_15_60[currVarInd, :, :], 0.95)
avgEigSummaryMat[currVarInd, 6] = np.quantile(avgEigMat_15_100[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 7] = np.quantile(avgEigMat_15_100[currVarInd, :, :], 0.95)
import matplotlib.pyplot as plt
# from matplotlib.lines import Line2D
# zippedList1 = zip(resultsSummaryMat[:, 0], resultsSummaryMat[:, 1], priorVarList)
# zippedList2 = zip(resultsSummaryMat[:, 2], resultsSummaryMat[:, 3], priorVarList)
# zippedList3 = zip(resultsSummaryMat[:, 4], resultsSummaryMat[:, 5], priorVarList)
# custom_lines = [Line2D([0], [0], color='orange', lw=4),
# Line2D([0], [0], color='red', lw=4),
# Line2D([0], [0], color='purple', lw=4)]
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
# for lower, upper, name in zippedList1:
# plt.plot((name, name), (lower, upper), 'o-', color='orange')
# for lower, upper, name in zippedList2:
# plt.plot((name, name), (lower, upper), 'o-', color='red')
# for lower, upper, name in zippedList3:
# plt.plot((name, name), (lower, upper), 'o-', color='purple')
plt.plot(priorVarList, resultsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on PERCENTAGE of Neg. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
# ax.legend(custom_lines, ['5 importers, 20 outlets', '10 importers, 40 outlets', '15 importers, 60 outlets'])
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Size of deviations below 0
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, avgdevsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([-3, 0])
plt.title('90% Intervals on SIZE of Neg. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Size of deviations above 0
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 13])
plt.title('90% Intervals on SIZE of Pos. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Percentage of negative diagonals that are outlets
'''
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, percOutSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on % THAT ARE OUTLETS of Neg. Hessian Diag. Vals.\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
'''
# Percentage of eigenvalues that are negative
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, percNegEigSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on % NEG. EIGENVALUES of Hessian\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Distribution of size of eigenvalues
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, avgEigSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on SIZE OF EIGENVALUES of Hessian\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
return
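# The five interval plots above (and their near-duplicates later in this file) repeat
# the same matplotlib recipe with different summary matrices, titles, and y-limits.
# A small helper along the lines sketched below could replace that repetition. This is
# a sketch only: it assumes every summary matrix keeps the 8-column layout used above
# (lower/upper 90% bounds for each of the four system sizes), and plotIntervalSummary
# is an illustrative name, not an existing function in this module.
def plotIntervalSummary(priorVarList, summaryMat, title, yLims):
    import matplotlib.pyplot as plt
    sysLabels = ['5imp_20out', '10imp_40out', '15imp_60out', '15imp_100out']
    sysColors = ['orange', 'red', 'purple', 'blue']
    fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
    for sysInd, (label, color) in enumerate(zip(sysLabels, sysColors)):
        # Even columns hold the lower 90% bound (dashed), odd columns the upper (solid)
        ax.plot(priorVarList, summaryMat[:, 2 * sysInd], 'o--', color=color,
                label=label + ' - lower 90%')
        ax.plot(priorVarList, summaryMat[:, 2 * sysInd + 1], 'o-', color=color,
                label=label + ' - upper 90%')
    ax.set_ylim(yLims)
    ax.set_title(title, fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
    ax.set_xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
    ax.set_ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
    ax.legend()
    fig.tight_layout()
    plt.show()
    plt.close()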
def testing():
    # For testing the Laplace and MCMC methods under non-diffuse sourcing matrices
    dataDict = util.generateRandDataDict(numImp=5, numOut=3, diagSens=0.90,
                                         diagSpec=0.99, numSamples=5 * 50,
                                         dataType='Tracked', transMatLambda=0.05,
                                         randSeed=3, trueRates=[])
    _ = util.GetVectorForms(dataDict)
    print(dataDict['transMat'])
    print(dataDict['N'])
    print(dataDict['Y'])
    MCMCdict = {'MCMCtype': 'NUTS', 'Madapt': 5000, 'delta': 0.4}
    dataDict.update({'diagSens': 0.90,
                     'diagSpec': 0.99,
                     'numPostSamples': 500,
                     'prior': methods.prior_normal(),
                     'MCMCdict': MCMCdict})
    logistigateDict = runlogistigate(dataDict)
    util.plotPostSamples(logistigateDict)
    return
def testtoberemovedlaterforhessanalysis():
import numpy as np
for randSys in range(10):
newSysDict = util.generateRandDataDict(numImp=10, numOut=100, numSamples=100 * 20,
dataType='Tracked')
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal()})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True)
print(np.sum(outDict['optStatus']))
# Ran for 100 systems of size 10/100; no instance of a non-successful optimizer exit
# Check the generated Hessian diagonals WRT the prior variance; try for 4 different system sizes
priorVarList = [0.1, 1, 3, 5, 7]
numSystems = 100
resultsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion of Hessian diagonals that are negative
avgdevsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for SIZE of negative diagonals
avgPosdevsMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion of negative diagonals that are outlets
percNegEigMat_5_20 = np.zeros((len(priorVarList), numSystems)) # for proportion negative eigenvalues
avgEigMat_5_20 = np.zeros((len(priorVarList), numSystems, 5 + 20)) # stores every eigenvalue, for the size distribution
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 5, 20
newSysDict = util.generateRandDataDict(numImp=5, numOut=20, numSamples=20 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_5_20[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_5_20[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_5_20[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_5_20[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_5_20[currVarInd, randSysInd] = 0
#percOutMat_5_20[currVarInd, randSysInd] = len([i for i in currHessDiags[5:] if i < 0]) / len(negDiags)
hessEigs = np.linalg.eigvals(outDict['hess'])
percNegEigMat_5_20[currVarInd, randSysInd] = len([i for i in hessEigs if i < 0]) / totalEnts
avgEigMat_5_20[currVarInd, randSysInd, :] = hessEigs
resultsMat_10_40 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_10_40 = np.zeros((len(priorVarList), numSystems))
avgPosdevsMat_10_40 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_10_40 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_10_40 = np.zeros((len(priorVarList), numSystems))
avgEigMat_10_40 = np.zeros((len(priorVarList), numSystems, 10 + 40)) # stores every eigenvalue, for the size distribution
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 10, 40
newSysDict = util.generateRandDataDict(numImp=10, numOut=40, numSamples=40 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_10_40[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_10_40[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_10_40[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_10_40[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_10_40[currVarInd, randSysInd] = 0
#percOutMat_10_40[currVarInd, randSysInd] = len([i for i in currHessDiags[10:] if i < 0]) / len(negDiags)
hessEigs = np.linalg.eigvals(outDict['hess'])
percNegEigMat_10_40[currVarInd, randSysInd] = len([i for i in hessEigs if i < 0]) / totalEnts
avgEigMat_10_40[currVarInd, randSysInd, :] = hessEigs
resultsMat_15_60 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_15_60 = np.zeros((len(priorVarList), numSystems))
avgPosdevsMat_15_60 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_15_60 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_15_60 = np.zeros((len(priorVarList), numSystems))
avgEigMat_15_60 = np.zeros((len(priorVarList), numSystems, 15 + 60)) # stores every eigenvalue, for the size distribution
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 15, 60
newSysDict = util.generateRandDataDict(numImp=15, numOut=60, numSamples=60 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_15_60[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_15_60[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_15_60[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_15_60[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_15_60[currVarInd, randSysInd] = 0
#percOutMat_15_60[currVarInd, randSysInd] = len([i for i in currHessDiags[15:] if i < 0]) / len(negDiags)
hessEigs = np.linalg.eigvals(outDict['hess'])
percNegEigMat_15_60[currVarInd, randSysInd] = len([i for i in hessEigs if i < 0]) / totalEnts
avgEigMat_15_60[currVarInd, randSysInd, :] = hessEigs
resultsMat_15_100 = np.zeros((len(priorVarList), numSystems))
avgdevsMat_15_100 = np.zeros((len(priorVarList), numSystems))
avgPosdevsMat_15_100 = np.zeros((len(priorVarList), numSystems)) # for SIZE of positive diagonals
#percOutMat_15_100 = np.zeros((len(priorVarList), numSystems))
percNegEigMat_15_100 = np.zeros((len(priorVarList), numSystems))
avgEigMat_15_100 = np.zeros((len(priorVarList), numSystems, 15 + 100)) # stores every eigenvalue, for the size distribution
for currVarInd, currVar in enumerate(priorVarList):
print('Working on variance of ' + str(currVar) + '...')
for randSysInd in range(numSystems): # Systems of size 15, 100
newSysDict = util.generateRandDataDict(numImp=15, numOut=100, numSamples=100 * 20,
dataType='Tracked')
totalEnts = len(newSysDict['importerNames']) + len(newSysDict['outletNames'])
_ = util.GetVectorForms(newSysDict)
newSysDict.update({'prior': methods.prior_normal(var=currVar)})
outDict = methods.FormEstimates(newSysDict, retOptStatus=True, printUpdate=False)
currHessDiags = np.diag(outDict['hess'])
negDiags = [i for i in currHessDiags if i < 0]
posDiags = [i for i in currHessDiags if i > 0]
resultsMat_15_100[currVarInd, randSysInd] = len(negDiags) / totalEnts
if len(negDiags) > 0:
avgdevsMat_15_100[currVarInd, randSysInd] = np.average(negDiags)
else:
avgdevsMat_15_100[currVarInd, randSysInd] = 0
if len(posDiags) > 0:
avgPosdevsMat_15_100[currVarInd, randSysInd] = np.average(posDiags)
else:
avgPosdevsMat_15_100[currVarInd, randSysInd] = 0
#percOutMat_15_100[currVarInd, randSysInd] = len([i for i in currHessDiags[15:] if i < 0]) / len(negDiags)
hessEigs = np.linalg.eigvals(outDict['hess'])
percNegEigMat_15_100[currVarInd, randSysInd] = len([i for i in hessEigs if i < 0]) / totalEnts
avgEigMat_15_100[currVarInd, randSysInd, :] = hessEigs
resultsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
resultsSummaryMat[currVarInd, 0] = np.quantile(resultsMat_5_20[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 1] = np.quantile(resultsMat_5_20[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 2] = np.quantile(resultsMat_10_40[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 3] = np.quantile(resultsMat_10_40[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 4] = np.quantile(resultsMat_15_60[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 5] = np.quantile(resultsMat_15_60[currVarInd, :], 0.95)
resultsSummaryMat[currVarInd, 6] = np.quantile(resultsMat_15_100[currVarInd, :], 0.05)
resultsSummaryMat[currVarInd, 7] = np.quantile(resultsMat_15_100[currVarInd, :], 0.95)
avgdevsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
avgdevsSummaryMat[currVarInd, 0] = np.quantile(avgdevsMat_5_20[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 1] = np.quantile(avgdevsMat_5_20[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 2] = np.quantile(avgdevsMat_10_40[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 3] = np.quantile(avgdevsMat_10_40[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 4] = np.quantile(avgdevsMat_15_60[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 5] = np.quantile(avgdevsMat_15_60[currVarInd, :], 0.95)
avgdevsSummaryMat[currVarInd, 6] = np.quantile(avgdevsMat_15_100[currVarInd, :], 0.05)
avgdevsSummaryMat[currVarInd, 7] = np.quantile(avgdevsMat_15_100[currVarInd, :], 0.95)
avgPosdevsSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
avgPosdevsSummaryMat[currVarInd, 0] = np.quantile(avgPosdevsMat_5_20[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 1] = np.quantile(avgPosdevsMat_5_20[currVarInd, :], 0.95)
avgPosdevsSummaryMat[currVarInd, 2] = np.quantile(avgPosdevsMat_10_40[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 3] = np.quantile(avgPosdevsMat_10_40[currVarInd, :], 0.95)
avgPosdevsSummaryMat[currVarInd, 4] = np.quantile(avgPosdevsMat_15_60[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 5] = np.quantile(avgPosdevsMat_15_60[currVarInd, :], 0.95)
avgPosdevsSummaryMat[currVarInd, 6] = np.quantile(avgPosdevsMat_15_100[currVarInd, :], 0.05)
avgPosdevsSummaryMat[currVarInd, 7] = np.quantile(avgPosdevsMat_15_100[currVarInd, :], 0.95)
'''
percOutSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
percOutSummaryMat[currVarInd, 0] = np.quantile(percOutMat_5_20[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 1] = np.quantile(percOutMat_5_20[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 2] = np.quantile(percOutMat_10_40[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 3] = np.quantile(percOutMat_10_40[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 4] = np.quantile(percOutMat_15_60[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 5] = np.quantile(percOutMat_15_60[currVarInd, :], 0.95)
percOutSummaryMat[currVarInd, 6] = np.quantile(percOutMat_15_100[currVarInd, :], 0.05)
percOutSummaryMat[currVarInd, 7] = np.quantile(percOutMat_15_100[currVarInd, :], 0.95)
'''
percNegEigSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
percNegEigSummaryMat[currVarInd, 0] = np.quantile(percNegEigMat_5_20[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 1] = np.quantile(percNegEigMat_5_20[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 2] = np.quantile(percNegEigMat_10_40[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 3] = np.quantile(percNegEigMat_10_40[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 4] = np.quantile(percNegEigMat_15_60[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 5] = np.quantile(percNegEigMat_15_60[currVarInd, :], 0.95)
percNegEigSummaryMat[currVarInd, 6] = np.quantile(percNegEigMat_15_100[currVarInd, :], 0.05)
percNegEigSummaryMat[currVarInd, 7] = np.quantile(percNegEigMat_15_100[currVarInd, :], 0.95)
avgEigSummaryMat = np.zeros((len(priorVarList), 8))
for currVarInd, currVar in enumerate(priorVarList):
avgEigSummaryMat[currVarInd, 0] = np.quantile(avgEigMat_5_20[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 1] = np.quantile(avgEigMat_5_20[currVarInd, :, :], 0.95)
avgEigSummaryMat[currVarInd, 2] = np.quantile(avgEigMat_10_40[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 3] = np.quantile(avgEigMat_10_40[currVarInd, :, :], 0.95)
avgEigSummaryMat[currVarInd, 4] = np.quantile(avgEigMat_15_60[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 5] = np.quantile(avgEigMat_15_60[currVarInd, :, :], 0.95)
avgEigSummaryMat[currVarInd, 6] = np.quantile(avgEigMat_15_100[currVarInd, :, :], 0.05)
avgEigSummaryMat[currVarInd, 7] = np.quantile(avgEigMat_15_100[currVarInd, :, :], 0.95)
import matplotlib.pyplot as plt
# from matplotlib.lines import Line2D
# zippedList1 = zip(resultsSummaryMat[:, 0], resultsSummaryMat[:, 1], priorVarList)
# zippedList2 = zip(resultsSummaryMat[:, 2], resultsSummaryMat[:, 3], priorVarList)
# zippedList3 = zip(resultsSummaryMat[:, 4], resultsSummaryMat[:, 5], priorVarList)
# custom_lines = [Line2D([0], [0], color='orange', lw=4),
# Line2D([0], [0], color='red', lw=4),
# Line2D([0], [0], color='purple', lw=4)]
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
# for lower, upper, name in zippedList1:
# plt.plot((name, name), (lower, upper), 'o-', color='orange')
# for lower, upper, name in zippedList2:
# plt.plot((name, name), (lower, upper), 'o-', color='red')
# for lower, upper, name in zippedList3:
# plt.plot((name, name), (lower, upper), 'o-', color='purple')
plt.plot(priorVarList, resultsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, resultsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on PERCENTAGE of Neg. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
# ax.legend(custom_lines, ['5 importers, 20 outlets', '10 importers, 40 outlets', '15 importers, 60 outlets'])
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Size of deviations below 0
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, avgdevsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, avgdevsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([-3, 0])
plt.title('90% Intervals on SIZE of Neg. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Size of deviations above 0
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, avgPosdevsSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 13])
plt.title('90% Intervals on SIZE of Pos. Hessian Diagonal Values\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Percentage of negative diagonals that are outlets
'''
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, percOutSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, percOutSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on % THAT ARE OUTLETS of Neg. Hessian Diag. Vals.\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
'''
# Percentage of eigenvalues that are negative
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, percNegEigSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, percNegEigSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 1])
plt.title(
'90% Intervals on % NEG. EIGENVALUES of Hessian\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
# Distribution of size of eigenvalues
fig, ax = plt.subplots(figsize=(8, 10), ncols=1)
plt.plot(priorVarList, avgEigSummaryMat[:, 0], 'o--', color='orange', label='5imp_20out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 1], 'o-', color='orange', label='5imp_20out - upper 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 2], 'o--', color='red', label='10imp_40out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 3], 'o-', color='red', label='10imp_40out - upper 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 4], 'o--', color='purple', label='15imp_60out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 5], 'o-', color='purple', label='15imp_60out - upper 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 6], 'o--', color='blue', label='15imp_100out - lower 90%')
plt.plot(priorVarList, avgEigSummaryMat[:, 7], 'o-', color='blue', label='15imp_100out - upper 90%')
plt.ylim([0, 17])
plt.title(
'90% Intervals on SIZE OF EIGENVALUES of Hessian\nvs. Prior Variance, for 4 different system sizes',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Prior variance', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 14, 'fontname': 'Trebuchet MS'})
ax.legend()
fig.tight_layout()
plt.show()
plt.close()
return
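# The four per-size blocks above differ only in (numImp, numOut) and in which matrices
# they fill. A consolidated driver is sketched below; it reuses only the util/methods
# calls already appearing in this file, and the name collectHessianStats is
# illustrative rather than part of the package.
def collectHessianStats(numImp, numOut, priorVarList, numSystems):
    import numpy as np
    totalEnts = numImp + numOut
    negFrac = np.zeros((len(priorVarList), numSystems))     # share of negative Hessian diagonals
    negSize = np.zeros((len(priorVarList), numSystems))     # mean value of negative diagonals
    posSize = np.zeros((len(priorVarList), numSystems))     # mean value of positive diagonals
    negEigFrac = np.zeros((len(priorVarList), numSystems))  # share of negative eigenvalues
    eigVals = np.zeros((len(priorVarList), numSystems, totalEnts))
    for varInd, currVar in enumerate(priorVarList):
        for sysInd in range(numSystems):
            sysDict = util.generateRandDataDict(numImp=numImp, numOut=numOut,
                                                numSamples=numOut * 20, dataType='Tracked')
            _ = util.GetVectorForms(sysDict)
            sysDict.update({'prior': methods.prior_normal(var=currVar)})
            outDict = methods.FormEstimates(sysDict, retOptStatus=True, printUpdate=False)
            diags = np.diag(outDict['hess'])
            negDiags = diags[diags < 0]
            posDiags = diags[diags > 0]
            negFrac[varInd, sysInd] = len(negDiags) / totalEnts
            negSize[varInd, sysInd] = np.average(negDiags) if len(negDiags) > 0 else 0
            posSize[varInd, sysInd] = np.average(posDiags) if len(posDiags) > 0 else 0
            eigs = np.linalg.eigvals(outDict['hess'])
            negEigFrac[varInd, sysInd] = np.sum(eigs < 0) / totalEnts
            eigVals[varInd, sysInd, :] = eigs
    return negFrac, negSize, posSize, negEigFrac, eigVals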
if __name__ == '__main__':
examiningLaplaceApprox()
| avg_line_length: 66.661738 | max_line_length: 132 | alphanum_fraction: 0.657318 | qsc_code_num_words_quality_signal: 8,413 | qsc_code_num_chars_quality_signal: 72,128 | … remaining per-file quality-signal columns (unlabeled numeric values) … |
63ad77170c34a05436889c92a4c1294a38ffd5d1 | 193 | py | Python | pkg/gcp/features/__init__.py | christus02/citrix-cloud-controller | 9f6d3d6e009efbc2943588bc42b13cb309d053b5 | ["Apache-2.0"] | 6 | 2020-05-19T06:22:47.000Z | 2020-10-19T07:46:06.000Z | pkg/gcp/features/__init__.py | christus02/citrix-cloud-controller | 9f6d3d6e009efbc2943588bc42b13cb309d053b5 | ["Apache-2.0"] | null | null | null | pkg/gcp/features/__init__.py | christus02/citrix-cloud-controller | 9f6d3d6e009efbc2943588bc42b13cb309d053b5 | ["Apache-2.0"] | null | null | null |
from . import forwardingrules # noqa
from . import helper # noqa
from . import instances # noqa
from . import metadata # noqa
from . import targetinstances # noqa
from . import clouddns # noqa
| avg_line_length: 27.571429 | max_line_length: 36 | alphanum_fraction: 0.751295 | qsc_code_num_words_quality_signal: 24 | qsc_code_num_chars_quality_signal: 193 | … remaining per-file quality-signal columns (unlabeled numeric values) … |
63b8f62e6705777bb9aa51431fdce6c909368dcc | 155 | py | Python | utils.py | HaeckelK/connected-cards-api | 97efbece2aac038a1380208f8dc3bf092023c78c | ["MIT"] | null | null | null | utils.py | HaeckelK/connected-cards-api | 97efbece2aac038a1380208f8dc3bf092023c78c | ["MIT"] | 20 | 2021-09-10T07:55:03.000Z | 2021-09-15T15:30:56.000Z | utils.py | HaeckelK/connected-cards-api | 97efbece2aac038a1380208f8dc3bf092023c78c | ["MIT"] | null | null | null |
import time


def timestamp() -> int:
    return int(time.time())


def yesterday_midnight() -> int:
    return (int(time.time() // 86400)) * 86400 - 86400
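# A quick sanity check of the arithmetic above (not part of the original module): 86400
# is the number of seconds in a day, so flooring the epoch time to a whole day and then
# stepping back one more day lands exactly on midnight UTC of the previous day.
if __name__ == "__main__":
    now = timestamp()
    cutoff = yesterday_midnight()
    assert cutoff % 86400 == 0  # exactly on a UTC day boundary
    assert 86400 <= now - cutoff < 2 * 86400  # between one and two days before now
    print(now, cutoff)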
| avg_line_length: 17.222222 | max_line_length: 54 | alphanum_fraction: 0.632258 | qsc_code_num_words_quality_signal: 20 | qsc_code_num_chars_quality_signal: 155 | … remaining per-file quality-signal columns (unlabeled numeric values) … |
89823e4d535f8ca599fb60c09e98bee13c56d5b7 | 10,761 | py | Python | tests/test_pbo_reader.py | tjensen/dayz-dev-tools | e88c7f4d169778737f2299b6cb10dac9b1a29127 | ["MIT"] | 4 | 2021-07-26T00:27:01.000Z | 2022-03-20T16:39:42.000Z | tests/test_pbo_reader.py | tjensen/dayz-dev-tools | e88c7f4d169778737f2299b6cb10dac9b1a29127 | ["MIT"] | 19 | 2021-08-04T21:56:12.000Z | 2022-02-27T22:25:19.000Z | tests/test_pbo_reader.py | tjensen/dayz-dev-tools | e88c7f4d169778737f2299b6cb10dac9b1a29127 | ["MIT"] | 2 | 2021-09-17T01:11:44.000Z | 2022-01-28T16:43:01.000Z |
import io
import os
import unittest
from dayz_dev_tools import pbo_reader
class TestPBOReader(unittest.TestCase):
def test_files_returns_empty_list_when_pbo_is_empty(self) -> None:
reader = pbo_reader.PBOReader(io.BytesIO(b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"))
assert reader.files() == []
def test_files_returns_empty_list_when_pbo_has_empty_file_list(self) -> None:
reader = pbo_reader.PBOReader(io.BytesIO())
assert reader.files() == []
def test_files_returns_list_of_files_in_pbo(self) -> None:
pbo_file = io.BytesIO(
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
files = reader.files()
assert len(files) == 2
assert files[0].filename == b"f1"
assert files[0].mime_type == b"\x01\x02\x03\x04"
assert files[0].original_size == 0x8070605
assert files[0].reserved == 0xc0b0a09
assert files[0].time_stamp == 0x100f0e0d
assert files[0].data_size == 12
assert files[0].content_reader is not None
assert files[1].filename == b"f2"
assert files[1].mime_type == b"\x11\x12\x13\x14"
assert files[1].original_size == 0x18171615
assert files[1].reserved == 0x1c1b1a19
assert files[1].time_stamp == 0x201f1e1d
assert files[1].data_size == 9
assert files[1].content_reader is not None
# Asserting out of order to ensure that subreader is created correctly
assert files[1].content_reader.read(9) == b"file2data"
assert files[0].content_reader.read(12) == b"file1content"
def test_files_returns_list_of_files_when_pbo_has_headers(self) -> None:
pbo_file = io.BytesIO(
b"\0\x73\x72\x65\x56\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"foo\0bar\0"
b"fizz\0buzz\0"
b"\0"
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
files = reader.files()
assert len(files) == 2
def test_files_returns_list_of_files_when_pbo_has_headers_but_no_dummy_record(self) -> None:
pbo_file = io.BytesIO(
b"\0"
b"foo\0bar\0"
b"fizz\0buzz\0"
b"\0"
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
files = reader.files()
assert len(files) == 2
def test_inserts_prefix_into_filenames_when_prefix_header_is_set(self) -> None:
pbo_file = io.BytesIO(
b"\0\x73\x72\x65\x56\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"prefix\0PREFIX\0"
b"\0"
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
filenames = [f.filename for f in reader.files()]
assert filenames == [b"PREFIX\\f1", b"PREFIX\\f2"]
def test_file_returns_none_if_filename_string_does_not_match_any_in_pbo(self) -> None:
pbo_file = io.BytesIO(
b"dir\\f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"dir\\f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
matching_file = reader.file("unmatched")
assert matching_file is None
def test_file_returns_none_if_filename_bytes_does_not_match_any_in_pbo(self) -> None:
pbo_file = io.BytesIO(
b"dir\\f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"dir\\f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
matching_file = reader.file(b"unmatched")
assert matching_file is None
def test_file_returns_file_with_matching_filename_string(self) -> None:
pbo_file = io.BytesIO(
b"dir\\f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"dir\\f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
matching_file = reader.file(os.path.join("dir", "f1"))
assert matching_file == reader.files()[0]
def test_file_matches_string_names_case_insensitively(self) -> None:
pbo_file = io.BytesIO(
b"dir\\f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"Dir\\F2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
matching_file = reader.file(os.path.join("dir", "F2"))
assert matching_file == reader.files()[1]
def test_file_returns_file_with_matching_filename_bytes(self) -> None:
pbo_file = io.BytesIO(
b"dir\\f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"dir\\f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
matching_file = reader.file(b"dir\\f1")
assert matching_file == reader.files()[0]
def test_file_matches_bytes_names_case_insensitively(self) -> None:
pbo_file = io.BytesIO(
b"dir\\f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"Dir\\F2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
matching_file = reader.file(b"dir\\f2")
assert matching_file == reader.files()[1]
def test_headers_returns_empty_list_when_pbo_is_empty(self) -> None:
reader = pbo_reader.PBOReader(io.BytesIO())
assert reader.headers() == []
def test_headers_returns_empty_list_when_pbo_does_not_have_headers(self) -> None:
pbo_file = io.BytesIO(
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
assert reader.headers() == []
def test_headers_returns_list_of_headers_when_pbo_has_headers(self) -> None:
pbo_file = io.BytesIO(
b"\0\x73\x72\x65\x56\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"foo\0bar\0"
b"fizz\0buzz\0"
b"foo\0repeated keys are not ignored or overwritten\0"
b"\0"
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
headers = reader.headers()
assert headers == [
(b"foo", b"bar"),
(b"fizz", b"buzz"),
(b"foo", b"repeated keys are not ignored or overwritten")
]
def test_headers_returns_list_of_headers_when_pbo_has_headers_but_no_dummy_record(self) -> None:
pbo_file = io.BytesIO(
b"\0"
b"foo\0bar\0"
b"fizz\0buzz\0"
b"\0"
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
headers = reader.headers()
assert headers == [
(b"foo", b"bar"),
(b"fizz", b"buzz")
]
def test_prefix_returns_none_if_prefix_header_is_not_present(self) -> None:
pbo_file = io.BytesIO(
b"\0"
b"foo\0bar\0"
b"fizz\0buzz\0"
b"\0"
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
assert reader.prefix() is None
def test_prefix_returns_prefix_header_value_when_present(self) -> None:
pbo_file = io.BytesIO(
b"\0"
b"foo\0bar\0"
b"prefix\0PREFIX\0"
b"fizz\0buzz\0"
b"\0"
b"f1\0\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x0c\0\0\0"
b"f2\0\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x09\0\0\0"
b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
b"file1content"
b"file2data")
reader = pbo_reader.PBOReader(pbo_file)
assert reader.prefix() == b"PREFIX"
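# Every hand-written byte string above follows the same record layout: a NUL-terminated
# filename, a 4-byte mime type, then four little-endian uint32 fields (original_size,
# reserved, time_stamp, data_size). A builder like the one below could generate these
# fixtures; make_entry is an illustrative helper used only here, not an API of
# dayz_dev_tools.
import struct

def make_entry(name: bytes, mime: bytes, original_size: int, reserved: int,
               time_stamp: int, data_size: int) -> bytes:
    # "<4sIIII" packs 4 raw bytes plus four little-endian unsigned 32-bit integers
    return name + b"\0" + struct.pack("<4sIIII", mime, original_size, reserved,
                                      time_stamp, data_size)

# For example, the first record used throughout these tests is equivalent to:
# make_entry(b"f1", b"\x01\x02\x03\x04", 0x08070605, 0x0c0b0a09, 0x100f0e0d, 12)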
| avg_line_length: 40.152985 | max_line_length: 100 | alphanum_fraction: 0.586191 | qsc_code_num_words_quality_signal: 1,982 | qsc_code_num_chars_quality_signal: 10,761 | … remaining per-file quality-signal columns (unlabeled numeric values) … |
982deb4bea80f53f73c63709e2f298c9fa5d75fd | 22,699 | py | Python | tests/test_dataframe/test_get.py | bearsh/raccoon | bd7a59c3dcf7ad7b995194a4a49631759d9e565c | ["MIT"] | 62 | 2016-07-11T01:23:15.000Z | 2022-01-14T17:42:17.000Z | tests/test_dataframe/test_get.py | bearsh/raccoon | bd7a59c3dcf7ad7b995194a4a49631759d9e565c | ["MIT"] | 13 | 2016-07-11T01:24:02.000Z | 2021-05-17T14:51:58.000Z | tests/test_dataframe/test_get.py | bearsh/raccoon | bd7a59c3dcf7ad7b995194a4a49631759d9e565c | ["MIT"] | 14 | 2017-03-22T17:23:02.000Z | 2021-05-08T05:16:30.000Z |
import pytest
import raccoon as rc
from raccoon.utils import assert_frame_equal
def test_get_cell():
actual = rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9]}, index=[10, 11, 13], columns=['a', 'b', 'c'],
sort=False)
assert actual.get(10, 'a') == 1
assert actual.get(11, 'a') == 2
assert actual.get(13, 'c') == 9
# test items not in index raise errors
with pytest.raises(ValueError):
actual.get(1, 'a')
with pytest.raises(ValueError):
actual.get(100, 'a')
with pytest.raises(ValueError):
actual.get(12, 'a')
def test_get_cell_sorted():
actual = rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9]}, index=[10, 11, 13], columns=['a', 'b', 'c'],
sort=True)
assert actual.get(10, 'a') == 1
assert actual.get(11, 'a') == 2
assert actual.get(13, 'c') == 9
# test items not in index raise errors
with pytest.raises(ValueError):
actual.get(1, 'a')
with pytest.raises(ValueError):
actual.get(100, 'a')
with pytest.raises(ValueError):
actual.get(12, 'a')
def test_get_rows():
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [4, 5, 6, 7], 'c': [7, 8, 9, None]}, index=[10, 11, 12, 99],
columns=['a', 'b', 'c'], index_name='start_10', sort=False)
expected = rc.DataFrame({'c': [8, 9]}, index=[11, 12], index_name='start_10', sort=False)
actual = df.get([11, 12], 'c')
assert_frame_equal(actual, expected)
# test with boolean list
actual = df.get([False, True, True, False], 'c')
assert_frame_equal(actual, expected)
# index out of order
expected = rc.DataFrame({'c': [None, 7]}, index=[99, 10], index_name='start_10', sort=False)
actual = df.get([99, 10], 'c')
assert_frame_equal(actual, expected)
# get as a list
assert df.get([11, 12], 'c', as_list=True) == [8, 9]
# get as a list
assert df.get([False, True, True, False], 'c', as_list=True) == [8, 9]
# get entire column
assert df.get(columns='b', as_list=True) == [4, 5, 6, 7]
# items not in index raise errors
with pytest.raises(ValueError):
df.get([11, 88], 'c', as_list=True)
# not enough items in boolean list
with pytest.raises(ValueError):
df.get([True, True], 'c')
def test_get_rows_sorted():
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [4, 5, 6, 7], 'c': [7, 8, 9, None]}, index=[10, 11, 12, 99],
columns=['a', 'b', 'c'], index_name='start_10', sort=True)
expected = rc.DataFrame({'c': [8, 9]}, index=[11, 12], index_name='start_10', sort=True)
actual = df.get([11, 12], 'c')
assert_frame_equal(actual, expected)
# get as a list
assert df.get([11, 12], 'c', as_list=True) == [8, 9]
# test with boolean list
actual = df.get([False, True, True, False], 'c')
assert_frame_equal(actual, expected)
# index out of order
expected = rc.DataFrame({'c': [7, None]}, index=[10, 99], index_name='start_10', sort=True)
actual = df.get([99, 10], 'c')
assert_frame_equal(actual, expected)
# get as a list
assert df.get([False, True, True, False], 'c', as_list=True) == [8, 9]
# get entire column
assert df.get(columns='b', as_list=True) == [4, 5, 6, 7]
# items not in index raise errors
with pytest.raises(ValueError):
df.get([11, 88], 'c', as_list=True)
# not enough items in boolean list
with pytest.raises(ValueError):
df.get([True, True], 'c')
def test_get_columns():
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [4, 5, 6, 7], 'c': [7, 8, 9, None]}, index=[10, 11, 12, 99],
columns=['a', 'b', 'c'], index_name='start_10', sort=False)
# no columns given
expected = rc.DataFrame({'a': [4], 'b': [7], 'c': [None]}, index=[99], columns=['a', 'b', 'c'],
index_name='start_10', sort=False)
actual = df.get_columns(99)
assert_frame_equal(actual, expected)
# specific columns
expected = rc.DataFrame({'a': [4], 'c': [None]}, index=[99], columns=['a', 'c'], index_name='start_10',
sort=False)
actual = df.get(99, ['a', 'c'])
assert_frame_equal(actual, expected)
# test with boolean list
actual = df.get(99, [True, False, True])
assert_frame_equal(actual, expected)
# columns out of order
expected = rc.DataFrame({'c': [8], 'b': [5]}, index=[11], columns=['c', 'b'], index_name='start_10',
sort=False)
actual = df.get(11, ['c', 'b'])
assert_frame_equal(actual, expected)
# as_dict
assert df.get(11, ['b', 'c'], as_dict=True) == {'start_10': 11, 'b': 5, 'c': 8}
assert df.get_columns(11, ['b', 'c'], as_dict=True) == {'start_10': 11, 'b': 5, 'c': 8}
# test boolean list not same length as columns
with pytest.raises(ValueError):
df.get(99, [True, False])
# test index out of bounds
with pytest.raises(ValueError):
df.get(88, ['a', 'c'])
def test_get_columns_sorted():
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [4, 5, 6, 7], 'c': [7, 8, 9, None]}, index=[10, 11, 12, 99],
columns=['a', 'b', 'c'], index_name='start_10', sort=True)
expected = rc.DataFrame({'a': [4], 'c': [None]}, index=[99], columns=['a', 'c'], index_name='start_10',
sort=True)
actual = df.get(99, ['a', 'c'])
assert_frame_equal(actual, expected)
# test with boolean list
actual = df.get(99, [True, False, True])
assert_frame_equal(actual, expected)
# columns out of order
expected = rc.DataFrame({'c': [8], 'b': [5]}, index=[11], columns=['c', 'b'], index_name='start_10',
sort=True)
actual = df.get(11, ['c', 'b'])
assert_frame_equal(actual, expected)
# test boolean list not same length as columns
with pytest.raises(ValueError):
df.get(99, [True, False])
# test index out of bounds
with pytest.raises(ValueError):
df.get(88, ['a', 'c'])
def test_get_matrix():
df = rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9], 'd': [10, 11, 12]}, index=['x', 'y', 'z'],
columns=['a', 'b', 'c', 'd'], index_name='letters', sort=False)
expected = rc.DataFrame({'b': [4, 6], 'd': [10, 12]}, index=['x', 'z'], columns=['b', 'd'], index_name='letters',
sort=False)
actual = df.get(['x', 'z'], ['b', 'd'])
assert_frame_equal(actual, expected)
# test with booleans
actual = df.get([True, False, True], [False, True, False, True])
assert_frame_equal(actual, expected)
# columns out of order
expected = rc.DataFrame({'d': [10, 12], 'c': [7, 9]}, index=['x', 'z'], columns=['d', 'c'], index_name='letters',
sort=False)
actual = df.get(['x', 'z'], ['d', 'c'])
assert_frame_equal(actual, expected)
# get everything
everything = df.get()
assert_frame_equal(everything, df)
# boolean list does not match index length
with pytest.raises(ValueError):
df.get([True, False], [False, True, False, True])
# boolean list does not match columns length
with pytest.raises(ValueError):
df.get([True, False, True], [False, True])
# missing index
with pytest.raises(ValueError):
df.get_matrix(['BAD', 'x'], ['a', 'b'])
# missing column
with pytest.raises(ValueError):
df.get_matrix(['x', 'y'], ['a', 'b', 'BAD'])
def test_get_matrix_sorted():
df = rc.DataFrame({'a': [2, 1, 3], 'b': [5, 4, 6], 'c': [8, 7, 9], 'd': [11, 10, 12]}, index=['y', 'x', 'z'],
columns=['a', 'b', 'c', 'd'], index_name='letters', sort=True)
expected = rc.DataFrame({'b': [4, 6], 'd': [10, 12]}, index=['x', 'z'], columns=['b', 'd'], index_name='letters',
sort=True)
actual = df.get(['x', 'z'], ['b', 'd'])
assert_frame_equal(actual, expected)
# test with booleans
actual = df.get([True, False, True], [False, True, False, True])
assert_frame_equal(actual, expected)
# columns out of order
expected = rc.DataFrame({'d': [10, 12], 'c': [7, 9]}, index=['x', 'z'], columns=['d', 'c'], index_name='letters',
sort=True)
actual = df.get(['x', 'z'], ['d', 'c'])
assert_frame_equal(actual, expected)
# get everything
everything = df.get()
assert_frame_equal(everything, df)
# boolean list does not match index length
with pytest.raises(ValueError):
df.get([True, False], [False, True, False, True])
# boolean list does not match columns length
with pytest.raises(ValueError):
df.get([True, False, True], [False, True])
# missing index
with pytest.raises(ValueError):
df.get_matrix(['BAD', 'x'], ['a', 'b'])
# missing column
with pytest.raises(ValueError):
df.get_matrix(['x', 'y'], ['a', 'b', 'BAD'])
def test_get_location():
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}, index=[2, 4, 6, 8])
# forward indexing, all columns
assert_frame_equal(df.get_location(2), rc.DataFrame({'a': [3], 'b': [7]}, index=[6]))
assert df.get_location(2, as_dict=True) == {'index': 6, 'a': 3, 'b': 7}
assert df.get_location(2, as_dict=True, index=False) == {'a': 3, 'b': 7}
# reverse indexing, all columns
assert_frame_equal(df.get_location(-1), rc.DataFrame({'a': [4], 'b': [8]}, index=[8]))
assert df.get_location(-1, as_dict=True) == {'index': 8, 'a': 4, 'b': 8}
assert df.get_location(-1, as_dict=True, index=False) == {'a': 4, 'b': 8}
# forward indexing, one column
assert_frame_equal(df.get_location(0, ['a']), rc.DataFrame({'a': [1]}, index=[2]))
assert df.get_location(0, ['a'], as_dict=True) == {'index': 2, 'a': 1}
assert df.get_location(0, ['a'], as_dict=True, index=False) == {'a': 1}
# reverse indexing, all columns
assert_frame_equal(df.get_location(-2, ['b']), rc.DataFrame({'b': [7]}, index=[6]))
assert df.get_location(-2, ['b'], as_dict=True) == {'index': 6, 'b': 7}
assert df.get_location(-2, ['b'], as_dict=True, index=False) == {'b': 7}
# single value for column and not list returns just the value
assert df.get_location(1, 'b') == 6
def test_get_locations():
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}, index=[2, 4, 6, 8])
# multi row, multi columns
assert_frame_equal(df.get_locations([0, 2]), rc.DataFrame({'a': [1, 3], 'b': [5, 7]}, index=[2, 6]))
# multiple rows, single columns
assert_frame_equal(df.get_locations([1, 3], 'a'), rc.DataFrame({'a': [2, 4]}, index=[4, 8]))
assert df.get_locations([0, 2], 'b', as_list=True) == [5, 7]
# single row, multiple columns
assert_frame_equal(df.get_locations([2]), rc.DataFrame({'a': [3], 'b': [7]}, index=[6]))
def test_get_slice():
# fails for non-sort DataFrame
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}, index=[2, 4, 6, 8])
with pytest.raises(RuntimeError):
df.get_slice(2, 4)
# empty DataFrame
df = rc.DataFrame(sort=True)
assert_frame_equal(df.get_slice(3, 3), rc.DataFrame(sort=True))
df = rc.DataFrame(sort=True, columns=['a', 'b'])
assert_frame_equal(df.get_slice(3, 3), rc.DataFrame(sort=True, columns=['a', 'b']))
# full DataFrame
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}, columns=['a', 'b'], index=[2, 4, 6, 8], sort=True)
assert_frame_equal(df.get_slice(2, 8), df)
assert_frame_equal(df.get_slice(1, 8), df)
assert_frame_equal(df.get_slice(2, 10), df)
assert_frame_equal(df.get_slice(1, 10), df)
assert_frame_equal(df.get_slice(4, 4, ['b']), rc.DataFrame({'b': [6]}, index=[4], sort=True))
assert_frame_equal(df.get_slice(3, 4, ['b']), rc.DataFrame({'b': [6]}, index=[4], sort=True))
assert_frame_equal(df.get_slice(4, 5, ['b']), rc.DataFrame({'b': [6]}, index=[4], sort=True))
assert_frame_equal(df.get_slice(3, 5, ['b']), rc.DataFrame({'b': [6]}, index=[4], sort=True))
assert_frame_equal(df.get_slice(4, 6, ['a']), rc.DataFrame({'a': [2, 3]}, index=[4, 6], sort=True))
assert_frame_equal(df.get_slice(3, 6, ['a']), rc.DataFrame({'a': [2, 3]}, index=[4, 6], sort=True))
assert_frame_equal(df.get_slice(4, 7, ['a']), rc.DataFrame({'a': [2, 3]}, index=[4, 6], sort=True))
assert_frame_equal(df.get_slice(3, 7, ['a']), rc.DataFrame({'a': [2, 3]}, index=[4, 6], sort=True))
assert_frame_equal(df.get_slice(None, 5, ['a']), rc.DataFrame({'a': [1, 2]}, index=[2, 4], sort=True))
assert_frame_equal(df.get_slice(5, None, [True, False]), rc.DataFrame({'a': [3, 4]}, index=[6, 8], sort=True))
# boolean column list not the right size
with pytest.raises(ValueError):
df.get_slice(5, None, [True])
assert_frame_equal(df.get_slice(3, 3), rc.DataFrame({'a': [], 'b': []}, columns=['a', 'b'], sort=True))
assert_frame_equal(df.get_slice(0, 0), rc.DataFrame({'a': [], 'b': []}, columns=['a', 'b'], sort=True))
assert_frame_equal(df.get_slice(10, 10), rc.DataFrame({'a': [], 'b': []}, columns=['a', 'b'], sort=True))
def test_get_slice_as_dict():
# fails for non-sort DataFrame
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}, index=[2, 4, 6, 8])
with pytest.raises(RuntimeError):
df.get_slice(2, 4)
df = rc.DataFrame({'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}, index=[2, 4, 6, 8], sort=True)
assert df.get_slice(2, 8, as_dict=True) == ([2, 4, 6, 8], {'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]})
assert df.get_slice(1, 8, as_dict=True) == ([2, 4, 6, 8], {'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]})
assert df.get_slice(2, 10, as_dict=True) == ([2, 4, 6, 8], {'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]})
assert df.get_slice(1, 10, as_dict=True) == ([2, 4, 6, 8], {'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]})
assert df.get_slice(4, 4, ['b'], as_dict=True) == ([4], {'b': [6]})
assert df.get_slice(3, 4, ['b'], as_dict=True) == ([4], {'b': [6]})
assert df.get_slice(4, 5, ['b'], as_dict=True) == ([4], {'b': [6]})
assert df.get_slice(3, 5, ['b'], as_dict=True) == ([4], {'b': [6]})
assert df.get_slice(4, 6, ['a'], as_dict=True) == ([4, 6], {'a': [2, 3]})
assert df.get_slice(3, 6, ['a'], as_dict=True) == ([4, 6], {'a': [2, 3]})
assert df.get_slice(4, 7, ['a'], as_dict=True) == ([4, 6], {'a': [2, 3]})
assert df.get_slice(3, 7, ['a'], as_dict=True) == ([4, 6], {'a': [2, 3]})
assert df.get_slice(None, 5, ['a'], as_dict=True) == ([2, 4], {'a': [1, 2]})
assert df.get_slice(5, None, ['a'], as_dict=True) == ([6, 8], {'a': [3, 4]})
assert df.get_slice(3, 3, as_dict=True) == ([], {'a': [], 'b': []})
assert df.get_slice(0, 0, as_dict=True) == ([], {'a': [], 'b': []})
assert df.get_slice(10, 10, as_dict=True) == ([], {'a': [], 'b': []})
def test_get_square_brackets():
df = rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9], 'd': [10, 11, 12]}, columns=['a', 'b', 'c', 'd'],
sort=False)
# df['b'] -- get column
assert_frame_equal(df['b'], rc.DataFrame({'b': [4, 5, 6]}, sort=False))
# df[['a', 'b', c']] -- get columns
assert_frame_equal(df[['a', 'b', 'c']], rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9]},
columns=['a', 'b', 'c'], sort=False))
assert_frame_equal(df[['c', 'a']], rc.DataFrame({'c': [7, 8, 9], 'a': [1, 2, 3]}, columns=['c', 'a'], sort=False))
# df[1, 'd'] -- get cell at index = 5, column = 'b'
assert df[1, 'd'] == 11
# df[[0, 2]] -- get indexes = [0, 2] all columns
assert_frame_equal(df[[0, 2], df.columns],
rc.DataFrame({'a': [1, 3], 'b': [4, 6], 'c': [7, 9], 'd': [10, 12]},
columns=['a', 'b', 'c', 'd'], index=[0, 2], sort=False))
assert_frame_equal(df[[2, 1], df.columns],
rc.DataFrame({'a': [3, 2], 'b': [6, 5], 'c': [9, 8], 'd': [12, 11]},
columns=['a', 'b', 'c', 'd'], index=[2, 1], sort=False))
# df[[0, 2], 'c'] -- get indexes = [4, 5], column = 'b'
assert_frame_equal(df[[0, 2], 'c'], rc.DataFrame({'c': [7, 9]}, index=[0, 2], sort=False))
assert_frame_equal(df[[2, 0], 'c'], rc.DataFrame({'c': [9, 7]}, index=[2, 0], sort=False))
# df[[1, 2], ['a', 'd']] -- get indexes = [4, 5], columns = ['a', 'b']
assert_frame_equal(df[[1, 2], ['a', 'd']], rc.DataFrame({'a': [2, 3], 'd': [11, 12]}, columns=['a', 'd'],
index=[1, 2], sort=False))
assert_frame_equal(df[[2, 0], ['d', 'a']], rc.DataFrame({'d': [12, 10], 'a': [3, 1]}, columns=['d', 'a'],
index=[2, 0], sort=False))
def test_get_square_brackets_sorted():
df = rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9], 'd': [10, 11, 12]}, columns=['a', 'b', 'c', 'd'],
sort=True)
# df['b'] -- get column
assert_frame_equal(df['b'], rc.DataFrame({'b': [4, 5, 6]}, sort=True))
# df[['a', 'b', c']] -- get columns
assert_frame_equal(df[['a', 'b', 'c']], rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9]},
columns=['a', 'b', 'c'], sort=True))
assert_frame_equal(df[['c', 'a']], rc.DataFrame({'c': [7, 8, 9], 'a': [1, 2, 3]}, columns=['c', 'a'], sort=True))
# df[1, 'd'] -- get cell at index = 5, column = 'b'
assert df[1, 'd'] == 11
# df[[0, 2]] -- get indexes = [0, 2] all columns
assert_frame_equal(df[[0, 2], df.columns],
rc.DataFrame({'a': [1, 3], 'b': [4, 6], 'c': [7, 9], 'd': [10, 12]},
columns=['a', 'b', 'c', 'd'], index=[0, 2], sort=True))
assert_frame_equal(df[[2, 1], df.columns],
rc.DataFrame({'a': [2, 3], 'b': [5, 6], 'c': [8, 9], 'd': [11, 12]},
columns=['a', 'b', 'c', 'd'], index=[1, 2], sort=True))
# df[[0, 2], 'c'] -- get indexes = [4, 5], column = 'b'
assert_frame_equal(df[[0, 2], 'c'], rc.DataFrame({'c': [7, 9]}, index=[0, 2], sort=True))
assert_frame_equal(df[[2, 0], 'c'], rc.DataFrame({'c': [9, 7]}, index=[2, 0], sort=True))
# df[[1, 2], ['a', 'd']] -- get indexes = [4, 5], columns = ['a', 'b']
assert_frame_equal(df[[1, 2], ['a', 'd']], rc.DataFrame({'a': [2, 3], 'd': [11, 12]}, columns=['a', 'd'],
index=[1, 2], sort=True))
assert_frame_equal(df[[2, 0], ['d', 'a']], rc.DataFrame({'d': [10, 12], 'a': [1, 3]}, columns=['d', 'a'],
index=[0, 2], sort=True))
def test_get_slicer():
df = rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9], 'd': [10, 11, 12]}, columns=['a', 'b', 'c', 'd'],
sort=False)
# df[1:2] -- get slice from index 1 to 2, all columns
assert_frame_equal(df[1:2],
rc.DataFrame({'a': [2, 3], 'b': [5, 6], 'c': [8, 9], 'd': [11, 12]},
columns=['a', 'b', 'c', 'd'], index=[1, 2], sort=False))
# df[0:1, ['c', 'd']] -- get slice from index 0 to 1, columns ['c', 'd']
assert_frame_equal(df[0:1, ['c', 'd']], rc.DataFrame({'c': [7, 8], 'd': [10, 11]},
columns=['c', 'd'], index=[0, 1], sort=False))
assert_frame_equal(df[0:1, ['d', 'c']], rc.DataFrame({'d': [10, 11], 'c': [7, 8]},
columns=['d', 'c'], index=[0, 1], sort=False))
# df[1:1, 'c'] -- get slice 1 to 1 and column 'c'
assert_frame_equal(df[1:1, 'c'], rc.DataFrame({'c': [8]}, index=[1], sort=False))
# test indexes not in the range
with pytest.raises(IndexError):
_ = df[4:5, 'c']
with pytest.raises(IndexError):
_ = df[0:8, 'c']
with pytest.raises(IndexError):
_ = df[2:1, 'c']
def test_get_slicer_sorted():
df = rc.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6], 'c': [7, 8, 9], 'd': [10, 11, 12]}, columns=['a', 'b', 'c', 'd'],
sort=True)
# df[1:2] -- get slice from index 1 to 2, all columns
assert_frame_equal(df[1:2],
rc.DataFrame({'a': [2, 3], 'b': [5, 6], 'c': [8, 9], 'd': [11, 12]},
columns=['a', 'b', 'c', 'd'], index=[1, 2], sort=True))
# df[0:1, ['c', 'd']] -- get slice from index 0 to 1, columns ['c', 'd']
assert_frame_equal(df[0:1, ['c', 'd']], rc.DataFrame({'c': [7, 8], 'd': [10, 11]},
columns=['c', 'd'], index=[0, 1], sort=True))
assert_frame_equal(df[0:1, ['d', 'c']], rc.DataFrame({'d': [10, 11], 'c': [7, 8]},
columns=['d', 'c'], index=[0, 1], sort=True))
# df[1:1, 'c'] -- get slice 1 to 1 and column 'c'
assert_frame_equal(df[1:1, 'c'], rc.DataFrame({'c': [8]}, index=[1], sort=True))
# test indexes not in the range
assert_frame_equal(df[4:5], rc.DataFrame(columns=['a', 'b', 'c', 'd'], sort=True))
assert_frame_equal(df[2:1], rc.DataFrame(columns=['a', 'b', 'c', 'd'], sort=True))
assert_frame_equal(df[0:8], df)
assert_frame_equal(df[1.5:3.5], df.get_slice(1.5, 3.5))
def test_head():
df = rc.DataFrame({1: [0, 1, 2], 2: [3, 4, 5]}, columns=[1, 2], sort=False)
assert_frame_equal(df.head(0), rc.DataFrame(columns=[1, 2], sort=False))
assert_frame_equal(df.head(1), rc.DataFrame({1: [0], 2: [3]}, columns=[1, 2], sort=False))
assert_frame_equal(df.head(2), rc.DataFrame({1: [0, 1], 2: [3, 4]}, columns=[1, 2], sort=False))
assert_frame_equal(df.head(3), rc.DataFrame({1: [0, 1, 2], 2: [3, 4, 5]}, columns=[1, 2], sort=False))
assert_frame_equal(df.head(999), rc.DataFrame({1: [0, 1, 2], 2: [3, 4, 5]}, columns=[1, 2], sort=False))
def test_tail():
df = rc.DataFrame({1: [0, 1, 2], 2: [3, 4, 5]}, columns=[1, 2], sort=False)
assert_frame_equal(df.tail(0), rc.DataFrame(columns=[1, 2], sort=False))
assert_frame_equal(df.tail(1), rc.DataFrame({1: [2], 2: [5]}, columns=[1, 2], index=[2], sort=False))
assert_frame_equal(df.tail(2), rc.DataFrame({1: [1, 2], 2: [4, 5]}, columns=[1, 2], index=[1, 2], sort=False))
assert_frame_equal(df.tail(3), rc.DataFrame({1: [0, 1, 2], 2: [3, 4, 5]}, columns=[1, 2], sort=False))
assert_frame_equal(df.tail(999), rc.DataFrame({1: [0, 1, 2], 2: [3, 4, 5]}, columns=[1, 2], sort=False))
| 43.072106
| 120
| 0.515573
| 3,571
| 22,699
| 3.177261
| 0.033324
| 0.04495
| 0.124097
| 0.104707
| 0.925701
| 0.884541
| 0.856954
| 0.841266
| 0.822669
| 0.796757
| 0
| 0.066713
| 0.241905
| 22,699
| 526
| 121
| 43.153992
| 0.592631
| 0.102648
| 0
| 0.555556
| 0
| 0
| 0.033606
| 0
| 0
| 0
| 0
| 0
| 0.428105
| 1
| 0.058824
| false
| 0
| 0.009804
| 0
| 0.068627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
988997daa383528a1ba48fa105dcae37fe5e576e
| 111
|
py
|
Python
|
tests/utils/__init__.py
|
ComplexCity/policosm
|
548d4d694df49603f91cd45af7fe50ced79aea68
|
[
"MIT"
] | 6
|
2017-06-05T07:30:46.000Z
|
2022-03-07T00:47:22.000Z
|
tests/utils/__init__.py
|
ComplexCity/policosm
|
548d4d694df49603f91cd45af7fe50ced79aea68
|
[
"MIT"
] | 1
|
2017-12-14T05:40:42.000Z
|
2017-12-14T05:40:42.000Z
|
tests/utils/__init__.py
|
ComplexCity/policosm
|
548d4d694df49603f91cd45af7fe50ced79aea68
|
[
"MIT"
] | 1
|
2020-10-22T19:18:30.000Z
|
2020-10-22T19:18:30.000Z
|
from . import test_access
from . import test_bicycles
from . import test_levels
from . import test_projections
| 22.2
| 30
| 0.81982
| 16
| 111
| 5.4375
| 0.4375
| 0.45977
| 0.643678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144144
| 111
| 4
| 31
| 27.75
| 0.915789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
98a8f23e873a997d5576bc773e4a7d6c8b8ca0ae
| 41,841
|
py
|
Python
|
dace/frontend/common/distr.py
|
meshtag/dace
|
e6751ee6a4f6356b47b93065d43cefb3fd54ebaa
|
[
"BSD-3-Clause"
] | 1
|
2022-03-11T13:36:34.000Z
|
2022-03-11T13:36:34.000Z
|
dace/frontend/common/distr.py
|
meshtag/dace
|
e6751ee6a4f6356b47b93065d43cefb3fd54ebaa
|
[
"BSD-3-Clause"
] | null | null | null |
dace/frontend/common/distr.py
|
meshtag/dace
|
e6751ee6a4f6356b47b93065d43cefb3fd54ebaa
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
from numbers import Integral, Number
from typing import Sequence, Union
import dace
from dace import dtypes, symbolic
from dace.frontend.common import op_repository as oprepo
from dace.memlet import Memlet
from dace.sdfg import SDFG, SDFGState
import sympy as sp
from dace.frontend.python.replacements import _define_local_scalar
ShapeType = Sequence[Union[Integral, str, symbolic.symbol, symbolic.SymExpr, symbolic.sympy.Basic]]
RankType = Union[Integral, str, symbolic.symbol, symbolic.SymExpr, symbolic.sympy.Basic]
@oprepo.replaces('dace.comm.Bcast')
def _bcast(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
buffer: str,
root: Union[str, sp.Expr, Number] = 0,
grid: str = None):
from dace.libraries.mpi.nodes.bcast import Bcast
libnode = Bcast('_Bcast_', grid)
desc = sdfg.arrays[buffer]
in_buffer = state.add_read(buffer)
out_buffer = state.add_write(buffer)
if isinstance(root, str) and root in sdfg.arrays.keys():
root_node = state.add_read(root)
else:
storage = desc.storage
root_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
root_node = state.add_access(root_name)
root_tasklet = state.add_tasklet('_set_root_', {}, {'__out'}, '__out = {}'.format(root))
state.add_edge(root_tasklet, '__out', root_node, None, Memlet.simple(root_name, '0'))
state.add_edge(in_buffer, None, libnode, '_inbuffer', Memlet.from_array(buffer, desc))
state.add_edge(root_node, None, libnode, '_root', Memlet.simple(root_node.data, '0'))
state.add_edge(libnode, '_outbuffer', out_buffer, None, Memlet.from_array(buffer, desc))
return None
@oprepo.replaces('dace.comm.Reduce')
def _Reduce(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
buffer: str,
op: str,
root: Union[str, sp.Expr, Number] = 0,
grid: str = None):
from dace.libraries.mpi.nodes.reduce import Reduce
libnode = Reduce('_Reduce_', op, grid)
desc = sdfg.arrays[buffer]
in_buffer = state.add_read(buffer)
out_buffer = state.add_write(buffer)
if isinstance(root, str) and root in sdfg.arrays.keys():
root_node = state.add_read(root)
else:
storage = desc.storage
root_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
root_node = state.add_access(root_name)
root_tasklet = state.add_tasklet('_set_root_', {}, {'__out'}, '__out = {}'.format(root))
state.add_edge(root_tasklet, '__out', root_node, None, Memlet.simple(root_name, '0'))
state.add_edge(in_buffer, None, libnode, '_inbuffer', Memlet.from_array(buffer, desc))
state.add_edge(root_node, None, libnode, '_root', Memlet.simple(root_node.data, '0'))
state.add_edge(libnode, '_outbuffer', out_buffer, None, Memlet.from_array(buffer, desc))
return None
@oprepo.replaces('dace.comm.Allreduce')
def _Allreduce(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, buffer: str, op: str, grid: str = None):
from dace.libraries.mpi.nodes.allreduce import Allreduce
libnode = Allreduce('_Allreduce_', op, grid)
desc = sdfg.arrays[buffer]
in_buffer = state.add_read(buffer)
out_buffer = state.add_write(buffer)
state.add_edge(in_buffer, None, libnode, '_inbuffer', Memlet.from_array(buffer, desc))
state.add_edge(libnode, '_outbuffer', out_buffer, None, Memlet.from_array(buffer, desc))
return None
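# --- Illustrative sketch (added for exposition; not part of the original file).
# How the replacements above are typically exercised from a @dace.program;
# assumes an MPI-enabled DaCe build and a launch on more than one rank.
@dace.program
def _example_allreduce(x: dace.float64[10]):
    # Resolved via the @oprepo.replaces('dace.comm.Allreduce') registration
    # above; expands into an Allreduce library node reading and writing `x`.
    dace.comm.Allreduce(x, 'MPI_SUM')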
@oprepo.replaces('dace.comm.Scatter')
def _scatter(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
in_buffer: str,
out_buffer: str,
root: Union[str, sp.Expr, Number] = 0):
from dace.libraries.mpi.nodes.scatter import Scatter
libnode = Scatter('_Scatter_')
in_desc = sdfg.arrays[in_buffer]
out_desc = sdfg.arrays[out_buffer]
in_node = state.add_read(in_buffer)
out_node = state.add_write(out_buffer)
if isinstance(root, str) and root in sdfg.arrays.keys():
root_node = state.add_read(root)
else:
storage = in_desc.storage
root_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
root_node = state.add_access(root_name)
root_tasklet = state.add_tasklet('_set_root_', {}, {'__out'}, '__out = {}'.format(root))
state.add_edge(root_tasklet, '__out', root_node, None, Memlet.simple(root_name, '0'))
state.add_edge(in_node, None, libnode, '_inbuffer', Memlet.from_array(in_buffer, in_desc))
state.add_edge(root_node, None, libnode, '_root', Memlet.simple(root_node.data, '0'))
state.add_edge(libnode, '_outbuffer', out_node, None, Memlet.from_array(out_buffer, out_desc))
return None
@oprepo.replaces('dace.comm.Gather')
def _gather(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
in_buffer: str,
out_buffer: str,
root: Union[str, sp.Expr, Number] = 0):
from dace.libraries.mpi.nodes.gather import Gather
libnode = Gather('_Gather_')
in_desc = sdfg.arrays[in_buffer]
out_desc = sdfg.arrays[out_buffer]
in_node = state.add_read(in_buffer)
out_node = state.add_write(out_buffer)
if isinstance(root, str) and root in sdfg.arrays.keys():
root_node = state.add_read(root)
else:
storage = in_desc.storage
root_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
root_node = state.add_access(root_name)
root_tasklet = state.add_tasklet('_set_root_', {}, {'__out'}, '__out = {}'.format(root))
state.add_edge(root_tasklet, '__out', root_node, None, Memlet.simple(root_name, '0'))
state.add_edge(in_node, None, libnode, '_inbuffer', Memlet.from_array(in_buffer, in_desc))
state.add_edge(root_node, None, libnode, '_root', Memlet.simple(root_node.data, '0'))
state.add_edge(libnode, '_outbuffer', out_node, None, Memlet.from_array(out_buffer, out_desc))
return None
@oprepo.replaces('dace.comm.Send')
def _send(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
buffer: str,
dst: Union[str, sp.Expr, Number],
tag: Union[str, sp.Expr, Number] = 0):
from dace.libraries.mpi.nodes.send import Send
libnode = Send('_Send_')
buf_range = None
if isinstance(buffer, tuple):
buf_name, buf_range = buffer
else:
buf_name = buffer
desc = sdfg.arrays[buf_name]
conn = libnode.in_connectors
conn = {c: (dtypes.pointer(desc.dtype) if c == '_buffer' else t) for c, t in conn.items()}
libnode.in_connectors = conn
buf_node = state.add_read(buf_name)
dst_range = None
if isinstance(dst, tuple):
dst_name, dst_range = dst
dst_node = state.add_read(dst_name)
elif isinstance(dst, str) and dst in sdfg.arrays.keys():
dst_name = dst
dst_node = state.add_read(dst_name)
else:
storage = desc.storage
dst_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
dst_node = state.add_access(dst_name)
dst_tasklet = state.add_tasklet('_set_dst_', {}, {'__out'}, '__out = {}'.format(dst))
state.add_edge(dst_tasklet, '__out', dst_node, None, Memlet.simple(dst_name, '0'))
tag_range = None
if isinstance(tag, tuple):
tag_name, tag_range = tag
tag_node = state.add_read(tag_name)
elif isinstance(tag, str) and tag in sdfg.arrays.keys():
tag_name = tag
tag_node = state.add_read(tag)
else:
storage = desc.storage
tag_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
tag_node = state.add_access(tag_name)
tag_tasklet = state.add_tasklet('_set_tag_', {}, {'__out'}, '__out = {}'.format(tag))
state.add_edge(tag_tasklet, '__out', tag_node, None, Memlet.simple(tag_name, '0'))
if buf_range:
buf_mem = Memlet.simple(buf_name, buf_range)
else:
buf_mem = Memlet.from_array(buf_name, desc)
if dst_range:
dst_mem = Memlet.simple(dst_name, dst_range)
else:
dst_mem = Memlet.simple(dst_name, '0')
if tag_range:
tag_mem = Memlet.simple(tag_name, tag_range)
else:
tag_mem = Memlet.simple(tag_name, '0')
state.add_edge(buf_node, None, libnode, '_buffer', buf_mem)
state.add_edge(dst_node, None, libnode, '_dest', dst_mem)
state.add_edge(tag_node, None, libnode, '_tag', tag_mem)
return None
@oprepo.replaces('dace.comm.Isend')
def _isend(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, buffer: str, dst: Union[str, sp.Expr, Number],
tag: Union[str, sp.Expr, Number], request: str):
from dace.libraries.mpi.nodes.isend import Isend
libnode = Isend('_Isend_')
buf_range = None
if isinstance(buffer, tuple):
buf_name, buf_range = buffer
else:
buf_name = buffer
desc = sdfg.arrays[buf_name]
buf_node = state.add_read(buf_name)
req_range = None
if isinstance(request, tuple):
req_name, req_range = request
else:
req_name = request
req_desc = sdfg.arrays[req_name]
req_node = state.add_write(req_name)
iconn = libnode.in_connectors
iconn = {c: (dtypes.pointer(desc.dtype) if c == '_buffer' else t) for c, t in iconn.items()}
libnode.in_connectors = iconn
oconn = libnode.out_connectors
oconn = {c: (dtypes.pointer(req_desc.dtype) if c == '_request' else t) for c, t in oconn.items()}
libnode.out_connectors = oconn
dst_range = None
if isinstance(dst, tuple):
dst_name, dst_range = dst
dst_node = state.add_read(dst_name)
elif isinstance(dst, str) and dst in sdfg.arrays.keys():
dst_name = dst
dst_node = state.add_read(dst_name)
else:
storage = desc.storage
dst_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
dst_node = state.add_access(dst_name)
dst_tasklet = state.add_tasklet('_set_dst_', {}, {'__out'}, '__out = {}'.format(dst))
state.add_edge(dst_tasklet, '__out', dst_node, None, Memlet.simple(dst_name, '0'))
tag_range = None
if isinstance(tag, tuple):
tag_name, tag_range = tag
tag_node = state.add_read(tag_name)
elif isinstance(tag, str) and tag in sdfg.arrays.keys():
tag_name = tag
tag_node = state.add_read(tag)
else:
storage = desc.storage
tag_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
tag_node = state.add_access(tag_name)
tag_tasklet = state.add_tasklet('_set_tag_', {}, {'__out'}, '__out = {}'.format(tag))
state.add_edge(tag_tasklet, '__out', tag_node, None, Memlet.simple(tag_name, '0'))
if buf_range:
buf_mem = Memlet.simple(buf_name, buf_range)
else:
buf_mem = Memlet.from_array(buf_name, desc)
if req_range:
req_mem = Memlet.simple(req_name, req_range)
else:
req_mem = Memlet.from_array(req_name, req_desc)
if dst_range:
dst_mem = Memlet.simple(dst_name, dst_range)
else:
dst_mem = Memlet.simple(dst_name, '0')
if tag_range:
tag_mem = Memlet.simple(tag_name, tag_range)
else:
tag_mem = Memlet.simple(tag_name, '0')
state.add_edge(buf_node, None, libnode, '_buffer', buf_mem)
state.add_edge(dst_node, None, libnode, '_dest', dst_mem)
state.add_edge(tag_node, None, libnode, '_tag', tag_mem)
state.add_edge(libnode, '_request', req_node, None, req_mem)
return None
@oprepo.replaces('dace.comm.Recv')
def _recv(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
buffer: str,
src: Union[str, sp.Expr, Number],
tag: Union[str, sp.Expr, Number] = 0):
from dace.libraries.mpi.nodes.recv import Recv
libnode = Recv('_Recv_')
buf_range = None
if isinstance(buffer, tuple):
buf_name, buf_range = buffer
else:
buf_name = buffer
desc = sdfg.arrays[buf_name]
conn = libnode.out_connectors
conn = {c: (dtypes.pointer(desc.dtype) if c == '_buffer' else t) for c, t in conn.items()}
libnode.out_connectors = conn
buf_node = state.add_write(buf_name)
src_range = None
if isinstance(src, tuple):
src_name, src_range = src
src_node = state.add_read(src_name)
elif isinstance(src, str) and src in sdfg.arrays.keys():
src_name = src
src_node = state.add_read(src_name)
else:
storage = desc.storage
src_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
src_node = state.add_access(src_name)
src_tasklet = state.add_tasklet('_set_src_', {}, {'__out'}, '__out = {}'.format(src))
state.add_edge(src_tasklet, '__out', src_node, None, Memlet.simple(src_name, '0'))
tag_range = None
if isinstance(tag, tuple):
tag_name, tag_range = tag
tag_node = state.add_read(tag_name)
elif isinstance(tag, str) and tag in sdfg.arrays.keys():
tag_name = tag
tag_node = state.add_read(tag)
else:
storage = desc.storage
tag_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
tag_node = state.add_access(tag_name)
tag_tasklet = state.add_tasklet('_set_tag_', {}, {'__out'}, '__out = {}'.format(tag))
state.add_edge(tag_tasklet, '__out', tag_node, None, Memlet.simple(tag_name, '0'))
if buf_range:
buf_mem = Memlet.simple(buf_name, buf_range)
else:
buf_mem = Memlet.from_array(buf_name, desc)
if src_range:
src_mem = Memlet.simple(src_name, src_range)
else:
src_mem = Memlet.simple(src_name, '0')
if tag_range:
tag_mem = Memlet.simple(tag_name, tag_range)
else:
tag_mem = Memlet.simple(tag_name, '0')
state.add_edge(libnode, '_buffer', buf_node, None, buf_mem)
state.add_edge(src_node, None, libnode, '_src', src_mem)
state.add_edge(tag_node, None, libnode, '_tag', tag_mem)
return None
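# --- Illustrative sketch (added for exposition; not part of the original file).
# A hypothetical point-to-point exchange through the Send/Recv replacements
# above; the integer `rank` argument and the tag value are assumptions of
# this sketch, not part of the API.
@dace.program
def _example_send_recv(x: dace.int32[8], rank: dace.int32):
    if rank == 0:
        dace.comm.Send(x, 1, tag=42)  # expands into a Send library node
    elif rank == 1:
        dace.comm.Recv(x, 0, tag=42)  # expands into a Recv library node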
@oprepo.replaces('dace.comm.Irecv')
def _irecv(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, buffer: str, src: Union[str, sp.Expr, Number],
tag: Union[str, sp.Expr, Number], request: str):
from dace.libraries.mpi.nodes.irecv import Irecv
libnode = Irecv('_Irecv_')
buf_range = None
if isinstance(buffer, tuple):
buf_name, buf_range = buffer
else:
buf_name = buffer
desc = sdfg.arrays[buf_name]
buf_node = state.add_write(buf_name)
req_range = None
if isinstance(request, tuple):
req_name, req_range = request
else:
req_name = request
req_desc = sdfg.arrays[req_name]
req_node = state.add_write(req_name)
conn = libnode.out_connectors
conn = {c: (dtypes.pointer(desc.dtype) if c == '_buffer' else t) for c, t in conn.items()}
conn = {c: (dtypes.pointer(req_desc.dtype) if c == '_request' else t) for c, t in conn.items()}
libnode.out_connectors = conn
src_range = None
if isinstance(src, tuple):
src_name, src_range = src
src_node = state.add_read(src_name)
elif isinstance(src, str) and src in sdfg.arrays.keys():
src_name = src
src_node = state.add_read(src_name)
else:
storage = desc.storage
src_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
src_node = state.add_access(src_name)
src_tasklet = state.add_tasklet('_set_src_', {}, {'__out'}, '__out = {}'.format(src))
state.add_edge(src_tasklet, '__out', src_node, None, Memlet.simple(src_name, '0'))
tag_range = None
if isinstance(tag, tuple):
tag_name, tag_range = tag
tag_node = state.add_read(tag_name)
elif isinstance(tag, str) and tag in sdfg.arrays.keys():
tag_name = tag
tag_node = state.add_read(tag)
else:
storage = desc.storage
tag_name = _define_local_scalar(pv, sdfg, state, dace.int32, storage)
tag_node = state.add_access(tag_name)
tag_tasklet = state.add_tasklet('_set_tag_', {}, {'__out'}, '__out = {}'.format(tag))
state.add_edge(tag_tasklet, '__out', tag_node, None, Memlet.simple(tag_name, '0'))
if buf_range:
buf_mem = Memlet.simple(buf_name, buf_range)
else:
buf_mem = Memlet.from_array(buf_name, desc)
if req_range:
req_mem = Memlet.simple(req_name, req_range)
else:
req_mem = Memlet.from_array(req_name, req_desc)
if src_range:
src_mem = Memlet.simple(src_name, src_range)
else:
src_mem = Memlet.simple(src_name, '0')
if tag_range:
tag_mem = Memlet.simple(tag_name, tag_range)
else:
tag_mem = Memlet.simple(tag_name, '0')
state.add_edge(libnode, '_buffer', buf_node, None, buf_mem)
state.add_edge(src_node, None, libnode, '_src', src_mem)
state.add_edge(tag_node, None, libnode, '_tag', tag_mem)
state.add_edge(libnode, '_request', req_node, None, req_mem)
return None
@oprepo.replaces('dace.comm.Wait')
def _wait(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, request: str):
from dace.libraries.mpi.nodes.wait import Wait
libnode = Wait('_Wait_')
req_range = None
if isinstance(request, tuple):
req_name, req_range = request
else:
req_name = request
desc = sdfg.arrays[req_name]
req_node = state.add_access(req_name)
src = sdfg.add_temp_transient([1], dtypes.int32)
src_node = state.add_write(src[0])
tag = sdfg.add_temp_transient([1], dtypes.int32)
tag_node = state.add_write(tag[0])
if req_range:
req_mem = Memlet.simple(req_name, req_range)
else:
req_mem = Memlet.from_array(req_name, desc)
state.add_edge(req_node, None, libnode, '_request', req_mem)
state.add_edge(libnode, '_stat_source', src_node, None, Memlet.from_array(*src))
state.add_edge(libnode, '_stat_tag', tag_node, None, Memlet.from_array(*tag))
return None
@oprepo.replaces('dace.comm.Waitall')
def _waitall(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, request: str):
from dace.libraries.mpi.nodes.wait import Waitall
libnode = Waitall('_Waitall_')
req_range = None
if isinstance(request, tuple):
req_name, req_range = request
else:
req_name = request
desc = sdfg.arrays[req_name]
req_node = state.add_access(req_name)
if req_range:
req_mem = Memlet.simple(req_name, req_range)
else:
req_mem = Memlet.from_array(req_name, desc)
state.add_edge(req_node, None, libnode, '_request', req_mem)
return None
@oprepo.replaces('dace.comm.Cart_create')
def _cart_create(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, dims: ShapeType):
""" Creates a process-grid and adds it to the DaCe program. The process-grid is implemented with [MPI_Cart_create](https://www.mpich.org/static/docs/latest/www3/MPI_Cart_create.html).
:param dims: Shape of the process-grid (see `dims` parameter of `MPI_Cart_create`), e.g., [2, 3, 3].
:return: Name of the new process-grid descriptor.
"""
pgrid_name = sdfg.add_pgrid(dims)
# Dummy tasklet adds MPI variables to the program's state.
from dace.libraries.mpi import Dummy
tasklet = Dummy(pgrid_name, [
f'MPI_Comm {pgrid_name}_comm;',
f'MPI_Group {pgrid_name}_group;',
f'int {pgrid_name}_coords[{len(dims)}];',
f'int {pgrid_name}_dims[{len(dims)}];',
f'int {pgrid_name}_rank;',
f'int {pgrid_name}_size;',
f'bool {pgrid_name}_valid;',
])
state.add_node(tasklet)
# Pseudo-writing to a dummy variable to avoid removal of Dummy node by transformations.
_, scal = sdfg.add_scalar(pgrid_name, dace.int32, transient=True)
wnode = state.add_write(pgrid_name)
state.add_edge(tasklet, '__out', wnode, None, Memlet.from_array(pgrid_name, scal))
return pgrid_name
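# --- Illustrative sketch (added for exposition; not part of the original file).
# Creating a 2x2 process-grid from program code; assumes the program is
# launched on exactly 4 MPI ranks.
@dace.program
def _example_cart_create():
    pgrid = dace.comm.Cart_create([2, 2])  # name of the new grid descriptor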
@oprepo.replaces('dace.comm.Cart_sub')
def _cart_sub(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
parent_grid: str,
color: Sequence[Union[Integral, bool]],
exact_grid: RankType = None):
""" Partitions the `parent_grid` to lower-dimensional sub-grids and adds them to the DaCe program.
The sub-grids are implemented with [MPI_Cart_sub](https://www.mpich.org/static/docs/latest/www3/MPI_Cart_sub.html).
:param parent_grid: Parent process-grid (similar to the `comm` parameter of `MPI_Cart_sub`).
:param color: The i-th entry specifies whether the i-th dimension is kept in the sub-grid or is dropped (see `remain_dims` input of `MPI_Cart_sub`).
:param exact_grid: [DEVELOPER] If set, then out of all the sub-grids created, only the one that contains the rank with id `exact_grid` will be utilized for collective communication.
:return: Name of the new sub-grid descriptor.
"""
pgrid_name = sdfg.add_pgrid(parent_grid=parent_grid, color=color, exact_grid=exact_grid)
# Count sub-grid dimensions.
pgrid_ndims = sum([bool(c) for c in color])
# Dummy tasklet adds MPI variables to the program's state.
from dace.libraries.mpi import Dummy
tasklet = Dummy(pgrid_name, [
f'MPI_Comm {pgrid_name}_comm;',
f'MPI_Group {pgrid_name}_group;',
f'int {pgrid_name}_coords[{pgrid_ndims}];',
f'int {pgrid_name}_dims[{pgrid_ndims}];',
f'int {pgrid_name}_rank;',
f'int {pgrid_name}_size;',
f'bool {pgrid_name}_valid;',
])
state.add_node(tasklet)
# Pseudo-writing to a dummy variable to avoid removal of Dummy node by transformations.
_, scal = sdfg.add_scalar(pgrid_name, dace.int32, transient=True)
wnode = state.add_write(pgrid_name)
state.add_edge(tasklet, '__out', wnode, None, Memlet.from_array(pgrid_name, scal))
return pgrid_name
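# --- Illustrative sketch (added for exposition; not part of the original file).
# Partitioning the 2x2 grid above into row sub-grids: `[False, True]` keeps
# only the second dimension (cf. the `remain_dims` input of MPI_Cart_sub).
@dace.program
def _example_cart_sub():
    parent = dace.comm.Cart_create([2, 2])
    rows = dace.comm.Cart_sub(parent, [False, True])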
@oprepo.replaces('dace.comm.Subarray')
def _subarray(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
array: Union[str, ShapeType],
subarray: Union[str, ShapeType],
dtype: dtypes.typeclass = None,
process_grid: str = None,
correspondence: Sequence[Integral] = None):
""" Adds a sub-array descriptor to the DaCe Program.
Sub-arrays are implemented (when `process_grid` is set) with [MPI_Type_create_subarray](https://www.mpich.org/static/docs/v3.2/www3/MPI_Type_create_subarray.html).
:param array: Either the name of an Array descriptor or the shape of the array (similar to the `array_of_sizes` parameter of `MPI_Type_create_subarray`).
:param subarray: Either the name of an Array descriptor or the sub-shape of the (sub-)array (similar to the `array_of_subsizes` parameter of `MPI_Type_create_subarray`).
:param dtype: Datatype of the array/sub-array (similar to the `oldtype` parameter of `MPI_Type_create_subarray`).
:param process_grid: Name of the process-grid for collective scatter/gather operations.
:param correspondence: Matching of the array/sub-array's dimensions to the process-grid's dimensions.
:return: Name of the new sub-array descriptor.
"""
# Get dtype, shape, and subshape
if isinstance(array, str):
shape = sdfg.arrays[array].shape
arr_dtype = sdfg.arrays[array].dtype
else:
shape = array
arr_dtype = None
if isinstance(subarray, str):
subshape = sdfg.arrays[subarray].shape
sub_dtype = sdfg.arrays[subarray].dtype
else:
subshape = subarray
sub_dtype = None
dtype = dtype or arr_dtype or sub_dtype
subarray_name = sdfg.add_subarray(dtype, shape, subshape, process_grid, correspondence)
# Generate subgraph only if process-grid is set, i.e., the sub-array will be used for collective scatter/gather ops.
if process_grid:
# Dummy tasklet adds MPI variables to the program's state.
from dace.libraries.mpi import Dummy
tasklet = Dummy(
subarray_name,
[f'MPI_Datatype {subarray_name};', f'int* {subarray_name}_counts;', f'int* {subarray_name}_displs;'])
state.add_node(tasklet)
# Pseudo-writing to a dummy variable to avoid removal of Dummy node by transformations.
_, scal = sdfg.add_scalar(subarray_name, dace.int32, transient=True)
wnode = state.add_write(subarray_name)
state.add_edge(tasklet, '__out', wnode, None, Memlet.from_array(subarray_name, scal))
return subarray_name
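# --- Illustrative sketch (added for exposition; not part of the original file).
# Registering a sub-array descriptor for an 8x8 global array tiled into 4x4
# local blocks over a 2x2 grid; `correspondence=[0, 1]` matches array
# dimensions to grid dimensions one-to-one. The shapes are assumptions.
@dace.program
def _example_subarray(A: dace.float64[8, 8], lA: dace.float64[4, 4]):
    grid = dace.comm.Cart_create([2, 2])
    subarr = dace.comm.Subarray(A, lA, process_grid=grid, correspondence=[0, 1])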
@oprepo.replaces('dace.comm.BlockScatter')
def _block_scatter(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
in_buffer: str,
out_buffer: str,
scatter_grid: str,
bcast_grid: str = None,
correspondence: Sequence[Integral] = None):
""" Block-scatters an Array using process-grids, sub-arrays, and the BlockScatter library node.
This method currently does not support Array slices and imperfect tiling.
:param in_buffer: Name of the (global) Array descriptor.
:param out_buffer: Name of the (local) Array descriptor.
:param scatter_grid: Name of the sub-grid used for scattering the Array (replication group leaders).
:param bcast_grid: Name of the sub-grid used for broadcasting the Array (replication groups).
:param correspondence: Matching of the array/sub-array's dimensions to the process-grid's dimensions.
:return: Name of the new sub-array descriptor.
"""
in_desc = sdfg.arrays[in_buffer]
out_desc = sdfg.arrays[out_buffer]
if in_desc.dtype != out_desc.dtype:
raise ValueError("Input/output buffer datatypes must match!")
subarray_name = _subarray(pv,
sdfg,
state,
in_buffer,
out_buffer,
process_grid=scatter_grid,
correspondence=correspondence)
from dace.libraries.mpi import BlockScatter
libnode = BlockScatter('_BlockScatter_', subarray_name, scatter_grid, bcast_grid)
inbuf_name = in_buffer
in_desc = sdfg.arrays[inbuf_name]
inbuf_node = state.add_read(inbuf_name)
inbuf_mem = Memlet.from_array(inbuf_name, in_desc)
outbuf_name = out_buffer
out_desc = sdfg.arrays[outbuf_name]
outbuf_node = state.add_write(outbuf_name)
outbuf_mem = Memlet.from_array(outbuf_name, out_desc)
state.add_edge(inbuf_node, None, libnode, '_inp_buffer', inbuf_mem)
state.add_edge(libnode, '_out_buffer', outbuf_node, None, outbuf_mem)
return subarray_name
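# --- Illustrative sketch (added for exposition; not part of the original file).
# Block-scattering the global array into per-rank tiles; with no `bcast_grid`
# every rank of the scatter grid receives exactly one block. Shapes assumed.
@dace.program
def _example_block_scatter(A: dace.float64[8, 8], lA: dace.float64[4, 4]):
    grid = dace.comm.Cart_create([2, 2])
    dace.comm.BlockScatter(A, lA, grid)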
@oprepo.replaces('dace.comm.BlockGather')
def _block_gather(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
in_buffer: str,
out_buffer: str,
gather_grid: str,
reduce_grid: str = None,
correspondence: Sequence[Integral] = None):
""" Block-gathers an Array using process-grids, sub-arrays, and the BlockGather library node.
This method currently does not support Array slices and imperfect tiling.
:param in_buffer: Name of the (local) Array descriptor.
:param out_buffer: Name of the (global) Array descriptor.
:param gather_grid: Name of the sub-grid used for gathering the Array (reduction group leaders).
:param reduce_grid: Name of the sub-grid used for reducing the Array (reduction groups).
:param correspondence: Matching of the array/sub-array's dimensions to the process-grid's dimensions.
:return: Name of the new sub-array descriptor.
"""
in_desc = sdfg.arrays[in_buffer]
out_desc = sdfg.arrays[out_buffer]
if in_desc.dtype != out_desc.dtype:
raise ValueError("Input/output buffer datatypes must match!")
subarray_name = _subarray(pv,
sdfg,
state,
out_buffer,
in_buffer,
process_grid=gather_grid,
correspondence=correspondence)
from dace.libraries.mpi import BlockGather
libnode = BlockGather('_BlockGather_', subarray_name, gather_grid, reduce_grid)
inbuf_name = in_buffer
in_desc = sdfg.arrays[inbuf_name]
inbuf_node = state.add_read(inbuf_name)
inbuf_mem = Memlet.from_array(inbuf_name, in_desc)
outbuf_name = out_buffer
out_desc = sdfg.arrays[outbuf_name]
outbuf_node = state.add_write(outbuf_name)
outbuf_mem = Memlet.from_array(outbuf_name, out_desc)
state.add_edge(inbuf_node, None, libnode, '_inp_buffer', inbuf_mem)
state.add_edge(libnode, '_out_buffer', outbuf_node, None, outbuf_mem)
return subarray_name
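# --- Illustrative sketch (added for exposition; not part of the original file).
# The inverse of the scatter above: collecting 4x4 local tiles back into the
# 8x8 global array on the gather grid. Shapes assumed.
@dace.program
def _example_block_gather(lA: dace.float64[4, 4], A: dace.float64[8, 8]):
    grid = dace.comm.Cart_create([2, 2])
    dace.comm.BlockGather(lA, A, grid)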
@oprepo.replaces('dace.comm.Redistribute')
def _redistribute(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, in_buffer: str, in_subarray: str, out_buffer: str,
out_subarray: str):
""" Redistributes an Array using process-grids, sub-arrays, and the Redistribute library node.
:param in_buffer: Name of the (local) input Array descriptor.
:param in_subarray: Input sub-array descriptor.
:param out_buffer: Name of the (local) output Array descriptor.
:param out_subarray: Output sub-array descriptor.
:return: Name of the new redistribution descriptor.
"""
in_desc = sdfg.arrays[in_buffer]
out_desc = sdfg.arrays[out_buffer]
rdistrarray_name = sdfg.add_rdistrarray(in_subarray, out_subarray)
from dace.libraries.mpi import Dummy, Redistribute
tasklet = Dummy(rdistrarray_name, [
f'MPI_Datatype {rdistrarray_name};', f'int {rdistrarray_name}_sends;',
f'MPI_Datatype* {rdistrarray_name}_send_types;', f'int* {rdistrarray_name}_dst_ranks;',
f'int {rdistrarray_name}_recvs;', f'MPI_Datatype* {rdistrarray_name}_recv_types;',
f'int* {rdistrarray_name}_src_ranks;', f'int {rdistrarray_name}_self_copies;',
f'int* {rdistrarray_name}_self_src;', f'int* {rdistrarray_name}_self_dst;',
f'int* {rdistrarray_name}_self_size;'
])
state.add_node(tasklet)
_, scal = sdfg.add_scalar(rdistrarray_name, dace.int32, transient=True)
wnode = state.add_write(rdistrarray_name)
state.add_edge(tasklet, '__out', wnode, None, Memlet.from_array(rdistrarray_name, scal))
libnode = Redistribute('_Redistribute_', rdistrarray_name)
inbuf_range = None
if isinstance(in_buffer, tuple):
inbuf_name, inbuf_range = in_buffer
else:
inbuf_name = in_buffer
in_desc = sdfg.arrays[inbuf_name]
inbuf_node = state.add_read(inbuf_name)
outbuf_range = None
if isinstance(out_buffer, tuple):
outbuf_name, outbuf_range = out_buffer
else:
outbuf_name = out_buffer
out_desc = sdfg.arrays[outbuf_name]
outbuf_node = state.add_write(outbuf_name)
if inbuf_range:
inbuf_mem = Memlet.simple(inbuf_name, inbuf_range)
else:
inbuf_mem = Memlet.from_array(inbuf_name, in_desc)
if outbuf_range:
outbuf_mem = Memlet.simple(outbuf_name, outbuf_range)
else:
outbuf_mem = Memlet.from_array(outbuf_name, out_desc)
state.add_edge(inbuf_node, None, libnode, '_inp_buffer', inbuf_mem)
state.add_edge(libnode, '_out_buffer', outbuf_node, None, outbuf_mem)
return rdistrarray_name
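# --- Illustrative sketch (added for exposition; not part of the original file).
# Moving data between a row-wise and a column-wise tiling of the same 8x8
# global array; each layout gets its own sub-array descriptor. The shapes
# and grid layouts are assumptions of this sketch.
@dace.program
def _example_redistribute(lA: dace.float64[4, 8], lB: dace.float64[8, 4]):
    rows = dace.comm.Cart_create([2, 1])
    cols = dace.comm.Cart_create([1, 2])
    sa = dace.comm.Subarray((8, 8), lA, dace.float64, process_grid=rows)
    sb = dace.comm.Subarray((8, 8), lB, dace.float64, process_grid=cols)
    dace.comm.Redistribute(lA, sa, lB, sb)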
@oprepo.replaces('dace.comm.BCScatter')
def _bcscatter(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, in_buffer: str, out_buffer: str,
block_sizes: Union[str, Sequence[Union[sp.Expr, Number]]]):
from dace.libraries.pblas.nodes.pgeadd import BlockCyclicScatter
libnode = BlockCyclicScatter('_BCScatter_')
inbuf_range = None
if isinstance(in_buffer, tuple):
inbuf_name, inbuf_range = in_buffer
else:
inbuf_name = in_buffer
in_desc = sdfg.arrays[inbuf_name]
inbuf_node = state.add_read(inbuf_name)
bsizes_range = None
if isinstance(block_sizes, (list, tuple)):
if isinstance(block_sizes[0], str):
bsizes_name, bsizes_range = block_sizes
bsizes_desc = sdfg.arrays[bsizes_name]
bsizes_node = state.add_read(bsizes_name)
else:
bsizes_name, bsizes_desc = sdfg.add_temp_transient((len(block_sizes), ), dtype=dace.int32)
bsizes_node = state.add_access(bsizes_name)
bsizes_tasklet = state.add_tasklet(
'_set_bsizes_', {}, {'__out'},
";".join(["__out[{}] = {}".format(i, sz) for i, sz in enumerate(block_sizes)]))
state.add_edge(bsizes_tasklet, '__out', bsizes_node, None, Memlet.from_array(bsizes_name, bsizes_desc))
else:
bsizes_name = block_sizes
bsizes_desc = sdfg.arrays[bsizes_name]
bsizes_node = state.add_read(bsizes_name)
outbuf_range = None
if isinstance(out_buffer, tuple):
outbuf_name, outbuf_range = out_buffer
else:
outbuf_name = out_buffer
out_desc = sdfg.arrays[outbuf_name]
outbuf_node = state.add_write(outbuf_name)
gdesc = sdfg.add_temp_transient((9, ), dtype=dace.int32)
gdesc_node = state.add_write(gdesc[0])
ldesc = sdfg.add_temp_transient((9, ), dtype=dace.int32)
ldesc_node = state.add_write(ldesc[0])
if inbuf_range:
inbuf_mem = Memlet.simple(inbuf_name, inbuf_range)
else:
inbuf_mem = Memlet.from_array(inbuf_name, in_desc)
if bsizes_range:
bsizes_mem = Memlet.simple(bsizes_name, bsizes_range)
else:
bsizes_mem = Memlet.from_array(bsizes_name, bsizes_desc)
if outbuf_range:
outbuf_mem = Memlet.simple(outbuf_name, outbuf_range)
else:
outbuf_mem = Memlet.from_array(outbuf_name, out_desc)
gdesc_mem = Memlet.from_array(*gdesc)
ldesc_mem = Memlet.from_array(*ldesc)
state.add_edge(inbuf_node, None, libnode, '_inbuffer', inbuf_mem)
state.add_edge(bsizes_node, None, libnode, '_block_sizes', bsizes_mem)
state.add_edge(libnode, '_outbuffer', outbuf_node, None, outbuf_mem)
state.add_edge(libnode, '_gdescriptor', gdesc_node, None, gdesc_mem)
state.add_edge(libnode, '_ldescriptor', ldesc_node, None, ldesc_mem)
return [gdesc[0], ldesc[0]]
@oprepo.replaces('dace.comm.BCGather')
def _bcgather(pv: 'ProgramVisitor', sdfg: SDFG, state: SDFGState, in_buffer: str, out_buffer: str,
block_sizes: Union[str, Sequence[Union[sp.Expr, Number]]]):
from dace.libraries.pblas.nodes.pgeadd import BlockCyclicGather
libnode = BlockCyclicGather('_BCGather_')
inbuf_range = None
if isinstance(in_buffer, tuple):
inbuf_name, inbuf_range = in_buffer
else:
inbuf_name = in_buffer
in_desc = sdfg.arrays[inbuf_name]
inbuf_node = state.add_read(inbuf_name)
bsizes_range = None
if isinstance(block_sizes, (list, tuple)):
if isinstance(block_sizes[0], str):
bsizes_name, bsizes_range = block_sizes
bsizes_desc = sdfg.arrays[bsizes_name]
bsizes_node = state.add_read(bsizes_name)
else:
bsizes_name, bsizes_desc = sdfg.add_temp_transient((len(block_sizes), ), dtype=dace.int32)
bsizes_node = state.add_access(bsizes_name)
bsizes_tasklet = state.add_tasklet(
'_set_bsizes_', {}, {'__out'},
";".join(["__out[{}] = {}".format(i, sz) for i, sz in enumerate(block_sizes)]))
state.add_edge(bsizes_tasklet, '__out', bsizes_node, None, Memlet.from_array(bsizes_name, bsizes_desc))
else:
bsizes_name = block_sizes
bsizes_desc = sdfg.arrays[bsizes_name]
bsizes_node = state.add_read(bsizes_name)
outbuf_range = None
if isinstance(out_buffer, tuple):
outbuf_name, outbuf_range = out_buffer
else:
outbuf_name = out_buffer
out_desc = sdfg.arrays[outbuf_name]
outbuf_node = state.add_write(outbuf_name)
if inbuf_range:
inbuf_mem = Memlet.simple(inbuf_name, inbuf_range)
else:
inbuf_mem = Memlet.from_array(inbuf_name, in_desc)
if bsizes_range:
bsizes_mem = Memlet.simple(bsizes_name, bsizes_range)
else:
bsizes_mem = Memlet.from_array(bsizes_name, bsizes_desc)
if outbuf_range:
outbuf_mem = Memlet.simple(outbuf_name, outbuf_range)
else:
outbuf_mem = Memlet.from_array(outbuf_name, out_desc)
state.add_edge(inbuf_node, None, libnode, '_inbuffer', inbuf_mem)
state.add_edge(bsizes_node, None, libnode, '_block_sizes', bsizes_mem)
state.add_edge(libnode, '_outbuffer', outbuf_node, None, outbuf_mem)
return None
@oprepo.replaces('distr.MatMult')
def _distr_matmult(pv: 'ProgramVisitor',
sdfg: SDFG,
state: SDFGState,
opa: str,
opb: str,
shape: Sequence[Union[sp.Expr, Number]],
a_block_sizes: Union[str, Sequence[Union[sp.Expr, Number]]] = None,
b_block_sizes: Union[str, Sequence[Union[sp.Expr, Number]]] = None,
c_block_sizes: Union[str, Sequence[Union[sp.Expr, Number]]] = None):
arra = sdfg.arrays[opa]
arrb = sdfg.arrays[opb]
if len(shape) == 3:
gm, gn, gk = shape
else:
gm, gn = shape
a_block_sizes = a_block_sizes or arra.shape
if len(a_block_sizes) < 2:
a_block_sizes = (a_block_sizes[0], 1)
b_block_sizes = b_block_sizes or arrb.shape
if len(b_block_sizes) < 2:
b_block_sizes = (b_block_sizes[0], 1)
if len(arra.shape) == 1 and len(arrb.shape) == 2:
a_block_sizes, b_block_sizes = b_block_sizes, a_block_sizes
a_bsizes_range = None
if isinstance(a_block_sizes, (list, tuple)):
if isinstance(a_block_sizes[0], str):
a_bsizes_name, a_bsizes_range = a_block_sizes
a_bsizes_desc = sdfg.arrays[a_bsizes_name]
a_bsizes_node = state.add_read(a_bsizes_name)
else:
a_bsizes_name, a_bsizes_desc = sdfg.add_temp_transient((len(a_block_sizes), ), dtype=dace.int32)
a_bsizes_node = state.add_access(a_bsizes_name)
a_bsizes_tasklet = state.add_tasklet(
'_set_a_bsizes_', {}, {'__out'},
";".join(["__out[{}] = {}".format(i, sz) for i, sz in enumerate(a_block_sizes)]))
state.add_edge(a_bsizes_tasklet, '__out', a_bsizes_node, None,
Memlet.from_array(a_bsizes_name, a_bsizes_desc))
else:
a_bsizes_name = a_block_sizes
a_bsizes_desc = sdfg.arrays[a_bsizes_name]
a_bsizes_node = state.add_read(a_bsizes_name)
b_bsizes_range = None
if isinstance(b_block_sizes, (list, tuple)):
if isinstance(b_block_sizes[0], str):
b_bsizes_name, b_bsizes_range = b_block_sizes
b_bsizes_desc = sdfg.arrays[b_bsizes_name]
b_bsizes_node = state.add_read(b_bsizes_name)
else:
b_bsizes_name, b_bsizes_desc = sdfg.add_temp_transient((len(b_block_sizes), ), dtype=dace.int32)
b_bsizes_node = state.add_access(b_bsizes_name)
b_bsizes_tasklet = state.add_tasklet(
'_set_b_sizes_', {}, {'__out'},
";".join(["__out[{}] = {}".format(i, sz) for i, sz in enumerate(b_block_sizes)]))
state.add_edge(b_bsizes_tasklet, '__out', b_bsizes_node, None,
Memlet.from_array(b_bsizes_name, b_bsizes_desc))
else:
b_bsizes_name = b_block_sizes
b_bsizes_desc = sdfg.arrays[b_bsizes_name]
b_bsizes_node = state.add_read(b_bsizes_name)
if len(arra.shape) == 2 and len(arrb.shape) == 2:
# Gemm
from dace.libraries.pblas.nodes.pgemm import Pgemm
tasklet = Pgemm("__DistrMatMult__", gm, gn, gk)
m = arra.shape[0]
n = arrb.shape[-1]
out = sdfg.add_temp_transient((m, n), dtype=arra.dtype)
elif len(arra.shape) == 2 and len(arrb.shape) == 1:
# Gemv
from dace.libraries.pblas.nodes.pgemv import Pgemv
tasklet = Pgemv("__DistrMatVecMult__", m=gm, n=gn)
if c_block_sizes:
m = c_block_sizes[0]
else:
m = arra.shape[0]
out = sdfg.add_temp_transient((m, ), dtype=arra.dtype)
elif len(arra.shape) == 1 and len(arrb.shape) == 2:
# Gemv transposed
# Swap a and b
opa, opb = opb, opa
arra, arrb = arrb, arra
from dace.libraries.pblas.nodes.pgemv import Pgemv
tasklet = Pgemv("__DistrMatVecMult__", transa='T', m=gm, n=gn)
if c_block_sizes:
n = c_block_sizes[0]
else:
n = arra.shape[1]
out = sdfg.add_temp_transient((n, ), dtype=arra.dtype)
anode = state.add_read(opa)
bnode = state.add_read(opb)
cnode = state.add_write(out[0])
if a_bsizes_range:
a_bsizes_mem = Memlet.simple(a_bsizes_name, a_bsizes_range)
else:
a_bsizes_mem = Memlet.from_array(a_bsizes_name, a_bsizes_desc)
if b_bsizes_range:
b_bsizes_mem = Memlet.simple(b_bsizes_name, b_bsizes_range)
else:
b_bsizes_mem = Memlet.from_array(b_bsizes_name, b_bsizes_desc)
state.add_edge(anode, None, tasklet, '_a', Memlet.from_array(opa, arra))
state.add_edge(bnode, None, tasklet, '_b', Memlet.from_array(opb, arrb))
state.add_edge(a_bsizes_node, None, tasklet, '_a_block_sizes', a_bsizes_mem)
state.add_edge(b_bsizes_node, None, tasklet, '_b_block_sizes', b_bsizes_mem)
state.add_edge(tasklet, '_c', cnode, None, Memlet.from_array(*out))
return out[0]
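# --- Illustrative sketch (added for exposition; not part of the original file).
# The `distr.MatMult` replacement dispatches on operand ranks: 2D x 2D lowers
# to Pgemm, 2D x 1D to Pgemv, and 1D x 2D to a transposed Pgemv with the
# operands swapped. A hypothetical call from program code, assuming local
# 4x8 / 8x4 blocks of 8x8 global operands:
#
#     C = distr.MatMult(lA, lB, (8, 8, 8))
#
# where (gm, gn, gk) = (8, 8, 8) are the global dimensions and the block-size
# arguments default to the local array shapes. The exact name resolution of
# `distr.MatMult` inside a program is an assumption of this sketch.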
| 39.472642
| 189
| 0.659162
| 5,739
| 41,841
| 4.518557
| 0.056456
| 0.053679
| 0.032855
| 0.022829
| 0.814939
| 0.77387
| 0.746761
| 0.728251
| 0.703764
| 0.677001
| 0
| 0.00407
| 0.230683
| 41,841
| 1,059
| 190
| 39.509915
| 0.801547
| 0.100834
| 0
| 0.720535
| 0
| 0
| 0.074468
| 0.0147
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024301
| false
| 0
| 0.037667
| 0
| 0.08627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f2fd4ed0ceeac138c53082df47ef8dee32ea612
| 9,026
|
py
|
Python
|
plugins/proofpoint_tap/komand_proofpoint_tap/actions/parse_tap_alert/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/proofpoint_tap/komand_proofpoint_tap/actions/parse_tap_alert/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/proofpoint_tap/komand_proofpoint_tap/actions/parse_tap_alert/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
import insightconnect_plugin_runtime
import json
class Component:
DESCRIPTION = "Parses a TAP alert"
class Input:
TAP_ALERT = "tap_alert"
class Output:
RESULTS = "results"
class ParseTapAlertInput(insightconnect_plugin_runtime.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"tap_alert": {
"type": "string",
"title": "Proofpoint TAP Alert",
"description": "A Proofpoint TAP alert",
"order": 1
}
},
"required": [
"tap_alert"
]
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
class ParseTapAlertOutput(insightconnect_plugin_runtime.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"results": {
"$ref": "#/definitions/tap_results",
"title": "Results",
"description": "Proofpoint TAP results",
"order": 1
}
},
"definitions": {
"browser": {
"type": "object",
"title": "browser",
"properties": {
"source_ip": {
"type": "string",
"title": "Source IP",
"description": "Source IP",
"order": 2
},
"time": {
"type": "string",
"title": "Time",
"description": "Time",
"order": 1
},
"user_agent": {
"type": "string",
"title": "User Agent",
"description": "User agent string",
"order": 3
}
}
},
"message": {
"type": "object",
"title": "message",
"properties": {
"header_from": {
"type": "string",
"title": "Header From",
"description": "Header from",
"order": 5
},
"header_replyto": {
"type": "string",
"title": "Header Reply To",
"description": "Header reply to",
"order": 6
},
"message_guid": {
"type": "string",
"title": "Message GUID",
"description": "Message GUID",
"order": 7
},
"message_id": {
"type": "string",
"title": "Message ID",
"description": "Message ID",
"order": 8
},
"message_size": {
"type": "string",
"title": "Message Size",
"description": "Message size",
"order": 10
},
"recipients": {
"type": "string",
"title": "Recipients",
"description": "Recipients",
"order": 2
},
"sender": {
"type": "string",
"title": "Sender",
"description": "Sender",
"order": 4
},
"sender_ip": {
"type": "string",
"title": "Sender IP",
"description": "Sender IP",
"order": 9
},
"subject": {
"type": "string",
"title": "Subject",
"description": "Subject",
"order": 3
},
"threat_id": {
"type": "string",
"title": "Threat ID",
"description": "Unique identifier for this threat",
"order": 11
},
"time_delivered": {
"type": "string",
"title": "Time Delivered",
"description": "Time Delivered",
"order": 1
}
}
},
"tap_results": {
"type": "object",
"title": "tap_results",
"properties": {
"browser": {
"$ref": "#/definitions/browser",
"title": "Browser",
"description": "Browser information",
"order": 3
},
"message": {
"$ref": "#/definitions/message",
"title": "Message",
"description": "TAP alert meta data",
"order": 2
},
"threat": {
"$ref": "#/definitions/threat",
"title": "Threat",
"description": "Threat information",
"order": 1
}
},
"definitions": {
"browser": {
"type": "object",
"title": "browser",
"properties": {
"source_ip": {
"type": "string",
"title": "Source IP",
"description": "Source IP",
"order": 2
},
"time": {
"type": "string",
"title": "Time",
"description": "Time",
"order": 1
},
"user_agent": {
"type": "string",
"title": "User Agent",
"description": "User agent string",
"order": 3
}
}
},
"message": {
"type": "object",
"title": "message",
"properties": {
"header_from": {
"type": "string",
"title": "Header From",
"description": "Header from",
"order": 5
},
"header_replyto": {
"type": "string",
"title": "Header Reply To",
"description": "Header reply to",
"order": 6
},
"message_guid": {
"type": "string",
"title": "Message GUID",
"description": "Message GUID",
"order": 7
},
"message_id": {
"type": "string",
"title": "Message ID",
"description": "Message ID",
"order": 8
},
"message_size": {
"type": "string",
"title": "Message Size",
"description": "Message size",
"order": 10
},
"recipients": {
"type": "string",
"title": "Recipients",
"description": "Recipients",
"order": 2
},
"sender": {
"type": "string",
"title": "Sender",
"description": "Sender",
"order": 4
},
"sender_ip": {
"type": "string",
"title": "Sender IP",
"description": "Sender IP",
"order": 9
},
"subject": {
"type": "string",
"title": "Subject",
"description": "Subject",
"order": 3
},
"threat_id": {
"type": "string",
"title": "Threat ID",
"description": "Unique identifier for this threat",
"order": 11
},
"time_delivered": {
"type": "string",
"title": "Time Delivered",
"description": "Time Delivered",
"order": 1
}
}
},
"threat": {
"type": "object",
"title": "threat",
"properties": {
"attachment_sha256": {
"type": "string",
"title": "Attachment SHA256 Hash",
"description": "Attachment SHA256 hash",
"order": 1
},
"category": {
"type": "string",
"title": "Category",
"description": "Category",
"order": 3
},
"condemnation_time": {
"type": "string",
"title": "Condemnation Time",
"description": "Condemnation Time",
"order": 4
},
"threat_details_url": {
"type": "string",
"title": "Threat Details URL",
"description": "URL for Details of the Threat",
"order": 5
},
"url": {
"type": "string",
"title": "URL",
"description": "URL",
"order": 2
}
}
}
}
},
"threat": {
"type": "object",
"title": "threat",
"properties": {
"attachment_sha256": {
"type": "string",
"title": "Attachment SHA256 Hash",
"description": "Attachment SHA256 hash",
"order": 1
},
"category": {
"type": "string",
"title": "Category",
"description": "Category",
"order": 3
},
"condemnation_time": {
"type": "string",
"title": "Condemnation Time",
"description": "Condemnation Time",
"order": 4
},
"threat_details_url": {
"type": "string",
"title": "Threat Details URL",
"description": "URL for Details of the Threat",
"order": 5
},
"url": {
"type": "string",
"title": "URL",
"description": "URL",
"order": 2
}
}
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
| 25.7151
| 65
| 0.402725
| 638
| 9,026
| 5.595611
| 0.137931
| 0.109244
| 0.163866
| 0.036975
| 0.803922
| 0.803922
| 0.803922
| 0.803922
| 0.776471
| 0.776471
| 0
| 0.012785
| 0.436738
| 9,026
| 350
| 66
| 25.788571
| 0.689418
| 0.004099
| 0
| 0.715134
| 1
| 0
| 0.94136
| 0.015689
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005935
| false
| 0
| 0.005935
| 0
| 0.041543
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7f5b4e84e63b5bfd61a80c60ab0eb7316ba0c5d8
| 2,593
|
py
|
Python
|
tick/survival/survival.py
|
sumau/tick
|
1b56924a35463e12f7775bc0aec182364f26f2c6
|
[
"BSD-3-Clause"
] | 411
|
2017-03-30T15:22:05.000Z
|
2022-03-27T01:58:34.000Z
|
tick/survival/survival.py
|
saurabhdash/tick
|
bbc561804eb1fdcb4c71b9e3e2d83a66e7b13a48
|
[
"BSD-3-Clause"
] | 345
|
2017-04-13T14:53:20.000Z
|
2022-03-26T00:46:22.000Z
|
tick/survival/survival.py
|
saurabhdash/tick
|
bbc561804eb1fdcb4c71b9e3e2d83a66e7b13a48
|
[
"BSD-3-Clause"
] | 102
|
2017-04-25T11:47:53.000Z
|
2022-02-15T11:45:49.000Z
|
# License: BSD 3 clause
def kaplan_meier(timestamps, event_observed):
"""Computes the Kaplan-Meier survival function estimation
given by:
.. math::
S{(t)} = \\prod_{i:t_{(i)}<t}^n \\left(1 - \\frac{d_i}{n_i} \\right)
where
- :math:`d_i` are the number of deaths at :math:`t_{i}`
- :math:`n_i` are the number of patients alive just before :math:`t_{i}`
Parameters
----------
timestamps : `numpy.array`
Timestamps for each observation
event_observed : `numpy.array`
Bool array denoting if the death event was observed or not
Returns
-------
output : `numpy.array`
The computed Kaplan-Meier survival function estimation
"""
import numpy as np
if isinstance(timestamps, list):
timestamps = np.array(timestamps)
if isinstance(event_observed, list):
event_observed = np.array(event_observed)
timestamps_observed = timestamps[event_observed == 1]
unique_timestamps_observed = np.concatenate(
(np.zeros(1), np.unique(timestamps_observed)))
return np.cumprod(
np.fromiter(
(1.0 - np.sum(t == timestamps_observed) / np.sum(t <= timestamps)
for t in unique_timestamps_observed), dtype='float',
count=unique_timestamps_observed.size))
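# --- Worked example (added for exposition; not part of the original file). ---
# With three observations that are all observed events, the estimator starts
# at S(0) = 1 and multiplies in (1 - d_i/n_i) at each event time:
# S(1) = 1 * (1 - 1/3) = 2/3, S(2) = 2/3 * (1 - 1/2) = 1/3,
# S(3) = 1/3 * (1 - 1/1) = 0, so:
# >>> kaplan_meier([1, 2, 3], [1, 1, 1])
# array([1.        , 0.66666667, 0.33333333, 0.        ])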
def nelson_aalen(timestamps, event_observed):
"""Computes the Nelson-Aalen cumulative hazard rate estimation
given by:
.. math::
\\Lambda{(t_{i})} = \\sum_{j=1}^i \\frac{d_j}{n_j}
where
- :math:`d_j` are the number of deaths at :math:`t_{j}`
- :math:`n_j` are the number of patients alive just before :math:`t_{j}`
Parameters
----------
timestamps : `numpy.array`
Timestamps for each observation
event_observed : `numpy.array`
Bool array denoting if the death event was observed or not
Returns
-------
output : `numpy.array`
The computed Nelson-Aalen cumulative hazard rate
"""
import numpy as np
if isinstance(timestamps, list):
timestamps = np.array(timestamps)
if isinstance(event_observed, list):
event_observed = np.array(event_observed)
timestamps_observed = timestamps[event_observed == 1]
unique_timestamps_observed = np.concatenate(
(np.zeros(1), np.unique(timestamps_observed)))
return np.cumsum(
np.fromiter((np.sum(t == timestamps_observed) / np.sum(t <= timestamps)
for t in unique_timestamps_observed), dtype='float',
count=unique_timestamps_observed.size))
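# --- Worked example (added for exposition; not part of the original file). ---
# For the same fully-observed sample, the cumulative hazard sums d_j/n_j:
# Lambda(1) = 1/3, Lambda(2) = 1/3 + 1/2 = 5/6, Lambda(3) = 5/6 + 1 = 11/6, so:
# >>> nelson_aalen([1, 2, 3], [1, 1, 1])
# array([0.        , 0.33333333, 0.83333333, 1.83333333])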
| 29.134831
| 79
| 0.6344
| 325
| 2,593
| 4.913846
| 0.246154
| 0.097683
| 0.120225
| 0.035066
| 0.876644
| 0.7464
| 0.7464
| 0.7464
| 0.712586
| 0.712586
| 0
| 0.004604
| 0.246047
| 2,593
| 88
| 80
| 29.465909
| 0.812276
| 0.438103
| 0
| 0.740741
| 0
| 0
| 0.007686
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.074074
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f6b4ea2542b5996a0487453a26b6b399f605181b
| 150
|
py
|
Python
|
tests/__init__.py
|
Python-Tools/schema_entry
|
5884843607a44cc6de5943e2be04c1a5c48f1886
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
Python-Tools/schema_entry
|
5884843607a44cc6de5943e2be04c1a5c48f1886
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
Python-Tools/schema_entry
|
5884843607a44cc6de5943e2be04c1a5c48f1886
|
[
"MIT"
] | null | null | null |
def setUpModule() -> None:
print("[SetUp Module entry_tree Test]")
def tearDownModule() -> None:
print("[TearDown Module entry_tree Test]")
| 21.428571
| 46
| 0.68
| 18
| 150
| 5.555556
| 0.611111
| 0.18
| 0.3
| 0.38
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 150
| 6
| 47
| 25
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0.42
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
f6f14167e083748c2737c75d91b2160f51e6acda
| 13,769
|
py
|
Python
|
english/migrations/0001_initial.py
|
moumou1/English-Pronunciation
|
99f6fa231feb8e614ee5dc10ceaf536a77fc45cd
|
[
"MIT"
] | null | null | null |
english/migrations/0001_initial.py
|
moumou1/English-Pronunciation
|
99f6fa231feb8e614ee5dc10ceaf536a77fc45cd
|
[
"MIT"
] | null | null | null |
english/migrations/0001_initial.py
|
moumou1/English-Pronunciation
|
99f6fa231feb8e614ee5dc10ceaf536a77fc45cd
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='database',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('vocabulary', models.CharField(max_length=20)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='exercise',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('vocabulary', models.CharField(max_length=20)),
('answer', models.CharField(max_length=20)),
('vowel', models.CharField(max_length=20)),
('history', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='exercisescratch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('topic1', models.CharField(max_length=20)),
('topic2', models.CharField(max_length=20)),
('topic3', models.CharField(max_length=20)),
('topic4', models.CharField(max_length=20)),
('topic5', models.CharField(max_length=20)),
('topic6', models.CharField(max_length=20)),
('topic7', models.CharField(max_length=20)),
('topic8', models.CharField(max_length=20)),
('topic9', models.CharField(max_length=20)),
('topic10', models.CharField(max_length=20)),
('topic11', models.CharField(max_length=20)),
('topic12', models.CharField(max_length=20)),
('topic13', models.CharField(max_length=20)),
('topic14', models.CharField(max_length=20)),
('topic15', models.CharField(max_length=20)),
('topic16', models.CharField(max_length=20)),
('topic17', models.CharField(max_length=20)),
('topic18', models.CharField(max_length=20)),
('topic19', models.CharField(max_length=20)),
('topic20', models.CharField(max_length=20)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='group',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('number', models.CharField(max_length=20)),
('name', models.CharField(max_length=20)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='kkdata',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('word', models.CharField(max_length=20, blank=True)),
('kk', models.CharField(max_length=20)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='phoneme',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('vowel', models.CharField(max_length=20)),
('packet', models.ForeignKey(to='english.group')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='posttestscratch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('topic1', models.CharField(max_length=20)),
('topic2', models.CharField(max_length=20)),
('topic3', models.CharField(max_length=20)),
('topic4', models.CharField(max_length=20)),
('topic5', models.CharField(max_length=20)),
('topic6', models.CharField(max_length=20)),
('topic7', models.CharField(max_length=20)),
('topic8', models.CharField(max_length=20)),
('topic9', models.CharField(max_length=20)),
('topic10', models.CharField(max_length=20)),
('topic11', models.CharField(max_length=20)),
('topic12', models.CharField(max_length=20)),
('topic13', models.CharField(max_length=20)),
('topic14', models.CharField(max_length=20)),
('topic15', models.CharField(max_length=20)),
('topic16', models.CharField(max_length=20)),
('topic17', models.CharField(max_length=20)),
('topic18', models.CharField(max_length=20)),
('topic19', models.CharField(max_length=20)),
('topic20', models.CharField(max_length=20)),
('topic21', models.CharField(max_length=20)),
('topic22', models.CharField(max_length=20)),
('topic23', models.CharField(max_length=20)),
('topic24', models.CharField(max_length=20)),
('topic25', models.CharField(max_length=20)),
('topic26', models.CharField(max_length=20)),
('topic27', models.CharField(max_length=20)),
('topic28', models.CharField(max_length=20)),
('topic29', models.CharField(max_length=20)),
('topic30', models.CharField(max_length=20)),
('topic31', models.CharField(max_length=20)),
('topic32', models.CharField(max_length=20)),
('topic33', models.CharField(max_length=20)),
('topic34', models.CharField(max_length=20)),
('topic35', models.CharField(max_length=20)),
('topic36', models.CharField(max_length=20)),
('topic37', models.CharField(max_length=20)),
('topic38', models.CharField(max_length=20)),
('topic39', models.CharField(max_length=20)),
('topic40', models.CharField(max_length=20)),
('topic41', models.CharField(max_length=20)),
('topic42', models.CharField(max_length=20)),
('topic43', models.CharField(max_length=20)),
('topic44', models.CharField(max_length=20)),
('topic45', models.CharField(max_length=20)),
('topic46', models.CharField(max_length=20)),
('topic47', models.CharField(max_length=20)),
('topic48', models.CharField(max_length=20)),
('topic49', models.CharField(max_length=20)),
('topic50', models.CharField(max_length=20)),
('topic51', models.CharField(max_length=20)),
('topic52', models.CharField(max_length=20)),
('topic53', models.CharField(max_length=20)),
('topic54', models.CharField(max_length=20)),
('topic55', models.CharField(max_length=20)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='posttestspeaking',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('vocabulary', models.CharField(max_length=20)),
('answer', models.CharField(max_length=20)),
('vowel', models.CharField(max_length=20)),
('time', models.DateTimeField()),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='pretestscratch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('topic1', models.CharField(max_length=20)),
('topic2', models.CharField(max_length=20)),
('topic3', models.CharField(max_length=20)),
('topic4', models.CharField(max_length=20)),
('topic5', models.CharField(max_length=20)),
('topic6', models.CharField(max_length=20)),
('topic7', models.CharField(max_length=20)),
('topic8', models.CharField(max_length=20)),
('topic9', models.CharField(max_length=20)),
('topic10', models.CharField(max_length=20)),
('topic11', models.CharField(max_length=20)),
('topic12', models.CharField(max_length=20)),
('topic13', models.CharField(max_length=20)),
('topic14', models.CharField(max_length=20)),
('topic15', models.CharField(max_length=20)),
('topic16', models.CharField(max_length=20)),
('topic17', models.CharField(max_length=20)),
('topic18', models.CharField(max_length=20)),
('topic19', models.CharField(max_length=20)),
('topic20', models.CharField(max_length=20)),
('topic21', models.CharField(max_length=20)),
('topic22', models.CharField(max_length=20)),
('topic23', models.CharField(max_length=20)),
('topic24', models.CharField(max_length=20)),
('topic25', models.CharField(max_length=20)),
('topic26', models.CharField(max_length=20)),
('topic27', models.CharField(max_length=20)),
('topic28', models.CharField(max_length=20)),
('topic29', models.CharField(max_length=20)),
('topic30', models.CharField(max_length=20)),
('topic31', models.CharField(max_length=20)),
('topic32', models.CharField(max_length=20)),
('topic33', models.CharField(max_length=20)),
('topic34', models.CharField(max_length=20)),
('topic35', models.CharField(max_length=20)),
('topic36', models.CharField(max_length=20)),
('topic37', models.CharField(max_length=20)),
('topic38', models.CharField(max_length=20)),
('topic39', models.CharField(max_length=20)),
('topic40', models.CharField(max_length=20)),
('topic41', models.CharField(max_length=20)),
('topic42', models.CharField(max_length=20)),
('topic43', models.CharField(max_length=20)),
('topic44', models.CharField(max_length=20)),
('topic45', models.CharField(max_length=20)),
('topic46', models.CharField(max_length=20)),
('topic47', models.CharField(max_length=20)),
('topic48', models.CharField(max_length=20)),
('topic49', models.CharField(max_length=20)),
('topic50', models.CharField(max_length=20)),
('topic51', models.CharField(max_length=20)),
('topic52', models.CharField(max_length=20)),
('topic53', models.CharField(max_length=20)),
('topic54', models.CharField(max_length=20)),
('topic55', models.CharField(max_length=20)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='pretestspeaking',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('vocabulary', models.CharField(max_length=20)),
('answer', models.CharField(max_length=20)),
('vowel', models.CharField(max_length=20)),
('time', models.DateTimeField()),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='exercisescratch',
name='groups',
field=models.ForeignKey(to='english.group'),
preserve_default=True,
),
migrations.AddField(
model_name='exercisescratch',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
migrations.AddField(
model_name='exercise',
name='groups',
field=models.ForeignKey(to='english.group'),
preserve_default=True,
),
migrations.AddField(
model_name='exercise',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
migrations.AddField(
model_name='database',
name='vowel',
field=models.ForeignKey(to='english.phoneme'),
preserve_default=True,
),
]
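
For orientation, the operations above would correspond to model declarations roughly like the following minimal models.py sketch (a hypothetical reconstruction, assuming Django 1.7-era syntax where ForeignKey did not yet require on_delete; migration model names are lowercased, so the real classes in the english app may be capitalized and may differ):

# Hypothetical models.py sketch for a few of the models created above.
from django.conf import settings
from django.db import models

class group(models.Model):
    name = models.CharField(max_length=20)

class phoneme(models.Model):
    vowel = models.CharField(max_length=20)
    packet = models.ForeignKey('english.group')

class posttestspeaking(models.Model):
    vocabulary = models.CharField(max_length=20)
    answer = models.CharField(max_length=20)
    vowel = models.CharField(max_length=20)
    time = models.DateTimeField()
    user = models.ForeignKey(settings.AUTH_USER_MODEL)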
| 47.643599
| 114
| 0.534607
| 1,268
| 13,769
| 5.642744
| 0.100158
| 0.30608
| 0.367296
| 0.489727
| 0.937107
| 0.921454
| 0.910552
| 0.897135
| 0.897135
| 0.894899
| 0
| 0.05585
| 0.314692
| 13,769
| 288
| 115
| 47.809028
| 0.702416
| 0.001525
| 0
| 0.865248
| 0
| 0
| 0.094282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010638
| 0
| 0.021277
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f6fba97dc0f445dda9e357555e783bda11ce2ca2
| 90
|
py
|
Python
|
apps/core/roles/__init__.py
|
Praetorian-Defence/praetorian-api
|
181fa22b043e58b2ac9c5f4eae4c3471a44c9bf4
|
[
"MIT"
] | 2
|
2020-06-29T15:12:04.000Z
|
2020-10-13T14:18:21.000Z
|
apps/core/roles/__init__.py
|
Praetorian-Defence/praetorian-api
|
181fa22b043e58b2ac9c5f4eae4c3471a44c9bf4
|
[
"MIT"
] | 10
|
2021-01-04T11:33:38.000Z
|
2021-05-07T10:23:48.000Z
|
apps/core/roles/__init__.py
|
zurek11/praetorian-api
|
181fa22b043e58b2ac9c5f4eae4c3471a44c9bf4
|
[
"MIT"
] | null | null | null |
from apps.core.roles.devops import Devops
from apps.core.roles.temporary import Temporary
| 30
| 47
| 0.844444
| 14
| 90
| 5.428571
| 0.5
| 0.210526
| 0.315789
| 0.447368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 90
| 2
| 48
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
63e058d8935d528c13d3ca6ee1183ca377c54d8a
| 240
|
py
|
Python
|
acousticsim/distance/__init__.py
|
JoFrhwld/python-acoustic-similarity
|
50f71835532010b2fedf14b0ca3a52d88a9ab380
|
[
"MIT"
] | 5
|
2018-01-15T22:06:20.000Z
|
2022-02-21T07:02:40.000Z
|
acousticsim/distance/__init__.py
|
JoFrhwld/python-acoustic-similarity
|
50f71835532010b2fedf14b0ca3a52d88a9ab380
|
[
"MIT"
] | null | null | null |
acousticsim/distance/__init__.py
|
JoFrhwld/python-acoustic-similarity
|
50f71835532010b2fedf14b0ca3a52d88a9ab380
|
[
"MIT"
] | 2
|
2019-11-28T17:06:27.000Z
|
2019-12-05T22:57:28.000Z
|
from acousticsim.distance.dtw import dtw_distance
from acousticsim.distance.dct import dct_distance
from acousticsim.distance.xcorr import xcorr_distance
from acousticsim.distance.point import vowel_midpoint_distance, vowel_third_distance
| 40
| 84
| 0.8875
| 32
| 240
| 6.4375
| 0.34375
| 0.291262
| 0.446602
| 0.451456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 240
| 5
| 85
| 48
| 0.927928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
12401ceb50975adb194298573f43a65c08d622f7
| 77
|
py
|
Python
|
2 Operations with Data Types/2_slices.py
|
narayanants/python-mega-course
|
2ba2980ab21dfbed5f86f00695559f7831b5c566
|
[
"MIT"
] | null | null | null |
2 Operations with Data Types/2_slices.py
|
narayanants/python-mega-course
|
2ba2980ab21dfbed5f86f00695559f7831b5c566
|
[
"MIT"
] | null | null | null |
2 Operations with Data Types/2_slices.py
|
narayanants/python-mega-course
|
2ba2980ab21dfbed5f86f00695559f7831b5c566
|
[
"MIT"
] | null | null | null |
num = [1,2,3,4,5,5,6,7,8]
print(num[:2])
print(num[2:])
print(num[-5:-1])
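
A couple more slice forms follow naturally from the ones above; the optional step argument and omitted bounds work like this:

# The optional third slice argument is a step; omitted bounds default to the ends.
print(num[::2])    # every second element -> [1, 3, 5, 6, 8]
print(num[::-1])   # reversed copy -> [8, 7, 6, 5, 5, 4, 3, 2, 1]
print(num[1:7:2])  # indices 1, 3, 5 -> [2, 4, 5]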
| 9.625
| 25
| 0.519481
| 20
| 77
| 2
| 0.5
| 0.6
| 0.45
| 0.7
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191176
| 0.116883
| 77
| 7
| 26
| 11
| 0.397059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
1245916a5bafcc423590e07321618413a8aa3eeb
| 4,804
|
py
|
Python
|
django_log_entry_audit/receivers.py
|
dukic-dev/django-log-entry-audit
|
bd1031fd7ec2daa09e36aa902ca645cc1072184e
|
[
"MIT"
] | null | null | null |
django_log_entry_audit/receivers.py
|
dukic-dev/django-log-entry-audit
|
bd1031fd7ec2daa09e36aa902ca645cc1072184e
|
[
"MIT"
] | null | null | null |
django_log_entry_audit/receivers.py
|
dukic-dev/django-log-entry-audit
|
bd1031fd7ec2daa09e36aa902ca645cc1072184e
|
[
"MIT"
] | null | null | null |
import json
from django.conf import settings
from django.core import serializers
from django.dispatch import receiver
from django_log_entry_audit.encoders import ModelEncoder
from django_log_entry_audit.models import LogEntry, StatusEnum
from django_log_entry_audit.settings import AUDIT_SIGNALS
def _get_object_serialized_fields(obj):
return json.loads(ModelEncoder().encode(obj))
try:
@receiver(AUDIT_SIGNALS["create"])
def create_log_entry(sender, **kwargs):
obj = kwargs["obj"]
user = kwargs["user"]
LogEntry.objects.create(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.CREATED,
)
except KeyError:
pass
try:
@receiver(AUDIT_SIGNALS["update"])
def update_log_entries(sender, **kwargs):
objs = kwargs["objs"]
user = kwargs["user"]
if len(objs) == 1:
obj = objs[0]
LogEntry.objects.create(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.UPDATED,
)
else:
log_entries = []
for obj in objs:
log_entries.append(
LogEntry(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.UPDATED,
)
)
LogEntry.objects.bulk_create(log_entries)
except KeyError:
pass
try:
@receiver(AUDIT_SIGNALS["delete"])
def delete_log_entry(sender, **kwargs):
obj = kwargs["obj"]
user = kwargs["user"]
LogEntry.objects.create(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.DELETED,
)
except KeyError:
pass
try:
@receiver(AUDIT_SIGNALS["bulk_create"])
def create_log_entries(sender, **kwargs):
objs = kwargs["objs"]
user = kwargs["user"]
log_entries = []
for obj in objs:
log_entries.append(
LogEntry(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.CREATED,
)
)
LogEntry.objects.bulk_create(log_entries)
except KeyError:
pass
try:
@receiver(AUDIT_SIGNALS["bulk_delete"])
def delete_log_entries(sender, **kwargs):
objs = kwargs["objs"]
user = kwargs["user"]
log_entries = []
for obj in objs:
log_entries.append(
LogEntry(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.DELETED,
)
)
LogEntry.objects.bulk_create(log_entries)
except KeyError:
pass
try:
@receiver(AUDIT_SIGNALS["bulk_update"])
def bulk_update_log_entries(sender, **kwargs):
objs = kwargs["objs"]
user = kwargs["user"]
if len(objs) == 1:
obj = objs[0]
LogEntry.objects.create(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.UPDATED,
)
else:
log_entries = []
for obj in objs:
log_entries.append(
LogEntry(
object_id=obj.pk,
app_label=obj._meta.app_label,
model_name=obj._meta.model_name,
fields=_get_object_serialized_fields(obj),
user=user,
status=StatusEnum.UPDATED,
)
)
LogEntry.objects.bulk_create(log_entries)
except KeyError:
pass
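
The module above registers receivers against AUDIT_SIGNALS, which it imports as a black box. A minimal sketch of what that mapping could look like (hypothetical; the real django_log_entry_audit.settings may differ, and Django versions before 4.0 also accepted Signal(providing_args=[...])):

# Hypothetical sketch of the django_log_entry_audit/settings.py wiring.
from django.dispatch import Signal

AUDIT_SIGNALS = {
    "create": Signal(),
    "update": Signal(),
    "delete": Signal(),
    "bulk_create": Signal(),
    "bulk_update": Signal(),
    "bulk_delete": Signal(),
}

# A call site would then fire the matching signal after saving, e.g.:
#   AUDIT_SIGNALS["create"].send(sender=None, obj=instance, user=request.user)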
| 25.827957
| 66
| 0.533514
| 487
| 4,804
| 4.975359
| 0.125257
| 0.052827
| 0.070574
| 0.09286
| 0.865043
| 0.82501
| 0.82501
| 0.808089
| 0.789517
| 0.789517
| 0
| 0.001344
| 0.380308
| 4,804
| 185
| 67
| 25.967568
| 0.812563
| 0
| 0
| 0.765957
| 0
| 0
| 0.020192
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049645
| false
| 0.042553
| 0.049645
| 0.007092
| 0.106383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89c8593b602603f86a24fc94f4a98892d5482852
| 112
|
py
|
Python
|
custom_classifier/__init__.py
|
jasperhajonides/Supervised_Learning_TB
|
80a163c43cb30ac698a411d7fcb327d3f5de954e
|
[
"MIT"
] | 1
|
2022-01-17T10:50:26.000Z
|
2022-01-17T10:50:26.000Z
|
custom_classifier/__init__.py
|
jasperhajonides/Supervised_Learning_TB
|
80a163c43cb30ac698a411d7fcb327d3f5de954e
|
[
"MIT"
] | null | null | null |
custom_classifier/__init__.py
|
jasperhajonides/Supervised_Learning_TB
|
80a163c43cb30ac698a411d7fcb327d3f5de954e
|
[
"MIT"
] | 3
|
2020-09-11T11:52:42.000Z
|
2022-01-17T10:50:29.000Z
|
#!/usr/bin/python
from custom_classifier import custom_classification
from custom_classifier import sample_data

| 28
| 51
| 0.875
| 15
| 112
| 6.266667
| 0.666667
| 0.212766
| 0.425532
| 0.553191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080357
| 112
| 3
| 52
| 37.333333
| 0.912621
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
89e9d1cda7e5475554c1deee3d334781bcd4c9f2
| 1,840
|
py
|
Python
|
features/steps/book.py
|
TestowanieAutomatyczneUG/laboratorium_14-maciejSzcz
|
b92186c574d3f21acd9f3e913e1a8ddcb5ec81fd
|
[
"MIT"
] | null | null | null |
features/steps/book.py
|
TestowanieAutomatyczneUG/laboratorium_14-maciejSzcz
|
b92186c574d3f21acd9f3e913e1a8ddcb5ec81fd
|
[
"MIT"
] | null | null | null |
features/steps/book.py
|
TestowanieAutomatyczneUG/laboratorium_14-maciejSzcz
|
b92186c574d3f21acd9f3e913e1a8ddcb5ec81fd
|
[
"MIT"
] | null | null | null |
from behave import *
from src.zad4.Book import Book
from src.zad4.BookStorage import BookStorage
use_step_matcher("re")
@given("Database")
def step_impl(context):
context.Book = Book(BookStorage())
@given("Baze danych")
def step_impl(context):
context.Book = Book(BookStorage())
@given("a book with a correct IBNS number")
def step_impl(context):
context.sample_book = {"title": "autobiografia andrzeja", "id": 1, "author": "Andrzej jakistam", "isbn": "978-0-1825-6947-2"}
@given("książke z poprawnym numerem isbn")
def step_impl(context):
context.sample_book = {"title": "autobiografia andrzeja", "id": 1, "author": "Andrzej jakistam", "isbn": "978-0-1825-6947-2"}
@when("we add a book")
def step_impl(context):
context.Book.add_book({"title": "autobiografia andrzeja", "id": 1, "author": "Andrzej jakistam", "isbn": "978-0-1825-6947-2"})
@when("dodamy książke")
def step_impl(context):
context.Book.add_book({"title": "autobiografia andrzeja", "id": 1, "author": "Andrzej jakistam", "isbn": "978-0-1825-6947-2"})
@then("the book can be added")
def step_impl(context):
assert context.Book.add_book(context.sample_book) == True
@then("książka sie dodaje")
def step_impl(context):
assert context.Book.add_book(context.sample_book) == True
@then("the book can be deleted")
def step_impl(context):
assert context.Book.delete_book(1) == True
@then("książka sie usuwa")
def step_impl(context):
assert context.Book.delete_book(1) == True
@then("the books author can be retrieved")
def step_impl(context):
print(context.Book.get_books_author(1))
assert context.Book.get_books_author(1) == "Andrzej jakistam"
@then("autor książki może być odczytany")
def step_impl(context):
print(context.Book.get_books_author(1))
assert context.Book.get_books_author(1) == "Andrzej jakistam"
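
For context, the steps above exercise a small CRUD-style API; a hypothetical sketch of the Book/BookStorage classes they assume (the real src.zad4 implementations may differ):

# Hypothetical sketch of the API the behave steps above exercise.
class BookStorage:
    def __init__(self):
        self.books = {}

class Book:
    def __init__(self, storage):
        self.storage = storage

    def add_book(self, book):
        # Returns True on success, as the @then steps assert.
        self.storage.books[book["id"]] = book
        return True

    def delete_book(self, book_id):
        return self.storage.books.pop(book_id, None) is not None

    def get_books_author(self, book_id):
        return self.storage.books[book_id]["author"]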
| 31.724138
| 130
| 0.714674
| 269
| 1,840
| 4.769517
| 0.252788
| 0.065472
| 0.102884
| 0.168355
| 0.762276
| 0.740452
| 0.740452
| 0.740452
| 0.740452
| 0.664069
| 0
| 0.039851
| 0.127174
| 1,840
| 57
| 131
| 32.280702
| 0.759029
| 0
| 0
| 0.619048
| 0
| 0
| 0.313587
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.285714
| false
| 0
| 0.071429
| 0
| 0.357143
| 0.047619
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d63fcbc5b631dc62a2129da9295d720c4aaabe6c
| 918
|
py
|
Python
|
test/EMA_test.py
|
Ren634/GAN_zoo
|
2fa6c98be89aef70832a0e905737f2f4ada21e95
|
[
"MIT"
] | null | null | null |
test/EMA_test.py
|
Ren634/GAN_zoo
|
2fa6c98be89aef70832a0e905737f2f4ada21e95
|
[
"MIT"
] | null | null | null |
test/EMA_test.py
|
Ren634/GAN_zoo
|
2fa6c98be89aef70832a0e905737f2f4ada21e95
|
[
"MIT"
] | null | null | null |
from SAGAN import SAGAN
from gan_modules import EMA
# %%
sagan = SAGAN(
    n_dims=128,
    n_dis=2,
    max_resolutions=128,
    g_lr=1e-5,  # now 1e-6
    d_lr=3e-5,  # now 1.5e-6
    g_betas=(0, 0.999),
    d_betas=(0, 0.999),
    initial_layer="linear",
    upsampling_mode="pooling",
    downsampling_mode="pooling",
    loss_fn="hinge",
    is_da=False
)
mvag_sagan = SAGAN(
    n_dims=128,
    n_dis=2,
    max_resolutions=128,
    g_lr=1e-5,  # now 1e-6
    d_lr=3e-5,  # now 1.5e-6
    g_betas=(0, 0.999),
    d_betas=(0, 0.999),
    initial_layer="linear",
    upsampling_mode="pooling",
    downsampling_mode="pooling",
    loss_fn="hinge",
    is_da=False
)
ema = EMA()
ema.setup(mvag_sagan.netG)
# %%
print(next(mvag_sagan.netG.parameters()))
# %%
for i in range(300):
    ema.apply(sagan.netG, mvag_sagan.netG)
# %%
print(next(mvag_sagan.netG.parameters()))
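
The gan_modules.EMA class is used above as a black box; a minimal sketch of what such an exponential-moving-average parameter update might do (hypothetical, assuming PyTorch modules; the real implementation may differ):

# Hypothetical minimal EMA over model parameters.
import torch

class SimpleEMA:
    """Keep the target model's parameters as an EMA of the source model's."""
    def __init__(self, decay=0.999):
        self.decay = decay

    def setup(self, target_model):
        # Nothing to snapshot in this sketch; real code might copy params here.
        pass

    @torch.no_grad()
    def apply(self, source_model, target_model):
        for p_src, p_tgt in zip(source_model.parameters(),
                                target_model.parameters()):
            # target = decay * target + (1 - decay) * source
            p_tgt.mul_(self.decay).add_(p_src, alpha=1.0 - self.decay)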
| 21.857143
| 42
| 0.599129
| 143
| 918
| 3.636364
| 0.356643
| 0.086538
| 0.053846
| 0.076923
| 0.823077
| 0.823077
| 0.823077
| 0.823077
| 0.823077
| 0.65
| 0
| 0.079251
| 0.244009
| 918
| 41
| 43
| 22.390244
| 0.670029
| 0.051198
| 0
| 0.722222
| 0
| 0
| 0.060901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3b4257e8f9e9bff091c0fc9de7679ea72fd71bf
| 108
|
py
|
Python
|
easyidp/io/__init__.py
|
HowcanoeWang/EasyRIC
|
a3420bc7b1e0f1013411565cf0e66dd2d2ba5371
|
[
"MIT"
] | 12
|
2021-01-25T07:11:52.000Z
|
2022-02-14T11:57:03.000Z
|
easyidp/io/__init__.py
|
HowcanoeWang/EasyRIC
|
a3420bc7b1e0f1013411565cf0e66dd2d2ba5371
|
[
"MIT"
] | null | null | null |
easyidp/io/__init__.py
|
HowcanoeWang/EasyRIC
|
a3420bc7b1e0f1013411565cf0e66dd2d2ba5371
|
[
"MIT"
] | null | null | null |
from easyidp.io.tests import test
import easyidp.io.metashape
import easyidp.io.pix4d
import easyidp.io.pcd
| 21.6
| 33
| 0.833333
| 18
| 108
| 5
| 0.5
| 0.4
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.092593
| 108
| 5
| 34
| 21.6
| 0.908163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c3c8f285df2bbe331555387f620d3a6575125b15
| 3,170
|
py
|
Python
|
boxdb/auth_boxdb.py
|
kshitij1235/boxdb
|
4aa121f856c148c5136368041a610a584fb1dbc6
|
[
"MIT"
] | 1
|
2022-01-31T17:21:02.000Z
|
2022-01-31T17:21:02.000Z
|
boxdb/auth_boxdb.py
|
kshitij1235/boxdb
|
4aa121f856c148c5136368041a610a584fb1dbc6
|
[
"MIT"
] | null | null | null |
boxdb/auth_boxdb.py
|
kshitij1235/boxdb
|
4aa121f856c148c5136368041a610a584fb1dbc6
|
[
"MIT"
] | null | null | null |
'''
boxdb/auth_boxdb -> v0.4
This file contains code for
1) single row auth, and all row auth
[ ] auth_details() improved speed
'''
from filemod import word_search_line, read_specific_line
from boxdb.support_litebase import get_columns, get_primary_column
def check_rows(table_name, rowname, user_input):
    """
    this function helps you check the validity of a single row of data
    """
    with open(f'./{table_name}/tables/{rowname}.txt', encoding='UTF-8') as file_data:
        return user_input in file_data.read()
def auth_details(table_name, user_details):
    """
    Authorize the data from the table
    """
    # TODO improve and simplify the code
    # get all the rows and primary keys, and their positions in the list
    primary_keys = get_primary_column(table_name)
    if primary_keys == []:
        print("PRIMARY KEY : need at least one primary row")
        return False
    rows = get_columns(table_name)
    key_position = [rows.index(elements) for elements in primary_keys]
    # verify the position of the column with the help of primary_keys
    similarity_data = [
        word_search_line(
            f"./{table_name}/tables/{rows[p_rows]}.txt", user_details[p_rows])
        for p_rows in key_position
    ]
    # check for differences in the primary_keys data
    if similarity_data.count(similarity_data[0]) == len(similarity_data):
        final_list = []
        # tally all the data from the user and the database
        for index, element in enumerate(rows):
            try:
                final_list.append(read_specific_line(f"{table_name}/tables/{element}.txt", similarity_data[0] - 1).strip()
                                  == user_details[index])
            except TypeError:
                return False
    else:
        return False
    return len(user_details) == final_list.count(True)
def specific_auth(table_name, rows, user_details):
    """
    Authorize the data from the table, restricted to the given rows
    """
    # TODO improve and simplify the code
    # get all the rows and primary keys, and their positions in the list
    primary_keys = get_primary_column(table_name)
    for extra in [value for value in primary_keys if value not in rows]:
        primary_keys.remove(extra)
    if primary_keys == []:
        print("PRIMARY KEY : need at least one primary row")
        return False
    key_position = [rows.index(elements) for elements in primary_keys]
    # verify the position of the column with the help of primary_keys
    similarity_data = [
        word_search_line(
            f"./{table_name}/tables/{rows[p_rows]}.txt", user_details[p_rows])
        for p_rows in key_position
    ]
    # check for differences in the primary_keys data
    if similarity_data.count(similarity_data[0]) == len(similarity_data):
        final_list = []
        # tally all the data from the user and the database
        for index, element in enumerate(rows):
            try:
                final_list.append(read_specific_line(f"{table_name}/tables/{element}.txt", similarity_data[0] - 1).strip()
                                  == user_details[index])
            except TypeError:
                return False
    else:
        return False
    return len(user_details) == len(final_list)
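
A hypothetical call site for the two checks above (the table name, columns, and values are invented for illustration; the on-disk layout under ./users/tables/ would be created elsewhere by boxdb):

# Hypothetical usage: assumes a 'users' table whose columns include
# ['username', 'token'] with 'username' marked as a primary column.
if auth_details('users', ['kshitij', 'secret-token']):
    print('full row matches the stored data')

# Validate just the two named columns of the same row:
if specific_auth('users', ['username', 'token'], ['kshitij', 'secret-token']):
    print('selected columns match')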
| 33.723404
| 119
| 0.660883
| 437
| 3,170
| 4.590389
| 0.231121
| 0.07677
| 0.024925
| 0.03988
| 0.703888
| 0.703888
| 0.703888
| 0.703888
| 0.703888
| 0.703888
| 0
| 0.004218
| 0.25205
| 3,170
| 94
| 120
| 33.723404
| 0.841839
| 0.235962
| 0
| 0.716981
| 0
| 0
| 0.114358
| 0.076662
| 0
| 0
| 0
| 0.021277
| 0
| 1
| 0.056604
| false
| 0
| 0.037736
| 0
| 0.264151
| 0.037736
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3d30aa743c173b954476f7b197b192a1c2c4880
| 8,438
|
py
|
Python
|
client/test/unit_tests/test_recording.py
|
tbienhoff/carla-rl
|
51960c8ce3b7e90cdd6c3ab5e18721d1969e1b50
|
[
"MIT"
] | 80
|
2019-01-30T13:14:11.000Z
|
2022-02-14T08:51:01.000Z
|
client/test/unit_tests/test_recording.py
|
tbienhoff/carla-rl
|
51960c8ce3b7e90cdd6c3ab5e18721d1969e1b50
|
[
"MIT"
] | 8
|
2019-02-03T18:21:36.000Z
|
2020-10-23T00:51:30.000Z
|
client/test/unit_tests/test_recording.py
|
tbienhoff/carla-rl
|
51960c8ce3b7e90cdd6c3ab5e18721d1969e1b50
|
[
"MIT"
] | 27
|
2019-03-15T08:22:19.000Z
|
2022-03-20T05:37:48.000Z
|
import unittest
from carla.driving_benchmark.recording import Recording
class testRecording(unittest.TestCase):
def test_init(self):
import os
"""
The recording should have a reasonable full name
"""
recording = Recording(name_to_save='Test1'
, continue_experiment=False, save_images=True
)
_ = open(os.path.join(recording._path,'summary.csv'), 'r')
_ = open(os.path.join(recording._path, 'measurements.csv'), 'r')
# There should be three files in any newly created case
self.assertEqual(len(os.listdir(recording._path)), 3)
def test_write_summary_results(self):
import os
from carla.driving_benchmark.experiment import Experiment
recording = Recording(name_to_save='Test1'
, continue_experiment=False, save_images=True
)
recording.write_summary_results( experiment=Experiment(), pose=[24,32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
with open(os.path.join(recording._path, 'summary.csv'), 'r') as f:
header = f.readline().split(',')
            # Assert that the header row contains the expected fields
self.assertIn('exp_id', header)
self.assertEqual(len(header), len(recording._dict_summary))
            # Assert that something was written in the row
written_row = f.readline().split(',')
            # Assert that the number of columns is correct
self.assertEqual(len(written_row), len(recording._dict_summary))
    def test_write_measurements_results(self):
import os
from carla.driving_benchmark.experiment import Experiment
from carla.carla_server_pb2 import Measurements
from carla.carla_server_pb2 import Control
recording = Recording(name_to_save='Test1'
, continue_experiment=False, save_images=True
)
reward_vec = [Measurements().player_measurements for x in range(20)]
control_vec = [Control() for x in range(25)]
recording.write_measurements_results(experiment=Experiment(),
rep=1, pose=[24, 32], reward_vec=reward_vec,
control_vec=control_vec)
with open(os.path.join(recording._path, 'measurements.csv'), 'r') as f:
header = f.readline().split(',')
            # Assert that the header row contains the expected fields
self.assertIn('exp_id', header)
self.assertEqual(len(header), len(recording._dict_measurements))
            # Assert that something was written in the row
written_row = f.readline().split(',')
            # Assert that the number of columns is correct
self.assertEqual(len(written_row), len(recording._dict_measurements))
def test_continue_experiment(self):
recording = Recording( name_to_save='Test1'
, continue_experiment=False, save_images=True
)
        # A just-started case should return 1 as the continue-experiment position
self.assertEqual(recording._continue_experiment(True)[1], 1)
        # If you don't want to continue, it should also return one
self.assertEqual(recording._continue_experiment(False)[1], 1)
from carla.driving_benchmark.experiment import Experiment
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
        # After writing two experiments it should return 2, so you can start writing at pos 3
self.assertEqual(recording._continue_experiment(True)[1], 3)
        # If you don't want to continue, it should also return one
self.assertEqual(recording._continue_experiment(False)[1], 1)
def test_get_pose_and_experiment(self):
recording = Recording( name_to_save='Test1'
, continue_experiment=False, save_images=True
)
from carla.driving_benchmark.experiment import Experiment
pose, experiment = recording.get_pose_and_experiment(25)
        # A starting experiment should return zero, zero
self.assertEqual(pose, 0)
self.assertEqual(experiment, 0)
recording.write_summary_results( experiment=Experiment(), pose=[24,32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
recording.write_summary_results( experiment=Experiment(), pose=[24,32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(25)
self.assertEqual(pose, 2)
self.assertEqual(experiment, 0)
for i in range(23):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(25)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 1)
for i in range(23):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(25)
self.assertEqual(pose, 23)
self.assertEqual(experiment, 1)
def test_get_pose_and_experiment_corner(self):
from carla.driving_benchmark.experiment import Experiment
recording = Recording( name_to_save='Test1'
, continue_experiment=False, save_images=True
)
pose, experiment = recording.get_pose_and_experiment(1)
        # A starting experiment should return one
self.assertEqual(pose, 0)
self.assertEqual(experiment, 0)
pose, experiment = recording.get_pose_and_experiment(2)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 0)
recording.write_summary_results( experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(1)
print (pose, experiment)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 1)
pose, experiment = recording.get_pose_and_experiment(2)
print (pose, experiment)
        # A starting experiment should return one
self.assertEqual(pose, 1)
self.assertEqual(experiment, 0)
pose, experiment = recording.get_pose_and_experiment(3)
print (pose, experiment)
        # A starting experiment should return one
self.assertEqual(pose, 1)
self.assertEqual(experiment, 0)
recording.write_summary_results( experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(2)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 1)
pose, experiment = recording.get_pose_and_experiment(3)
self.assertEqual(pose, 2)
self.assertEqual(experiment, 0)
if __name__ == '__main__':
unittest.main()
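
The pose/experiment bookkeeping these tests exercise reduces to integer division over the number of summaries written so far; a sketch of the assumed rule (not Carla's actual implementation) that reproduces the expectations above:

# Hypothetical reduction of get_pose_and_experiment to divmod arithmetic.
def pose_and_experiment(n_written, poses_per_experiment):
    experiment, pose = divmod(n_written, poses_per_experiment)
    return pose, experiment

assert pose_and_experiment(2, 25) == (2, 0)    # two summaries written
assert pose_and_experiment(25, 25) == (0, 1)   # first experiment filled
assert pose_and_experiment(48, 25) == (23, 1)  # partway into the second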
| 35.906383
| 95
| 0.58995
| 925
| 8,438
| 5.181622
| 0.139459
| 0.097016
| 0.027123
| 0.054246
| 0.838306
| 0.831629
| 0.819528
| 0.778218
| 0.727311
| 0.683705
| 0
| 0.034344
| 0.323655
| 8,438
| 235
| 96
| 35.906383
| 0.805502
| 0.083788
| 0
| 0.71875
| 0
| 0
| 0.014664
| 0
| 0
| 0
| 0
| 0
| 0.257813
| 1
| 0.046875
| false
| 0
| 0.09375
| 0
| 0.148438
| 0.023438
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3e3e030a43122b0dac26563cebf7c0c14779e98
| 6,011
|
py
|
Python
|
tests/endtoend/test_blob_functions.py
|
gohar94/azure-functions-python-worker
|
4322e53ddbcc1eea40c1b061b42653336d9003f6
|
[
"MIT"
] | null | null | null |
tests/endtoend/test_blob_functions.py
|
gohar94/azure-functions-python-worker
|
4322e53ddbcc1eea40c1b061b42653336d9003f6
|
[
"MIT"
] | null | null | null |
tests/endtoend/test_blob_functions.py
|
gohar94/azure-functions-python-worker
|
4322e53ddbcc1eea40c1b061b42653336d9003f6
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import time
from datetime import datetime
from azure_functions_worker import testutils
class TestBlobFunctions(testutils.WebHostTestCase):
@classmethod
def get_script_dir(cls):
return testutils.E2E_TESTS_FOLDER / 'blob_functions'
@testutils.retryable_test(3, 5)
def test_blob_io_str(self):
r = self.webhost.request('POST', 'put_blob_str', data='test-data')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK')
r = self.webhost.request('GET', 'get_blob_str')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'test-data')
r = self.webhost.request('GET', 'get_blob_as_str')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'test-data')
@testutils.retryable_test(3, 5)
def test_blob_io_large_str(self):
datetime_iso = datetime.utcnow().isoformat()
large_string = datetime_iso * 1024 * 1024 # 26 MB
r = self.webhost.request('POST', 'put_blob_str', data=large_string)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK')
r = self.webhost.request('GET', 'get_blob_str')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, large_string)
r = self.webhost.request('GET', 'get_blob_as_str')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, large_string)
@testutils.retryable_test(3, 5)
def test_blob_io_bytes(self):
r = self.webhost.request('POST', 'put_blob_bytes',
data='test-dată'.encode('utf-8'))
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK')
r = self.webhost.request('POST', 'get_blob_bytes')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'test-dată')
r = self.webhost.request('POST', 'get_blob_as_bytes')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'test-dată')
@testutils.retryable_test(3, 5)
def test_blob_io_large_bytes(self):
datetime_iso = datetime.utcnow().isoformat()
large_string = datetime_iso * 1024 * 1024 # 26 MB
r = self.webhost.request('POST', 'put_blob_bytes',
data=large_string.encode('utf-8'))
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK')
r = self.webhost.request('POST', 'get_blob_bytes')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, large_string)
r = self.webhost.request('POST', 'get_blob_as_bytes')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, large_string)
@testutils.retryable_test(3, 5)
def test_blob_io_filelike(self):
r = self.webhost.request('POST', 'put_blob_filelike')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK')
r = self.webhost.request('POST', 'get_blob_filelike')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'filelike')
@testutils.retryable_test(3, 5)
def test_blob_io_return(self):
r = self.webhost.request('POST', 'put_blob_return')
self.assertEqual(r.status_code, 200)
r = self.webhost.request('POST', 'get_blob_return')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'FROM RETURN')
@testutils.retryable_test(3, 5)
def test_blob_trigger(self):
datetime_iso = datetime.utcnow().isoformat()
data = datetime_iso
r = self.webhost.request('POST', 'put_blob_trigger',
data=data.encode('utf-8'))
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK')
# Blob trigger may be processed after some delay
# We check it every 2 seconds to allow the trigger to be fired
max_retries = 10
for try_no in range(max_retries):
time.sleep(2)
try:
# Check that the trigger has fired
r = self.webhost.request('GET', 'get_blob_triggered')
self.assertEqual(r.status_code, 200)
response = r.json()
self.assertEqual(
response,
{
'name': 'python-worker-tests/test-blob-trigger.txt',
'length': len(data),
'content': data
}
)
break
except AssertionError:
if try_no == max_retries - 1:
raise
@testutils.retryable_test(3, 5)
def test_blob_trigger_with_large_content(self):
datetime_iso = datetime.utcnow().isoformat()
data = datetime_iso * 1024 * 1024 # 26 MB
r = self.webhost.request('POST', 'put_blob_trigger', data=data.
encode('utf-8'))
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK')
# Blob trigger may be processed after some delay
# We check it every 2 seconds to allow the trigger to be fired
max_retries = 10
for try_no in range(max_retries):
time.sleep(2)
try:
# Check that the trigger has fired
r = self.webhost.request('GET', 'get_blob_triggered')
self.assertEqual(r.status_code, 200)
response = r.json()
self.assertEqual(
response,
{
'name': 'python-worker-tests/test-blob-trigger.txt',
'length': len(data),
'content': data
}
)
break
except AssertionError:
if try_no == max_retries - 1:
raise
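
testutils.retryable_test(3, 5) is used above as a black box; a minimal sketch of what such a retry decorator could look like (hypothetical; the real azure_functions_worker.testutils may differ):

# Hypothetical retry decorator: re-run a flaky test a few times before failing.
import functools
import time

def retryable_test(max_retries, delay_seconds):
    def decorator(test_fn):
        @functools.wraps(test_fn)
        def wrapper(*args, **kwargs):
            for attempt in range(max_retries):
                try:
                    return test_fn(*args, **kwargs)
                except AssertionError:
                    if attempt == max_retries - 1:
                        raise
                    time.sleep(delay_seconds)
        return wrapper
    return decorator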
| 35.994012
| 76
| 0.58343
| 724
| 6,011
| 4.661602
| 0.153315
| 0.173333
| 0.175407
| 0.112593
| 0.891259
| 0.891259
| 0.891259
| 0.883556
| 0.848889
| 0.746667
| 0
| 0.028934
| 0.304275
| 6,011
| 166
| 77
| 36.210843
| 0.778097
| 0.064715
| 0
| 0.706349
| 0
| 0
| 0.108913
| 0.014617
| 0
| 0
| 0
| 0
| 0.325397
| 1
| 0.071429
| false
| 0
| 0.02381
| 0.007937
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3fb1f937bbb1a1c85c7d4202bc1da54ba1f5783
| 78,919
|
py
|
Python
|
tests/archives.py
|
UuuNyaa/x7zipfile
|
5701758d39878a23bd0eae9a64cfabf7d1504c15
|
[
"BSD-3-Clause"
] | null | null | null |
tests/archives.py
|
UuuNyaa/x7zipfile
|
5701758d39878a23bd0eae9a64cfabf7d1504c15
|
[
"BSD-3-Clause"
] | null | null | null |
tests/archives.py
|
UuuNyaa/x7zipfile
|
5701758d39878a23bd0eae9a64cfabf7d1504c15
|
[
"BSD-3-Clause"
] | 1
|
2021-08-05T04:40:42.000Z
|
2021-08-05T04:40:42.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2021 UuuNyaa <UuuNyaa@gmail.com>
# This file is part of x7zipfile.
import dataclasses
import os
from datetime import datetime, timezone
from operator import attrgetter
from tests import x7ZipInfo
ARCHIVES_PATH = os.path.join(os.path.dirname(__file__), 'archives')
# The time zone of date_time is UTC.
_ARCHIVES = [
('bcj.bin', '', '', [
x7ZipInfo(filename='vmp0', file_size=0, compress_size=0, date_time=None, CRC=None, mode=None, encrypted=None, compress_type=None, block=None),
x7ZipInfo(filename='vmp1', file_size=11264, compress_size=11264, date_time=None, CRC=None, mode=None, encrypted=None, compress_type=None, block=None),
x7ZipInfo(filename='vmp2', file_size=512, compress_size=512, date_time=None, CRC=None, mode=None, encrypted=None, compress_type=None, block=None),
]),
('bugzilla_4.7z', '', '', [
x7ZipInfo(filename='copying.txt', file_size=26948, compress_size=15971, date_time=(2001, 8, 29, 20, 19, 26), CRC=4090972253, mode='An', encrypted='-', compress_type='LZMA:16', block=0),
x7ZipInfo(filename='History.txt', file_size=26317, compress_size=None, date_time=(2005, 12, 4, 13, 57, 48), CRC=223166815, mode='An', encrypted='-', compress_type='LZMA:16', block=0),
x7ZipInfo(filename='License.txt', file_size=938, compress_size=None, date_time=(2005, 1, 10, 11, 31, 50), CRC=3341001762, mode='An', encrypted='-', compress_type='LZMA:16', block=0),
x7ZipInfo(filename='readme.txt', file_size=3500, compress_size=None, date_time=(2005, 12, 4, 13, 57, 26), CRC=3932089770, mode='An', encrypted='-', compress_type='LZMA:16', block=0),
]),
('bzip2_2.7z', '', '', [
x7ZipInfo(filename='10000SalesRecords.csv', file_size=1247263, compress_size=272525, date_time=(2017, 7, 28, 13, 50, 45), CRC=2832084052, mode='A', encrypted='-', compress_type='BZip2', block=0),
]),
('copy.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2006, 3, 15, 21, 54, 41), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=33, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='Copy', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=33, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='Copy', block=1),
]),
('copy_2.7z', '', '', [
x7ZipInfo(filename='assemblies/content/0000/Empty.sbsasm', file_size=136, compress_size=136, date_time=(2020, 7, 9, 3, 58, 47), CRC=2680137582, mode='', encrypted='-', compress_type='Copy', block=0),
x7ZipInfo(filename='assemblies/content/0000/Empty.xml', file_size=895, compress_size=895, date_time=(2020, 7, 9, 3, 58, 47), CRC=2361726829, mode='', encrypted='-', compress_type='Copy', block=1),
]),
('copy_bcj_1.7z', '', '', [
x7ZipInfo(filename='test_bcj2.bin', file_size=10000, compress_size=10000, date_time=(2020, 9, 29, 23, 54, 47), CRC=2770753338, mode='A_ -rw-rw-r--', encrypted='-', compress_type='Copy BCJ', block=0),
]),
('crc_corrupted.7z', '', 'Fatal error: ERROR: CRC Failed', [
x7ZipInfo(filename='src', file_size=0, compress_size=0, date_time=(2020, 6, 3, 0, 18, 55), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='src/scripts', file_size=0, compress_size=0, date_time=(2019, 3, 14, 0, 10, 8), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='src/scripts/py7zr', file_size=111, compress_size=441, date_time=(2019, 3, 14, 0, 10, 8), CRC=3010113244, mode='A_ 0rwxr-xr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='src/setup.cfg', file_size=58, compress_size=None, date_time=(2019, 3, 14, 0, 7, 13), CRC=3703541000, mode='A_ 0rw-r--r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='src/setup.py', file_size=559, compress_size=None, date_time=(2019, 3, 14, 0, 9, 1), CRC=2164028095, mode='A_ 0rw-r--r--', encrypted='-', compress_type='LZMA2:24', block=0),
]),
('deflate.7z', '', '', [
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=49, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='Deflate', block=0),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='Deflate', block=0),
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 45, 54), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
]),
('deflate64.7z', '', '', [
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=49, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='Deflate64', block=0),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='Deflate64', block=0),
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 45, 54), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
]),
('empty.7z', '', '', [
]),
('encrypted_1.7z', 'secret', '', [
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=48, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA:16 7zAES:19', block=0),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA:16 7zAES:19', block=0),
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 25, 39), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
]),
('encrypted_2.7z', 'secret', '', [
x7ZipInfo(filename='mingw64', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 46), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/bin', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/include', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/lib', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 26), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share/doc', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 43), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share/doc/szip', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 43), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/include/SZconfig.h', file_size=2289, compress_size=28160, date_time=(2017, 1, 23, 6, 2, 34), CRC=885497089, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/include/ricehdf.h', file_size=3470, compress_size=None, date_time=(2004, 3, 16, 16, 14, 27), CRC=1176446038, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/include/szip_adpt.h', file_size=1774, compress_size=None, date_time=(2010, 7, 2, 21, 31, 38), CRC=3524555922, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/include/szlib.h', file_size=5282, compress_size=None, date_time=(2008, 11, 11, 16, 12, 56), CRC=2232561848, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/lib/libszip.a', file_size=60008, compress_size=None, date_time=(2017, 1, 23, 6, 2, 47), CRC=312095774, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/lib/libszip.dll.a', file_size=10900, compress_size=None, date_time=(2017, 1, 23, 6, 2, 39), CRC=2563440788, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/COPYING', file_size=1986, compress_size=None, date_time=(2008, 1, 24, 23, 8, 43), CRC=4216910981, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/HISTORY.txt', file_size=1544, compress_size=None, date_time=(2010, 7, 14, 13, 43, 15), CRC=4115460176, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/INSTALL', file_size=3544, compress_size=None, date_time=(2008, 11, 11, 16, 12, 56), CRC=2370272233, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/README', file_size=564, compress_size=None, date_time=(2007, 8, 20, 18, 47, 21), CRC=2439857018, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/RELEASE.txt', file_size=513, compress_size=None, date_time=(2010, 7, 14, 13, 43, 15), CRC=2315733265, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:192k 7zAES:19', block=0),
x7ZipInfo(filename='mingw64/bin/libszip-0.dll', file_size=66352, compress_size=25968, date_time=(2017, 1, 23, 6, 2, 47), CRC=3176356340, mode='A_ -rwxr-xr-x', encrypted='+', compress_type='BCJ LZMA2:192k 7zAES:19', block=1),
]),
('encrypted_3.7z', 'secret', '', [
x7ZipInfo(filename='scripts', file_size=0, compress_size=0, date_time=(2019, 3, 14, 0, 10, 8), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='scripts/py7zr', file_size=111, compress_size=448, date_time=(2019, 3, 14, 0, 10, 8), CRC=3010113243, mode='A_ -rwxr-xr-x', encrypted='+', compress_type='LZMA2:12 7zAES:19', block=0),
x7ZipInfo(filename='setup.cfg', file_size=58, compress_size=None, date_time=(2019, 3, 14, 0, 7, 13), CRC=3703540999, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:12 7zAES:19', block=0),
x7ZipInfo(filename='setup.py', file_size=559, compress_size=None, date_time=(2019, 3, 14, 0, 9, 1), CRC=2164028094, mode='A_ -rw-r--r--', encrypted='+', compress_type='LZMA2:12 7zAES:19', block=0),
]),
('filename_encryption.7z', 'hello', '', [
x7ZipInfo(filename='New Text Document.TXT', file_size=0, compress_size=0, date_time=(2020, 6, 2, 22, 7, 57), CRC=None, mode='A', encrypted='-', compress_type=None, block=None),
]),
('github_14.7z', '', '', [
x7ZipInfo(filename='github_14', file_size=24, compress_size=30, date_time=(2014, 3, 12, 23, 2, 31), CRC=3515136674, mode='', encrypted='-', compress_type='LZMA:24', block=0),
]),
('github_14_multi.7z', '', '', [
x7ZipInfo(filename='github_14_multi', file_size=28, compress_size=34, date_time=(2014, 3, 12, 23, 9, 11), CRC=2017676314, mode='', encrypted='-', compress_type='LZMA:24', block=0),
x7ZipInfo(filename='github_14_multi', file_size=28, compress_size=34, date_time=(2014, 3, 12, 23, 9, 15), CRC=1071855306, mode='', encrypted='-', compress_type='LZMA:24', block=1),
]),
('longpath.7z', '', 'ERROR: Can not open output file : File name too long', [
x7ZipInfo(filename='Users\\AnthonyRabon\\Downloads\\CJ_WS_Spectre-v040920R1_2020-04-09_23-40-44 (1)\\CJ_WS_Spectre-v040920R1_2020-04-09_23-40-44\\Suspicious Files\\Program Files\\WindowsApps\\AD2F1837.HPPrinterControl_110.1.671.0_x64__v10z8vjag6ke6\\HP.Framework.Extensions.ScanCapture\\Assets\\Arrow.png\\Arrow.png\\Arrow.png', file_size=332, compress_size=333, date_time=(2020, 4, 17, 20, 37, 57), CRC=732802182, mode='A', encrypted='-', compress_type='LZMA2:12', block=0),
x7ZipInfo(filename='Users/AnthonyRabon/Downloads/CJ_WS_Spectre-v040920R1_2020-04-09_23-40-44 (1)/CJ_WS_Spectre-v040920R1_2020-04-09_23-40-44/Arrow.png', file_size=332, compress_size=333, date_time=(2020, 4, 17, 20, 37, 57), CRC=732802182, mode='A', encrypted='-', compress_type='LZMA2:12', block=1),
]),
('lz4.7z', '', 'ERROR: Unsupported Method', [
x7ZipInfo(filename='scripts', file_size=0, compress_size=0, date_time=(2019, 3, 14, 0, 10, 8), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='scripts/py7zr', file_size=111, compress_size=584, date_time=(2019, 3, 14, 0, 10, 8), CRC=3010113243, mode='A', encrypted='-', compress_type='04F71104', block=0),
x7ZipInfo(filename='setup.cfg', file_size=58, compress_size=None, date_time=(2019, 3, 14, 0, 7, 13), CRC=3703540999, mode='A', encrypted='-', compress_type='04F71104', block=0),
x7ZipInfo(filename='setup.py', file_size=559, compress_size=None, date_time=(2019, 3, 14, 0, 9, 1), CRC=2164028094, mode='A', encrypted='-', compress_type='04F71104', block=0),
]),
('lzma2_1.7z', '', '', [
x7ZipInfo(filename='scripts', file_size=0, compress_size=0, date_time=(2019, 3, 14, 0, 10, 8), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='scripts/py7zr', file_size=111, compress_size=441, date_time=(2019, 3, 14, 0, 10, 8), CRC=3010113243, mode='A_ -rwxr-xr-x', encrypted='-', compress_type='LZMA2:12', block=0),
x7ZipInfo(filename='setup.cfg', file_size=58, compress_size=None, date_time=(2019, 3, 14, 0, 7, 13), CRC=3703540999, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:12', block=0),
x7ZipInfo(filename='setup.py', file_size=559, compress_size=None, date_time=(2019, 3, 14, 0, 9, 1), CRC=2164028094, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:12', block=0),
]),
('lzma2_bcj_arm.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 25, 39), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=55, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='ARM LZMA2:12', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='ARM LZMA2:12', block=0),
]),
('lzma2_bcj_armt.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 25, 39), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=55, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='ARMT LZMA2:12', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='ARMT LZMA2:12', block=0),
]),
('lzma2_bcj_ia64.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 25, 39), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=55, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='IA64 LZMA2:12', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='IA64 LZMA2:12', block=0),
]),
('lzma2_bcj_ppc.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 25, 39), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=55, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='PPC LZMA2:12', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='PPC LZMA2:12', block=0),
]),
('lzma2_bcj_sparc.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 25, 39), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=55, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='SPARC LZMA2:12', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='SPARC LZMA2:12', block=0),
]),
('lzma2bcj.7z', '', '', [
x7ZipInfo(filename='mingw64', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 46), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/bin', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/include', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/lib', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 26), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share/doc', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 43), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share/doc/szip', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 43), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/include/SZconfig.h', file_size=2289, compress_size=26895, date_time=(2017, 1, 23, 6, 2, 34), CRC=885497089, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/include/ricehdf.h', file_size=3470, compress_size=None, date_time=(2004, 3, 16, 16, 14, 27), CRC=1176446038, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/include/szip_adpt.h', file_size=1774, compress_size=None, date_time=(2010, 7, 2, 21, 31, 38), CRC=3524555922, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/include/szlib.h', file_size=5282, compress_size=None, date_time=(2008, 11, 11, 16, 12, 56), CRC=2232561848, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/lib/libszip.a', file_size=60008, compress_size=None, date_time=(2017, 1, 23, 6, 2, 47), CRC=312095774, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/lib/libszip.dll.a', file_size=10900, compress_size=None, date_time=(2017, 1, 23, 6, 2, 39), CRC=2563440788, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/COPYING', file_size=1986, compress_size=None, date_time=(2008, 1, 24, 23, 8, 43), CRC=4216910981, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/HISTORY.txt', file_size=1544, compress_size=None, date_time=(2010, 7, 14, 13, 43, 15), CRC=4115460176, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/INSTALL', file_size=3544, compress_size=None, date_time=(2008, 11, 11, 16, 12, 56), CRC=2370272233, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/README', file_size=564, compress_size=None, date_time=(2007, 8, 20, 18, 47, 21), CRC=2439857018, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/RELEASE.txt', file_size=513, compress_size=None, date_time=(2010, 7, 14, 13, 43, 15), CRC=2315733265, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/bin/libszip-0.dll', file_size=66352, compress_size=24924, date_time=(2017, 1, 23, 6, 2, 47), CRC=3176356340, mode='A_ -rwxr-xr-x', encrypted='-', compress_type='BCJ LZMA2:192k', block=1),
]),
('lzma2bcj2.7z', '', '', [
x7ZipInfo(filename='mingw64', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 46), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/bin', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/include', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/lib', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 18), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share', file_size=0, compress_size=0, date_time=(2020, 6, 7, 2, 45, 26), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share/doc', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 43), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/share/doc/szip', file_size=0, compress_size=0, date_time=(2017, 1, 23, 6, 2, 43), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='mingw64/include/SZconfig.h', file_size=2289, compress_size=26837, date_time=(2017, 1, 23, 6, 2, 34), CRC=885497089, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/include/ricehdf.h', file_size=3470, compress_size=None, date_time=(2004, 3, 16, 16, 14, 27), CRC=1176446038, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/include/szip_adpt.h', file_size=1774, compress_size=None, date_time=(2010, 7, 2, 21, 31, 38), CRC=3524555922, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/include/szlib.h', file_size=5282, compress_size=None, date_time=(2008, 11, 11, 16, 12, 56), CRC=2232561848, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/lib/libszip.a', file_size=60008, compress_size=None, date_time=(2017, 1, 23, 6, 2, 47), CRC=312095774, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/lib/libszip.dll.a', file_size=10900, compress_size=None, date_time=(2017, 1, 23, 6, 2, 39), CRC=2563440788, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/COPYING', file_size=1986, compress_size=None, date_time=(2008, 1, 24, 23, 8, 43), CRC=4216910981, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/HISTORY.txt', file_size=1544, compress_size=None, date_time=(2010, 7, 14, 13, 43, 15), CRC=4115460176, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/INSTALL', file_size=3544, compress_size=None, date_time=(2008, 11, 11, 16, 12, 56), CRC=2370272233, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/README', file_size=564, compress_size=None, date_time=(2007, 8, 20, 18, 47, 21), CRC=2439857018, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/share/doc/szip/RELEASE.txt', file_size=513, compress_size=None, date_time=(2010, 7, 14, 13, 43, 15), CRC=2315733265, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:192k', block=0),
x7ZipInfo(filename='mingw64/bin/libszip-0.dll', file_size=66352, compress_size=23793, date_time=(2017, 1, 23, 6, 2, 47), CRC=3176356340, mode='A_ -rwxr-xr-x', encrypted='-', compress_type='BCJ2 LZMA2:192k LZMA:192k:lc0:lp2 LZMA:192k:lc0:lp2', block=1),
]),
('lzma2delta_1.7z', '', '', [
x7ZipInfo(filename='src', file_size=0, compress_size=0, date_time=(2020, 4, 12, 8, 3, 28), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='src/bra.txt', file_size=11, compress_size=15, date_time=(2020, 4, 12, 8, 3, 28), CRC=295790896, mode='A_ -rw-r--r--', encrypted='-', compress_type='Delta:1 LZMA2:12', block=0),
]),
('lzma_1.7z', '', '', [
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=37, date_time=(2020, 4, 12, 8, 3, 28), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA:12', block=0),
]),
('lzma_bcj2_1.7z', '', '', [
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=52, date_time=(2019, 11, 30, 0, 29, 19), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ2 LZMA:12 LZMA:12:lc0:lp2 LZMA:12:lc0:lp2', block=0),
]),
('lzma_bcj_arm.7z', '', '', [
x7ZipInfo(filename='xclock', file_size=48507, compress_size=20336, date_time=(2016, 5, 22, 15, 24, 13), CRC=1633252794, mode='RA_ -r-xr-xr-x', encrypted='-', compress_type='ARM LZMA:48k', block=0),
]),
('lzma_bcj_armt.7z', '', '', [
x7ZipInfo(filename='xclock', file_size=58224, compress_size=28819, date_time=(2020, 2, 14, 0, 6, 28), CRC=771792826, mode='RA_ -r-xr-xr-x', encrypted='-', compress_type='ARMT LZMA:16', block=0),
]),
('lzma_bcj_ppc.7z', '', '', [
x7ZipInfo(filename='xclock', file_size=45254, compress_size=20026, date_time=(2016, 5, 22, 16, 20, 49), CRC=3219272118, mode='RA_ -r-xr-xr-x', encrypted='-', compress_type='PPC LZMA:48k', block=0),
]),
('lzma_bcj_sparc.7z', '', '', [
x7ZipInfo(filename='xclock', file_size=42545, compress_size=18656, date_time=(2020, 7, 18, 10, 9, 21), CRC=3048035462, mode='RA_ -r-xr-xr-x', encrypted='-', compress_type='SPARC LZMA:48k', block=0),
]),
('lzma_bcj_x86.7z', '', '', [
x7ZipInfo(filename='c++obf7.exe', file_size=12800, compress_size=11327, date_time=(2019, 8, 2, 12, 17, 32), CRC=196103774, mode='A', encrypted='-', compress_type='BCJ LZMA:16', block=0),
]),
('mblock_1.7z', '', '', [
x7ZipInfo(filename='C', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='C/Util', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='C/Util/7z', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='C/Util/Lzma', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='C/Util/LzmaLib', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='C/Util/SfxSetup', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='DOC', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='bin', file_size=0, compress_size=0, date_time=(2019, 6, 3, 0, 9, 57), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='bin/installer', file_size=0, compress_size=0, date_time=(2019, 6, 3, 0, 14, 29), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='bin/x64', file_size=0, compress_size=0, date_time=(2019, 6, 2, 23, 50, 27), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='C/7z.h', file_size=5263, compress_size=94014, date_time=(2015, 11, 18, 9, 4, 24), CRC=213572677, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zAlloc.c', file_size=1548, compress_size=None, date_time=(2015, 11, 9, 9, 41, 8), CRC=1015659052, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zAlloc.h', file_size=403, compress_size=None, date_time=(2015, 3, 25, 16, 7, 58), CRC=1255145334, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zArcIn.c', file_size=45342, compress_size=None, date_time=(2016, 5, 16, 8, 17, 2), CRC=3346864249, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zBuf.c', file_size=563, compress_size=None, date_time=(2013, 1, 23, 17, 42, 47), CRC=3729966553, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zBuf.h', file_size=647, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=895974893, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zBuf2.c', file_size=940, compress_size=None, date_time=(2014, 8, 22, 8, 2, 4), CRC=4123429520, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zCrc.c', file_size=3213, compress_size=None, date_time=(2015, 3, 10, 9, 31, 14), CRC=230919468, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zCrc.h', file_size=637, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=2275962497, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zCrcOpt.c', file_size=3441, compress_size=None, date_time=(2015, 3, 1, 7, 48, 42), CRC=1400225652, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zDec.c', file_size=15734, compress_size=None, date_time=(2015, 11, 18, 11, 9, 54), CRC=2764226048, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zFile.c', file_size=6966, compress_size=None, date_time=(2013, 1, 23, 17, 42, 51), CRC=2840637246, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zFile.h', file_size=1581, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=3953630047, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zStream.c', file_size=4188, compress_size=None, date_time=(2013, 11, 12, 5, 50, 22), CRC=3167721463, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zTypes.h', file_size=5737, compress_size=None, date_time=(2013, 11, 12, 5, 54, 41), CRC=600932062, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zVersion.h', file_size=582, compress_size=None, date_time=(2016, 10, 4, 12, 10, 40), CRC=2891635936, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/7zVersion.rc', file_size=1530, compress_size=None, date_time=(2011, 4, 18, 17, 30, 17), CRC=4188477837, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Aes.c', file_size=8639, compress_size=None, date_time=(2016, 5, 21, 8, 6, 41), CRC=2414648125, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Aes.h', file_size=1364, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=717903915, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/AesOpt.c', file_size=4618, compress_size=None, date_time=(2013, 11, 12, 6, 14, 17), CRC=957820189, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Alloc.c', file_size=3272, compress_size=None, date_time=(2015, 2, 21, 14, 56, 8), CRC=813178831, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Alloc.h', file_size=675, compress_size=None, date_time=(2015, 2, 21, 14, 56, 8), CRC=2885256274, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Bcj2.c', file_size=6177, compress_size=None, date_time=(2015, 8, 1, 4, 33, 34), CRC=1548656025, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Bcj2.h', file_size=3287, compress_size=None, date_time=(2014, 11, 10, 17, 45, 56), CRC=1485330193, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Bcj2Enc.c', file_size=7524, compress_size=None, date_time=(2014, 11, 10, 17, 59, 38), CRC=363376878, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Bra.c', file_size=3301, compress_size=None, date_time=(2013, 1, 23, 17, 43, 38), CRC=3813179900, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Bra.h', file_size=1926, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=4198414260, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Bra86.c', file_size=1799, compress_size=None, date_time=(2013, 11, 12, 5, 36, 13), CRC=583675425, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/BraIA64.c', file_size=1839, compress_size=None, date_time=(2013, 11, 12, 6, 18, 9), CRC=556709688, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Compiler.h', file_size=1168, compress_size=None, date_time=(2015, 8, 2, 11, 52, 23), CRC=3769352475, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/CpuArch.c', file_size=4339, compress_size=None, date_time=(2016, 2, 25, 7, 42, 8), CRC=4072626732, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/CpuArch.h', file_size=5909, compress_size=None, date_time=(2016, 6, 9, 5, 39, 31), CRC=986540367, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Delta.c', file_size=1337, compress_size=None, date_time=(2013, 1, 23, 17, 48, 23), CRC=3663178081, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Delta.h', file_size=395, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=3296496221, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/DllSecur.c', file_size=1894, compress_size=None, date_time=(2016, 10, 4, 14, 21, 8), CRC=2865161222, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/DllSecur.h', file_size=259, compress_size=None, date_time=(2016, 6, 8, 13, 53, 28), CRC=261298518, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzFind.c', file_size=25218, compress_size=None, date_time=(2015, 10, 15, 15, 16, 26), CRC=3792255594, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzFind.h', file_size=3437, compress_size=None, date_time=(2015, 10, 15, 15, 13, 59), CRC=2180220588, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzFindMt.c', file_size=23018, compress_size=None, date_time=(2015, 10, 15, 15, 38, 6), CRC=4136230841, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzFindMt.h', file_size=2509, compress_size=None, date_time=(2015, 5, 3, 10, 8, 44), CRC=148789859, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzHash.h', file_size=1824, compress_size=None, date_time=(2015, 4, 12, 8, 28, 1), CRC=2828726620, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Lzma2Dec.c', file_size=10713, compress_size=None, date_time=(2015, 11, 9, 9, 39, 32), CRC=2684159462, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Lzma2Dec.h', file_size=2248, compress_size=None, date_time=(2015, 5, 13, 8, 59, 25), CRC=2219260484, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Lzma2Enc.c', file_size=12989, compress_size=None, date_time=(2015, 10, 4, 9, 47, 8), CRC=2254226617, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Lzma2Enc.h', file_size=1827, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=1708258329, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Lzma86.h', file_size=3266, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=2619347907, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Lzma86Dec.c', file_size=1299, compress_size=None, date_time=(2016, 5, 16, 8, 17, 44), CRC=407617727, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Lzma86Enc.c', file_size=2712, compress_size=None, date_time=(2016, 5, 16, 8, 17, 44), CRC=3196967704, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzmaDec.c', file_size=30886, compress_size=None, date_time=(2016, 5, 16, 8, 11, 31), CRC=3204345610, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzmaDec.h', file_size=7055, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=3349257961, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzmaEnc.c', file_size=66505, compress_size=None, date_time=(2016, 5, 16, 8, 32, 35), CRC=4289683091, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzmaEnc.h', file_size=3117, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=50714080, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzmaLib.c', file_size=1323, compress_size=None, date_time=(2015, 6, 13, 16, 43, 33), CRC=2410469055, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/LzmaLib.h', file_size=4438, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=2661842185, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/MtCoder.c', file_size=8317, compress_size=None, date_time=(2015, 10, 13, 7, 43, 2), CRC=692166661, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/MtCoder.h', file_size=2072, compress_size=None, date_time=(2009, 11, 19, 17, 16, 39), CRC=433647918, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Ppmd.h', file_size=2119, compress_size=None, date_time=(2016, 5, 16, 8, 40, 47), CRC=729921407, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Ppmd7.c', file_size=17593, compress_size=None, date_time=(2016, 5, 21, 7, 49, 15), CRC=3311129979, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Ppmd7.h', file_size=3842, compress_size=None, date_time=(2016, 5, 21, 7, 49, 15), CRC=2800200453, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Ppmd7Dec.c', file_size=4884, compress_size=None, date_time=(2013, 1, 23, 17, 43, 54), CRC=607753273, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Ppmd7Enc.c', file_size=4557, compress_size=None, date_time=(2015, 9, 28, 13, 26, 27), CRC=4231787260, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Precomp.h', file_size=182, compress_size=None, date_time=(2013, 11, 12, 6, 5, 19), CRC=1384480688, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/RotateDefs.h', file_size=656, compress_size=None, date_time=(2015, 3, 25, 12, 21, 10), CRC=3567974844, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Sha256.c', file_size=5529, compress_size=None, date_time=(2015, 11, 14, 7, 46, 27), CRC=166729251, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Sha256.h', file_size=464, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=2591283809, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Sort.c', file_size=2589, compress_size=None, date_time=(2014, 4, 5, 11, 17, 8), CRC=2948294581, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Sort.h', file_size=335, compress_size=None, date_time=(2014, 4, 5, 11, 17, 8), CRC=2463338015, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Threads.c', file_size=2765, compress_size=None, date_time=(2014, 9, 21, 12, 12, 8), CRC=392510048, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Threads.h', file_size=2071, compress_size=None, date_time=(2013, 11, 12, 6, 12, 49), CRC=1961453104, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/7z/7z.dsp', file_size=6636, compress_size=None, date_time=(2015, 6, 10, 9, 43, 13), CRC=2154603229, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/7z/7z.dsw', file_size=527, compress_size=None, date_time=(2008, 3, 17, 7, 52, 11), CRC=737115856, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/7z/7zMain.c', file_size=14639, compress_size=None, date_time=(2016, 5, 16, 8, 44, 27), CRC=3874348523, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/7z/Precomp.c', file_size=93, compress_size=None, date_time=(2013, 1, 23, 17, 42, 30), CRC=2198459834, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/7z/Precomp.h', file_size=188, compress_size=None, date_time=(2014, 6, 16, 5, 58, 24), CRC=4096952509, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/7z/makefile', file_size=678, compress_size=None, date_time=(2015, 6, 10, 9, 39, 22), CRC=3158846268, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/7z/makefile.gcc', file_size=1629, compress_size=None, date_time=(2015, 11, 15, 10, 24, 34), CRC=527533880, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/Lzma/LzmaUtil.c', file_size=6429, compress_size=None, date_time=(2015, 11, 8, 11, 32, 13), CRC=2879148874, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/Lzma/LzmaUtil.dsp', file_size=5382, compress_size=None, date_time=(2014, 6, 17, 4, 55, 20), CRC=4154607167, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/Lzma/LzmaUtil.dsw', file_size=539, compress_size=None, date_time=(2008, 3, 10, 8, 59, 46), CRC=2518857833, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/Lzma/makefile', file_size=430, compress_size=None, date_time=(2014, 6, 23, 8, 50, 55), CRC=3296792401, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/Lzma/makefile.gcc', file_size=756, compress_size=None, date_time=(2009, 12, 1, 17, 2, 37), CRC=3355576455, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/LzmaLib/LzmaLib.def', file_size=45, compress_size=None, date_time=(2008, 3, 9, 9, 56, 31), CRC=2416257379, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/LzmaLib/LzmaLib.dsp', file_size=5249, compress_size=None, date_time=(2013, 1, 18, 8, 31, 0), CRC=2268538934, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/LzmaLib/LzmaLib.dsw', file_size=537, compress_size=None, date_time=(2008, 3, 9, 11, 6, 56), CRC=960800602, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/LzmaLib/LzmaLibExports.c', file_size=331, compress_size=None, date_time=(2015, 11, 8, 11, 34, 1), CRC=2775141516, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/LzmaLib/makefile', file_size=563, compress_size=None, date_time=(2009, 12, 1, 16, 57, 42), CRC=3885108108, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/LzmaLib/resource.rc', file_size=78, compress_size=None, date_time=(2011, 4, 18, 17, 32, 26), CRC=2174381550, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/Precomp.c', file_size=93, compress_size=None, date_time=(2013, 1, 23, 17, 42, 30), CRC=2198459834, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/Precomp.h', file_size=188, compress_size=None, date_time=(2014, 6, 16, 5, 58, 24), CRC=4096952509, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/SfxSetup.c', file_size=15057, compress_size=None, date_time=(2016, 6, 8, 13, 57, 32), CRC=3305175054, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/SfxSetup.dsp', file_size=5944, compress_size=None, date_time=(2016, 6, 3, 10, 47, 49), CRC=3015262048, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/SfxSetup.dsw', file_size=539, compress_size=None, date_time=(2010, 10, 8, 5, 19, 13), CRC=588030063, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/makefile', file_size=588, compress_size=None, date_time=(2016, 6, 3, 10, 47, 45), CRC=2798836900, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/makefile_con', file_size=622, compress_size=None, date_time=(2016, 9, 28, 17, 13, 19), CRC=2038280809, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/resource.rc', file_size=111, compress_size=None, date_time=(2011, 4, 18, 17, 31, 56), CRC=273950753, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Util/SfxSetup/setup.ico', file_size=1078, compress_size=None, date_time=(2010, 10, 7, 10, 4, 12), CRC=4015181052, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Xz.c', file_size=2014, compress_size=None, date_time=(2015, 5, 1, 10, 59, 34), CRC=200871913, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/Xz.h', file_size=7696, compress_size=None, date_time=(2015, 5, 1, 10, 57, 5), CRC=2178524246, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/XzCrc64.c', file_size=2192, compress_size=None, date_time=(2015, 3, 1, 7, 27, 52), CRC=600441896, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/XzCrc64.h', file_size=641, compress_size=None, date_time=(2013, 1, 18, 9, 0, 0), CRC=3185457607, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/XzCrc64Opt.c', file_size=2031, compress_size=None, date_time=(2015, 3, 1, 7, 27, 52), CRC=348212229, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/XzDec.c', file_size=23560, compress_size=None, date_time=(2015, 11, 9, 9, 40, 2), CRC=1802180405, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/XzEnc.c', file_size=13454, compress_size=None, date_time=(2015, 9, 16, 11, 45, 12), CRC=1334930234, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/XzEnc.h', file_size=691, compress_size=None, date_time=(2011, 2, 7, 8, 16, 29), CRC=3958224979, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='C/XzIn.c', file_size=8556, compress_size=None, date_time=(2015, 11, 8, 11, 1, 42), CRC=457134308, mode='RA_ -r--r--r--', encrypted='-', compress_type='LZMA2:768k', block=0),
x7ZipInfo(filename='DOC/7zC.txt', file_size=5522, compress_size=25204, date_time=(2014, 12, 3, 14, 35, 44), CRC=3344251926, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='DOC/7zFormat.txt', file_size=7573, compress_size=None, date_time=(2010, 9, 16, 12, 57, 16), CRC=1066192742, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='DOC/Methods.txt', file_size=3031, compress_size=None, date_time=(2016, 9, 27, 10, 42, 33), CRC=990912801, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='DOC/installer.txt', file_size=5380, compress_size=None, date_time=(2015, 1, 3, 17, 57, 26), CRC=3902812455, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='DOC/lzma-history.txt', file_size=10683, compress_size=None, date_time=(2016, 10, 4, 14, 26, 39), CRC=1171188700, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='DOC/lzma-sdk.txt', file_size=12809, compress_size=None, date_time=(2016, 10, 4, 15, 27, 55), CRC=1406880680, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='DOC/lzma-specification.txt', file_size=36761, compress_size=None, date_time=(2015, 6, 14, 18, 31, 54), CRC=2869454184, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='DOC/lzma.txt', file_size=10355, compress_size=None, date_time=(2014, 12, 5, 15, 33, 44), CRC=2052410383, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='bin/installer/config.txt', file_size=141, compress_size=None, date_time=(2013, 4, 17, 3, 15, 45), CRC=2746174580, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='bin/installer/cr.bat', file_size=139, compress_size=None, date_time=(2014, 12, 7, 13, 58, 9), CRC=475598991, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:96k', block=1),
x7ZipInfo(filename='bin/7zS2.sfx', file_size=35328, compress_size=510167, date_time=(2016, 10, 4, 15, 13, 34), CRC=2909877524, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ LZMA2:1536k', block=2),
x7ZipInfo(filename='bin/7zS2con.sfx', file_size=35328, compress_size=None, date_time=(2016, 10, 4, 15, 13, 32), CRC=1948651580, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ LZMA2:1536k', block=2),
x7ZipInfo(filename='bin/7zSD.sfx', file_size=113152, compress_size=None, date_time=(2016, 10, 4, 15, 12, 31), CRC=257232511, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ LZMA2:1536k', block=2),
x7ZipInfo(filename='bin/7zdec.exe', file_size=42496, compress_size=None, date_time=(2016, 10, 4, 15, 13, 8), CRC=409487131, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ LZMA2:1536k', block=2),
x7ZipInfo(filename='bin/7zr.exe', file_size=454144, compress_size=None, date_time=(2016, 10, 4, 15, 12, 27), CRC=259414835, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ LZMA2:1536k', block=2),
x7ZipInfo(filename='bin/lzma.exe', file_size=97280, compress_size=None, date_time=(2016, 10, 4, 15, 12, 30), CRC=3637418033, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ LZMA2:1536k', block=2),
x7ZipInfo(filename='bin/x64/7zr.exe', file_size=742400, compress_size=None, date_time=(2016, 10, 4, 14, 58, 29), CRC=2893471509, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ LZMA2:1536k', block=2),
]),
('p7zip-zstd.7z', '', 'ERROR: Unsupported Method', [
x7ZipInfo(filename='bin', file_size=0, compress_size=0, date_time=(2006, 2, 6, 20, 1, 1), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='doc', file_size=0, compress_size=0, date_time=(2006, 2, 6, 20, 1, 8), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='doc/copying.txt', file_size=26948, compress_size=10492, date_time=(2001, 8, 30, 0, 19, 26), CRC=4090972253, mode='A_ -rwxr-xr-x', encrypted='-', compress_type='04F71101', block=0),
x7ZipInfo(filename='readme.txt', file_size=1601, compress_size=None, date_time=(2006, 2, 5, 11, 57, 18), CRC=3113337885, mode='A_ -rwxr-xr-x', encrypted='-', compress_type='04F71101', block=0),
x7ZipInfo(filename='bin/7za.exe', file_size=462336, compress_size=212033, date_time=(2006, 2, 5, 12, 1, 18), CRC=2934338533, mode='A_ -rwxr-xr-x', encrypted='-', compress_type='BCJ 04F71101', block=1),
]),
('ppmd.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2010, 4, 24, 23, 25, 39), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=41, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='PPMD:o6:mem16', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='PPMD:o6:mem16', block=0),
]),
('solid.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2006, 3, 15, 21, 54, 41), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=55, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:12', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:12', block=0),
]),
('symlink.7z', '', '', [
x7ZipInfo(filename='lib', file_size=0, compress_size=0, date_time=(2019, 3, 28, 0, 7, 51), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='lib/libabc.so', file_size=11, compress_size=1532, date_time=(2019, 3, 28, 0, 7, 21), CRC=4262439050, mode='A_ lrwxrwxrwx', encrypted='-', compress_type='LZMA2:13', block=0),
x7ZipInfo(filename='lib/libabc.so.1', file_size=13, compress_size=None, date_time=(2019, 3, 28, 0, 7, 21), CRC=2607345479, mode='A_ lrwxrwxrwx', encrypted='-', compress_type='LZMA2:13', block=0),
x7ZipInfo(filename='lib/libabc.so.1.2', file_size=15, compress_size=None, date_time=(2019, 3, 28, 0, 7, 21), CRC=2055456646, mode='A_ lrwxrwxrwx', encrypted='-', compress_type='LZMA2:13', block=0),
x7ZipInfo(filename='lib/libabc.so.1.2.3', file_size=6536, compress_size=None, date_time=(2019, 3, 27, 22, 49, 29), CRC=437637236, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:13', block=0),
x7ZipInfo(filename='lib64', file_size=3, compress_size=None, date_time=(2019, 3, 28, 0, 7, 57), CRC=2836347852, mode='A_ lrwxrwxrwx', encrypted='-', compress_type='LZMA2:13', block=0),
]),
('test_1.7z', '', '', [
x7ZipInfo(filename='scripts', file_size=0, compress_size=0, date_time=(2019, 3, 14, 0, 10, 8), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='scripts/py7zr', file_size=111, compress_size=441, date_time=(2019, 3, 14, 0, 10, 8), CRC=3010113243, mode='A_ -rwxr-xr-x', encrypted='-', compress_type='LZMA2:12', block=0),
x7ZipInfo(filename='setup.cfg', file_size=58, compress_size=None, date_time=(2019, 3, 14, 0, 7, 13), CRC=3703540999, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:12', block=0),
x7ZipInfo(filename='setup.py', file_size=559, compress_size=None, date_time=(2019, 3, 14, 0, 9, 1), CRC=2164028094, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:12', block=0),
]),
('test_2.7z', '', '', [
x7ZipInfo(filename='qt.qt5.597.gcc_64', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 53, 53), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='qt.qt5.597.gcc_64/installscript.qs', file_size=4326, compress_size=1460, date_time=(2018, 10, 18, 14, 53, 53), CRC=1855634885, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:6k', block=0),
]),
('test_3.7z', '', '', [
x7ZipInfo(filename='5.9.7', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 43), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/include', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/lib', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake/Qt5X11Extras', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/lib/pkgconfig', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs/modules', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QX11Info', file_size=26, compress_size=8472, date_time=(2018, 10, 16, 10, 26, 21), CRC=4157161445, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QtX11Extras', file_size=176, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=114277149, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QtX11ExtrasDepends', file_size=201, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=4070405527, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QtX11ExtrasVersion', file_size=32, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=2010279675, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.la', file_size=722, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=3863409080, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/qtx11extrasglobal.h', file_size=2280, compress_size=None, date_time=(2018, 10, 16, 10, 26, 21), CRC=2358042432, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/qtx11extrasversion.h', file_size=222, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=1994071976, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/qx11info_x11.h', file_size=2890, compress_size=None, date_time=(2018, 10, 16, 10, 26, 21), CRC=3448172875, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so', file_size=24, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=2139696644, mode='A_ lrwxrwxrwx', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so.5', file_size=24, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=2139696644, mode='A_ lrwxrwxrwx', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so.5.9.7', file_size=14568, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=2743318329, mode='A_ -rwxrwxr-x', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so.5.9', file_size=24, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=2139696644, mode='A_ lrwxrwxrwx', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake/Qt5X11Extras/Qt5X11ExtrasConfig.cmake', file_size=6704, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=2709408657, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake/Qt5X11Extras/Qt5X11ExtrasConfigVersion.cmake', file_size=287, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=184431719, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/pkgconfig/Qt5X11Extras.pc', file_size=283, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=764253401, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs/modules/qt_lib_x11extras.pri', file_size=555, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=2591675318, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs/modules/qt_lib_x11extras_private.pri', file_size=526, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=675001080, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.prl', file_size=1064, compress_size=None, date_time=(2018, 10, 18, 10, 28, 16), CRC=2164250057, mode='A_ -rw-rw-r--', encrypted='-', compress_type='LZMA2:15', block=0),
]),
('test_5.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2006, 3, 15, 21, 54, 41), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=48, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A', encrypted='-', compress_type='LZMA:25', block=0),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A', encrypted='-', compress_type='LZMA:25', block=0),
]),
('test_6.7z', '', '', [
x7ZipInfo(filename='5.9.7', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 43), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/include', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras', file_size=0, compress_size=0, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QX11Info', file_size=26, compress_size=8457, date_time=(2018, 10, 16, 10, 26, 21), CRC=4157161445, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QtX11Extras', file_size=176, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=114277149, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QtX11ExtrasDepends', file_size=201, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=4070405527, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/QtX11ExtrasVersion', file_size=32, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=2010279675, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/qtx11extrasglobal.h', file_size=2280, compress_size=None, date_time=(2018, 10, 16, 10, 26, 21), CRC=2358042432, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/qtx11extrasversion.h', file_size=222, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=1994071976, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/include/QtX11Extras/qx11info_x11.h', file_size=2890, compress_size=None, date_time=(2018, 10, 16, 10, 26, 21), CRC=3448172875, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib', file_size=0, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake', file_size=0, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake/Qt5X11Extras', file_size=0, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake/Qt5X11Extras/Qt5X11ExtrasConfig.cmake', file_size=6704, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=2709408657, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/cmake/Qt5X11Extras/Qt5X11ExtrasConfigVersion.cmake', file_size=287, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=184431719, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.la', file_size=722, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=3863409080, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.prl', file_size=1064, compress_size=None, date_time=(2018, 10, 18, 10, 28, 16), CRC=2164250057, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so', file_size=24, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=2139696644, mode='A_ lrwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so.5', file_size=24, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=2139696644, mode='A_ lrwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so.5.9', file_size=24, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=2139696644, mode='A_ lrwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/libQt5X11Extras.so.5.9.7', file_size=14568, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=2743318329, mode='A_ 0rwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/pkgconfig', file_size=0, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/lib/pkgconfig/Qt5X11Extras.pc', file_size=283, compress_size=None, date_time=(2018, 10, 16, 10, 26, 27), CRC=764253401, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs', file_size=0, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs/modules', file_size=0, compress_size=None, date_time=(2018, 10, 18, 14, 52, 42), CRC=None, mode='D_ drwxrwxr-x', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs/modules/qt_lib_x11extras.pri', file_size=555, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=2591675318, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
x7ZipInfo(filename='5.9.7/gcc_64/mkspecs/modules/qt_lib_x11extras_private.pri', file_size=526, compress_size=None, date_time=(2018, 10, 16, 10, 26, 24), CRC=675001080, mode='A_ 0rw-rw-r--', encrypted='-', compress_type='LZMA2:24', block=0),
]),
('test_folder.7z', '', '', [
x7ZipInfo(filename='test1', file_size=0, compress_size=0, date_time=(2019, 10, 11, 10, 8, 44), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test1/test2', file_size=0, compress_size=0, date_time=(2019, 10, 11, 14, 49, 34), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test1/test2/test1', file_size=0, compress_size=0, date_time=(2019, 10, 11, 14, 49, 34), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test2', file_size=0, compress_size=0, date_time=(2019, 10, 11, 10, 3, 32), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test2/test1', file_size=0, compress_size=0, date_time=(2019, 10, 11, 14, 49, 34), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test1/test2/test1/testfile1.txt', file_size=0, compress_size=0, date_time=(2019, 9, 13, 13, 58, 30), CRC=None, mode='A', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test1/test2/testfile2.txt', file_size=0, compress_size=0, date_time=(2019, 9, 13, 13, 58, 30), CRC=None, mode='A', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test1/testfile1.txt', file_size=0, compress_size=0, date_time=(2019, 9, 13, 13, 58, 30), CRC=None, mode='A', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test2/test1/testfile1.txt', file_size=0, compress_size=0, date_time=(2019, 9, 13, 13, 58, 30), CRC=None, mode='A', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test2/testfile2.txt', file_size=0, compress_size=0, date_time=(2019, 9, 13, 13, 58, 30), CRC=None, mode='A', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='testfile.txt', file_size=0, compress_size=0, date_time=(2019, 9, 13, 13, 58, 30), CRC=None, mode='A', encrypted='-', compress_type=None, block=None),
]),
('test_lzma2bcj2.7z', '', '', [
x7ZipInfo(filename='test', file_size=0, compress_size=0, date_time=(2006, 3, 15, 21, 54, 41), CRC=None, mode='D_ drwx------', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='test/test2.txt', file_size=33, compress_size=60, date_time=(2006, 3, 15, 21, 43, 36), CRC=2293734094, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ2 LZMA2:12', block=0),
x7ZipInfo(filename='test1.txt', file_size=33, compress_size=None, date_time=(2006, 3, 15, 21, 43, 48), CRC=140667454, mode='A_ -rw-r--r--', encrypted='-', compress_type='BCJ2 LZMA2:12', block=0),
]),
('umlaut-non_solid.7z', '', '', [
x7ZipInfo(filename='täst.txt', file_size=51, compress_size=51, date_time=(2006, 3, 15, 22, 42, 17), CRC=2149857894, mode='A', encrypted='-', compress_type='LZMA:25', block=0),
]),
('umlaut-solid.7z', '', '', [
x7ZipInfo(filename='täst.txt', file_size=51, compress_size=51, date_time=(2006, 3, 15, 22, 42, 17), CRC=2149857894, mode='A', encrypted='-', compress_type='LZMA:25', block=0),
]),
('x86.bin', '', '', [
x7ZipInfo(filename='vmp0', file_size=0, compress_size=0, date_time=None, CRC=None, mode=None, encrypted=None, compress_type=None, block=None),
x7ZipInfo(filename='vmp1', file_size=11264, compress_size=11264, date_time=None, CRC=None, mode=None, encrypted=None, compress_type=None, block=None),
x7ZipInfo(filename='vmp2', file_size=512, compress_size=512, date_time=None, CRC=None, mode=None, encrypted=None, compress_type=None, block=None),
]),
('zerosize.7z', '', '', [
x7ZipInfo(filename='one', file_size=0, compress_size=0, date_time=(2019, 5, 27, 22, 46, 35), CRC=None, mode='D_ drwxr-xr-x', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='one/zero', file_size=0, compress_size=0, date_time=(2019, 5, 27, 22, 46, 18), CRC=None, mode='A_ -rw-r--r--', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='one/one', file_size=2, compress_size=6, date_time=(2019, 5, 27, 22, 46, 35), CRC=1733426259, mode='A_ -rw-r--r--', encrypted='-', compress_type='LZMA2:12', block=0),
]),
('zstd.7z', '', 'ERROR: Unsupported Method', [
x7ZipInfo(filename='scripts', file_size=0, compress_size=0, date_time=(2019, 3, 14, 0, 10, 8), CRC=None, mode='D', encrypted='-', compress_type=None, block=None),
x7ZipInfo(filename='scripts/py7zr', file_size=111, compress_size=436, date_time=(2019, 3, 14, 0, 10, 8), CRC=3010113243, mode='A', encrypted='-', compress_type='04F71101', block=0),
x7ZipInfo(filename='setup.cfg', file_size=58, compress_size=None, date_time=(2019, 3, 14, 0, 7, 13), CRC=3703540999, mode='A', encrypted='-', compress_type='04F71101', block=0),
x7ZipInfo(filename='setup.py', file_size=559, compress_size=None, date_time=(2019, 3, 14, 0, 9, 1), CRC=2164028094, mode='A', encrypted='-', compress_type='04F71101', block=0),
]),
]
datetime_attributes = ('year', 'month', 'day', 'hour', 'minute', 'second')
# The date_time tuples in _ARCHIVES above are UTC; ARCHIVES below carries them converted to the local time zone.
ARCHIVES = [
(_[0], _[1], _[2], [
dataclasses.replace(
i, date_time=attrgetter(*datetime_attributes)(
datetime(*list(i.date_time), tzinfo=timezone.utc).astimezone(tz=None)
) if i.date_time else None
) for i in _[3]
]) for _ in _ARCHIVES
]
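# A minimal sketch of the conversion ARCHIVES performs, applied to a single
# tuple; the sample value is illustrative only. A UTC date_time tuple is
# rebuilt as an aware datetime, shifted to the local time zone, and unpacked
# back into the same six-component tuple order.
from datetime import datetime as _datetime, timezone as _timezone
from operator import attrgetter as _attrgetter

_sample_utc = (2020, 6, 7, 2, 45, 18)
_sample_local = _attrgetter(*datetime_attributes)(
    _datetime(*_sample_utc, tzinfo=_timezone.utc).astimezone(tz=None)
)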
| 158.154309
| 483
| 0.669813
| 12,486
| 78,919
| 4.077046
| 0.06207
| 0.058461
| 0.148922
| 0.096374
| 0.876046
| 0.869171
| 0.855813
| 0.836169
| 0.825305
| 0.788119
| 0
| 0.159389
| 0.117488
| 78,919
| 498
| 484
| 158.471888
| 0.571525
| 0.002129
| 0
| 0.268994
| 0
| 0.004107
| 0.194095
| 0.053565
| 0.034908
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010267
| 0
| 0.010267
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 61aa1d3adbf7944f1cb7fa4bd3fc18573a6ac72e
| 38,392
| py
| Python
| sdk/python/pulumi_sakuracloud/database.py
| sacloud/pulumi-sakuracloud
| 3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e
| ["ECL-2.0", "Apache-2.0"]
| 6
| 2019-12-07T07:46:05.000Z
| 2020-12-19T02:41:42.000Z
| sdk/python/pulumi_sakuracloud/database.py
| sacloud/pulumi-sakuracloud
| 3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e
| ["ECL-2.0", "Apache-2.0"]
| 5
| 2019-09-11T04:41:06.000Z
| 2021-10-19T07:50:34.000Z
| sdk/python/pulumi_sakuracloud/database.py
| sacloud/pulumi-sakuracloud
| 3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e
| ["ECL-2.0", "Apache-2.0"]
| 2
| 2019-09-08T05:38:16.000Z
| 2021-06-24T01:32:47.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DatabaseArgs', 'Database']
@pulumi.input_type
class DatabaseArgs:
def __init__(__self__, *,
network_interface: pulumi.Input['DatabaseNetworkInterfaceArgs'],
password: pulumi.Input[str],
username: pulumi.Input[str],
backup: Optional[pulumi.Input['DatabaseBackupArgs']] = None,
database_type: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
icon_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parameters: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
plan: Optional[pulumi.Input[str]] = None,
replica_password: Optional[pulumi.Input[str]] = None,
replica_user: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
zone: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Database resource.
:param pulumi.Input['DatabaseNetworkInterfaceArgs'] network_interface: A `network_interface` block as defined below.
:param pulumi.Input[str] password: The password of the default user on the database.
:param pulumi.Input[str] username: The name of the default user on the database. The length of this value must be in the range [`3`-`20`]. Changing this forces a new resource to be created.
:param pulumi.Input['DatabaseBackupArgs'] backup: A `backup` block as defined below.
:param pulumi.Input[str] database_type: The type of the database. This must be one of [`mariadb`/`postgres`]. Changing this forces a new resource to be created. Default:`postgres`.
:param pulumi.Input[str] description: The description of the Database. The length of this value must be in the range [`1`-`512`].
:param pulumi.Input[str] icon_id: The icon id to attach to the Database.
:param pulumi.Input[str] name: The name of the Database. The length of this value must be in the range [`1`-`64`].
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] parameters: The map for setting RDBMS-specific parameters. Valid keys can be found with the `usacloud database list-parameters` command.
:param pulumi.Input[str] plan: The plan name of the Database. This must be one of [`10g`/`30g`/`90g`/`240g`/`500g`/`1t`]. Changing this forces a new resource to be created. Default:`10g`.
:param pulumi.Input[str] replica_password: The password of the user that performs replication.
:param pulumi.Input[str] replica_user: The name of the user that performs replication. Default:`replica`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Any tags to assign to the Database.
:param pulumi.Input[str] zone: The name of the zone in which the Database will be created (e.g. `is1a`, `tk1a`). Changing this forces a new resource to be created.
"""
pulumi.set(__self__, "network_interface", network_interface)
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "username", username)
if backup is not None:
pulumi.set(__self__, "backup", backup)
if database_type is not None:
pulumi.set(__self__, "database_type", database_type)
if description is not None:
pulumi.set(__self__, "description", description)
if icon_id is not None:
pulumi.set(__self__, "icon_id", icon_id)
if name is not None:
pulumi.set(__self__, "name", name)
if parameters is not None:
pulumi.set(__self__, "parameters", parameters)
if plan is not None:
pulumi.set(__self__, "plan", plan)
if replica_password is not None:
pulumi.set(__self__, "replica_password", replica_password)
if replica_user is not None:
pulumi.set(__self__, "replica_user", replica_user)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if zone is not None:
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="networkInterface")
def network_interface(self) -> pulumi.Input['DatabaseNetworkInterfaceArgs']:
"""
A `network_interface` block as defined below.
"""
return pulumi.get(self, "network_interface")
@network_interface.setter
def network_interface(self, value: pulumi.Input['DatabaseNetworkInterfaceArgs']):
pulumi.set(self, "network_interface", value)
@property
@pulumi.getter
def password(self) -> pulumi.Input[str]:
"""
The password of the default user on the database.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: pulumi.Input[str]):
pulumi.set(self, "password", value)
@property
@pulumi.getter
def username(self) -> pulumi.Input[str]:
"""
The name of the default user on the database. The length of this value must be in the range [`3`-`20`]. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "username")
@username.setter
def username(self, value: pulumi.Input[str]):
pulumi.set(self, "username", value)
@property
@pulumi.getter
def backup(self) -> Optional[pulumi.Input['DatabaseBackupArgs']]:
"""
A `backup` block as defined below.
"""
return pulumi.get(self, "backup")
@backup.setter
def backup(self, value: Optional[pulumi.Input['DatabaseBackupArgs']]):
pulumi.set(self, "backup", value)
@property
@pulumi.getter(name="databaseType")
def database_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of the database. This must be one of [`mariadb`/`postgres`]. Changing this forces a new resource to be created. Default:`postgres`.
"""
return pulumi.get(self, "database_type")
@database_type.setter
def database_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "database_type", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the Database. The length of this value must be in the range [`1`-`512`].
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="iconId")
def icon_id(self) -> Optional[pulumi.Input[str]]:
"""
The icon id to attach to the Database.
"""
return pulumi.get(self, "icon_id")
@icon_id.setter
def icon_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "icon_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Database. The length of this value must be in the range [`1`-`64`].
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def parameters(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
The map for setting RDBMS-specific parameters. Valid keys can be found with the `usacloud database list-parameters` command.
"""
return pulumi.get(self, "parameters")
@parameters.setter
def parameters(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "parameters", value)
@property
@pulumi.getter
def plan(self) -> Optional[pulumi.Input[str]]:
"""
The plan name of the Database. This must be one of [`10g`/`30g`/`90g`/`240g`/`500g`/`1t`]. Changing this forces a new resource to be created. Default:`10g`.
"""
return pulumi.get(self, "plan")
@plan.setter
def plan(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "plan", value)
@property
@pulumi.getter(name="replicaPassword")
def replica_password(self) -> Optional[pulumi.Input[str]]:
"""
The password of the user that performs replication.
"""
return pulumi.get(self, "replica_password")
@replica_password.setter
def replica_password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "replica_password", value)
@property
@pulumi.getter(name="replicaUser")
def replica_user(self) -> Optional[pulumi.Input[str]]:
"""
The name of the user that performs replication. Default:`replica`.
"""
return pulumi.get(self, "replica_user")
@replica_user.setter
def replica_user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "replica_user", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Any tags to assign to the Database.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def zone(self) -> Optional[pulumi.Input[str]]:
"""
The name of the zone in which the Database will be created (e.g. `is1a`, `tk1a`). Changing this forces a new resource to be created.
"""
return pulumi.get(self, "zone")
@zone.setter
def zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone", value)
@pulumi.input_type
class _DatabaseState:
def __init__(__self__, *,
backup: Optional[pulumi.Input['DatabaseBackupArgs']] = None,
database_type: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
icon_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_interface: Optional[pulumi.Input['DatabaseNetworkInterfaceArgs']] = None,
parameters: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
password: Optional[pulumi.Input[str]] = None,
plan: Optional[pulumi.Input[str]] = None,
replica_password: Optional[pulumi.Input[str]] = None,
replica_user: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
username: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Database resources.
:param pulumi.Input['DatabaseBackupArgs'] backup: A `backup` block as defined below.
:param pulumi.Input[str] database_type: The type of the database. This must be one of [`mariadb`/`postgres`]. Changing this forces a new resource to be created. Default:`postgres`.
:param pulumi.Input[str] description: The description of the Database. The length of this value must be in the range [`1`-`512`].
:param pulumi.Input[str] icon_id: The icon id to attach to the Database.
:param pulumi.Input[str] name: The name of the Database. The length of this value must be in the range [`1`-`64`].
:param pulumi.Input['DatabaseNetworkInterfaceArgs'] network_interface: A `network_interface` block as defined below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] parameters: The map for setting RDBMS-specific parameters. Valid keys can be found with the `usacloud database list-parameters` command.
:param pulumi.Input[str] password: The password of the default user on the database.
:param pulumi.Input[str] plan: The plan name of the Database. This must be one of [`10g`/`30g`/`90g`/`240g`/`500g`/`1t`]. Changing this forces a new resource to be created. Default:`10g`.
:param pulumi.Input[str] replica_password: The password of the user that performs replication.
:param pulumi.Input[str] replica_user: The name of the user that performs replication. Default:`replica`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Any tags to assign to the Database.
:param pulumi.Input[str] username: The name of the default user on the database. The length of this value must be in the range [`3`-`20`]. Changing this forces a new resource to be created.
:param pulumi.Input[str] zone: The name of the zone in which the Database will be created (e.g. `is1a`, `tk1a`). Changing this forces a new resource to be created.
"""
if backup is not None:
pulumi.set(__self__, "backup", backup)
if database_type is not None:
pulumi.set(__self__, "database_type", database_type)
if description is not None:
pulumi.set(__self__, "description", description)
if icon_id is not None:
pulumi.set(__self__, "icon_id", icon_id)
if name is not None:
pulumi.set(__self__, "name", name)
if network_interface is not None:
pulumi.set(__self__, "network_interface", network_interface)
if parameters is not None:
pulumi.set(__self__, "parameters", parameters)
if password is not None:
pulumi.set(__self__, "password", password)
if plan is not None:
pulumi.set(__self__, "plan", plan)
if replica_password is not None:
pulumi.set(__self__, "replica_password", replica_password)
if replica_user is not None:
pulumi.set(__self__, "replica_user", replica_user)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if username is not None:
pulumi.set(__self__, "username", username)
if zone is not None:
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter
def backup(self) -> Optional[pulumi.Input['DatabaseBackupArgs']]:
"""
A `backup` block as defined below.
"""
return pulumi.get(self, "backup")
@backup.setter
def backup(self, value: Optional[pulumi.Input['DatabaseBackupArgs']]):
pulumi.set(self, "backup", value)
@property
@pulumi.getter(name="databaseType")
def database_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of the database. This must be one of [`mariadb`/`postgres`]. Changing this forces a new resource to be created. Default:`postgres`.
"""
return pulumi.get(self, "database_type")
@database_type.setter
def database_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "database_type", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the Database. The length of this value must be in the range [`1`-`512`].
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="iconId")
def icon_id(self) -> Optional[pulumi.Input[str]]:
"""
The icon id to attach to the Database.
"""
return pulumi.get(self, "icon_id")
@icon_id.setter
def icon_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "icon_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Database. The length of this value must be in the range [`1`-`64`].
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkInterface")
def network_interface(self) -> Optional[pulumi.Input['DatabaseNetworkInterfaceArgs']]:
"""
A `network_interface` block as defined below.
"""
return pulumi.get(self, "network_interface")
@network_interface.setter
def network_interface(self, value: Optional[pulumi.Input['DatabaseNetworkInterfaceArgs']]):
pulumi.set(self, "network_interface", value)
@property
@pulumi.getter
def parameters(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
The map for setting RDBMS-specific parameters. Valid keys can be found with the `usacloud database list-parameters` command.
"""
return pulumi.get(self, "parameters")
@parameters.setter
def parameters(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "parameters", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
The password of the default user on the database.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
@property
@pulumi.getter
def plan(self) -> Optional[pulumi.Input[str]]:
"""
The plan name of the Database. This must be one of [`10g`/`30g`/`90g`/`240g`/`500g`/`1t`]. Changing this forces a new resource to be created. Default:`10g`.
"""
return pulumi.get(self, "plan")
@plan.setter
def plan(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "plan", value)
@property
@pulumi.getter(name="replicaPassword")
def replica_password(self) -> Optional[pulumi.Input[str]]:
"""
The password of the user that performs replication.
"""
return pulumi.get(self, "replica_password")
@replica_password.setter
def replica_password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "replica_password", value)
@property
@pulumi.getter(name="replicaUser")
def replica_user(self) -> Optional[pulumi.Input[str]]:
"""
The name of the user that performs replication. Default:`replica`.
"""
return pulumi.get(self, "replica_user")
@replica_user.setter
def replica_user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "replica_user", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Any tags to assign to the Database.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def username(self) -> Optional[pulumi.Input[str]]:
"""
The name of the default user on the database. The length of this value must be in the range [`3`-`20`]. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "username")
@username.setter
def username(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "username", value)
@property
@pulumi.getter
def zone(self) -> Optional[pulumi.Input[str]]:
"""
The name of the zone in which the Database will be created (e.g. `is1a`, `tk1a`). Changing this forces a new resource to be created.
"""
return pulumi.get(self, "zone")
@zone.setter
def zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone", value)
class Database(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backup: Optional[pulumi.Input[pulumi.InputType['DatabaseBackupArgs']]] = None,
database_type: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
icon_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_interface: Optional[pulumi.Input[pulumi.InputType['DatabaseNetworkInterfaceArgs']]] = None,
parameters: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
password: Optional[pulumi.Input[str]] = None,
plan: Optional[pulumi.Input[str]] = None,
replica_password: Optional[pulumi.Input[str]] = None,
replica_user: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
username: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a SakuraCloud Database.
## Example Usage
```python
import pulumi
import pulumi_sakuracloud as sakuracloud
config = pulumi.Config()
username = config.require_object("username")
password = config.require_object("password")
replica_password = config.require_object("replicaPassword")
foobar_switch = sakuracloud.Switch("foobarSwitch")
foobar_database = sakuracloud.Database("foobarDatabase",
database_type="mariadb",
plan="30g",
username=username,
password=password,
replica_password=replica_password,
network_interface=sakuracloud.DatabaseNetworkInterfaceArgs(
switch_id=foobar_switch.id,
ip_address="192.168.11.11",
netmask=24,
gateway="192.168.11.1",
port=3306,
source_ranges=[
"192.168.11.0/24",
"192.168.12.0/24",
],
),
backup=sakuracloud.DatabaseBackupArgs(
time="00:00",
weekdays=[
"mon",
"tue",
],
),
parameters={
"max_connections": "100",
},
description="description",
tags=[
"tag1",
"tag2",
])
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['DatabaseBackupArgs']] backup: A `backup` block as defined below.
:param pulumi.Input[str] database_type: The type of the database. This must be one of [`mariadb`/`postgres`]. Changing this forces a new resource to be created. Default:`postgres`.
:param pulumi.Input[str] description: The description of the Database. The length of this value must be in the range [`1`-`512`].
:param pulumi.Input[str] icon_id: The icon id to attach to the Database.
:param pulumi.Input[str] name: The name of the Database. The length of this value must be in the range [`1`-`64`].
:param pulumi.Input[pulumi.InputType['DatabaseNetworkInterfaceArgs']] network_interface: A `network_interface` block as defined below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] parameters: The map for setting RDBMS-specific parameters. Valid keys can be found with the `usacloud database list-parameters` command.
:param pulumi.Input[str] password: The password of the default user on the database.
:param pulumi.Input[str] plan: The plan name of the Database. This must be one of [`10g`/`30g`/`90g`/`240g`/`500g`/`1t`]. Changing this forces a new resource to be created. Default:`10g`.
:param pulumi.Input[str] replica_password: The password of the user that performs replication.
:param pulumi.Input[str] replica_user: The name of the user that performs replication. Default:`replica`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Any tags to assign to the Database.
:param pulumi.Input[str] username: The name of the default user on the database. The length of this value must be in the range [`3`-`20`]. Changing this forces a new resource to be created.
:param pulumi.Input[str] zone: The name of the zone in which the Database will be created (e.g. `is1a`, `tk1a`). Changing this forces a new resource to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DatabaseArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a SakuraCloud Database.
## Example Usage
```python
import pulumi
import pulumi_sakuracloud as sakuracloud
config = pulumi.Config()
username = config.require_object("username")
password = config.require_object("password")
replica_password = config.require_object("replicaPassword")
foobar_switch = sakuracloud.Switch("foobarSwitch")
foobar_database = sakuracloud.Database("foobarDatabase",
database_type="mariadb",
plan="30g",
username=username,
password=password,
replica_password=replica_password,
network_interface=sakuracloud.DatabaseNetworkInterfaceArgs(
switch_id=foobar_switch.id,
ip_address="192.168.11.11",
netmask=24,
gateway="192.168.11.1",
port=3306,
source_ranges=[
"192.168.11.0/24",
"192.168.12.0/24",
],
),
backup=sakuracloud.DatabaseBackupArgs(
time="00:00",
weekdays=[
"mon",
"tue",
],
),
parameters={
"max_connections": "100",
},
description="description",
tags=[
"tag1",
"tag2",
])
```
:param str resource_name: The name of the resource.
:param DatabaseArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DatabaseArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backup: Optional[pulumi.Input[pulumi.InputType['DatabaseBackupArgs']]] = None,
database_type: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
icon_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_interface: Optional[pulumi.Input[pulumi.InputType['DatabaseNetworkInterfaceArgs']]] = None,
parameters: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
password: Optional[pulumi.Input[str]] = None,
plan: Optional[pulumi.Input[str]] = None,
replica_password: Optional[pulumi.Input[str]] = None,
replica_user: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
username: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DatabaseArgs.__new__(DatabaseArgs)
__props__.__dict__["backup"] = backup
__props__.__dict__["database_type"] = database_type
__props__.__dict__["description"] = description
__props__.__dict__["icon_id"] = icon_id
__props__.__dict__["name"] = name
if network_interface is None and not opts.urn:
raise TypeError("Missing required property 'network_interface'")
__props__.__dict__["network_interface"] = network_interface
__props__.__dict__["parameters"] = parameters
if password is None and not opts.urn:
raise TypeError("Missing required property 'password'")
__props__.__dict__["password"] = password
__props__.__dict__["plan"] = plan
__props__.__dict__["replica_password"] = replica_password
__props__.__dict__["replica_user"] = replica_user
__props__.__dict__["tags"] = tags
if username is None and not opts.urn:
raise TypeError("Missing required property 'username'")
__props__.__dict__["username"] = username
__props__.__dict__["zone"] = zone
super(Database, __self__).__init__(
'sakuracloud:index/database:Database',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
backup: Optional[pulumi.Input[pulumi.InputType['DatabaseBackupArgs']]] = None,
database_type: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
icon_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_interface: Optional[pulumi.Input[pulumi.InputType['DatabaseNetworkInterfaceArgs']]] = None,
parameters: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
password: Optional[pulumi.Input[str]] = None,
plan: Optional[pulumi.Input[str]] = None,
replica_password: Optional[pulumi.Input[str]] = None,
replica_user: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
username: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None) -> 'Database':
"""
Get an existing Database resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['DatabaseBackupArgs']] backup: A `backup` block as defined below.
:param pulumi.Input[str] database_type: The type of the database. This must be one of [`mariadb`/`postgres`]. Changing this forces a new resource to be created. Default:`postgres`.
:param pulumi.Input[str] description: The description of the Database. The length of this value must be in the range [`1`-`512`].
:param pulumi.Input[str] icon_id: The icon id to attach to the Database.
:param pulumi.Input[str] name: The name of the Database. The length of this value must be in the range [`1`-`64`].
:param pulumi.Input[pulumi.InputType['DatabaseNetworkInterfaceArgs']] network_interface: A `network_interface` block as defined below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] parameters: The map for setting RDBMS-specific parameters. Valid keys can be found with the `usacloud database list-parameters` command.
:param pulumi.Input[str] password: The password of the default user on the database.
:param pulumi.Input[str] plan: The plan name of the Database. This must be one of [`10g`/`30g`/`90g`/`240g`/`500g`/`1t`]. Changing this forces a new resource to be created. Default:`10g`.
:param pulumi.Input[str] replica_password: The password of the user that performs replication.
:param pulumi.Input[str] replica_user: The name of the user that performs replication. Default:`replica`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Any tags to assign to the Database.
:param pulumi.Input[str] username: The name of the default user on the database. The length of this value must be in the range [`3`-`20`]. Changing this forces a new resource to be created.
:param pulumi.Input[str] zone: The name of the zone in which the Database will be created (e.g. `is1a`, `tk1a`). Changing this forces a new resource to be created.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DatabaseState.__new__(_DatabaseState)
__props__.__dict__["backup"] = backup
__props__.__dict__["database_type"] = database_type
__props__.__dict__["description"] = description
__props__.__dict__["icon_id"] = icon_id
__props__.__dict__["name"] = name
__props__.__dict__["network_interface"] = network_interface
__props__.__dict__["parameters"] = parameters
__props__.__dict__["password"] = password
__props__.__dict__["plan"] = plan
__props__.__dict__["replica_password"] = replica_password
__props__.__dict__["replica_user"] = replica_user
__props__.__dict__["tags"] = tags
__props__.__dict__["username"] = username
__props__.__dict__["zone"] = zone
return Database(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def backup(self) -> pulumi.Output[Optional['outputs.DatabaseBackup']]:
"""
A `backup` block as defined below.
"""
return pulumi.get(self, "backup")
@property
@pulumi.getter(name="databaseType")
def database_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of the database. This must be one of [`mariadb`/`postgres`]. Changing this forces a new resource to be created. Default:`postgres`.
"""
return pulumi.get(self, "database_type")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the Database. The length of this value must be in the range [`1`-`512`].
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="iconId")
def icon_id(self) -> pulumi.Output[Optional[str]]:
"""
The icon id to attach to the Database.
"""
return pulumi.get(self, "icon_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the Database. The length of this value must be in the range [`1`-`64`].
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkInterface")
def network_interface(self) -> pulumi.Output['outputs.DatabaseNetworkInterface']:
"""
A `network_interface` block as defined below.
"""
return pulumi.get(self, "network_interface")
@property
@pulumi.getter
def parameters(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
The map for setting RDBMS-specific parameters. Valid keys can be found with the `usacloud database list-parameters` command.
"""
return pulumi.get(self, "parameters")
@property
@pulumi.getter
def password(self) -> pulumi.Output[str]:
"""
The password of the default user on the database.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def plan(self) -> pulumi.Output[Optional[str]]:
"""
The plan name of the Database. This must be one of [`10g`/`30g`/`90g`/`240g`/`500g`/`1t`]. Changing this forces a new resource to be created. Default:`10g`.
"""
return pulumi.get(self, "plan")
@property
@pulumi.getter(name="replicaPassword")
def replica_password(self) -> pulumi.Output[Optional[str]]:
"""
The password of the user that performs replication.
"""
return pulumi.get(self, "replica_password")
@property
@pulumi.getter(name="replicaUser")
def replica_user(self) -> pulumi.Output[Optional[str]]:
"""
The name of the user that performs replication. Default:`replica`.
"""
return pulumi.get(self, "replica_user")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Any tags to assign to the Database.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def username(self) -> pulumi.Output[str]:
"""
The name of the default user on the database. The length of this value must be in the range [`3`-`20`]. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "username")
@property
@pulumi.getter
def zone(self) -> pulumi.Output[str]:
"""
The name of the zone in which the Database will be created (e.g. `is1a`, `tk1a`). Changing this forces a new resource to be created.
"""
return pulumi.get(self, "zone")
| 44.955504
| 197
| 0.627318
| 4,544
| 38,392
| 5.151188
| 0.050616
| 0.099628
| 0.094502
| 0.078951
| 0.921007
| 0.912505
| 0.893109
| 0.878113
| 0.871833
| 0.859743
| 0
| 0.010738
| 0.25771
| 38,392
| 853
| 198
| 45.008206
| 0.810618
| 0.366196
| 0
| 0.826923
| 1
| 0
| 0.095258
| 0.015461
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16453
| false
| 0.108974
| 0.014957
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
f60423cab00c86fbe56f600023d2ba99248c166d
| 135
|
py
|
Python
|
rl/online_learners/__init__.py
|
Leonardo-H/DR-PG
|
dc5467ddd73a7b89938aac9d44735b019a3fd7d7
|
[
"Apache-2.0"
] | 2
|
2019-11-25T01:56:49.000Z
|
2021-05-20T03:22:59.000Z
|
rl/online_learners/__init__.py
|
Leonardo-H/DR-PG
|
dc5467ddd73a7b89938aac9d44735b019a3fd7d7
|
[
"Apache-2.0"
] | null | null | null |
rl/online_learners/__init__.py
|
Leonardo-H/DR-PG
|
dc5467ddd73a7b89938aac9d44735b019a3fd7d7
|
[
"Apache-2.0"
] | 1
|
2019-11-24T02:30:38.000Z
|
2019-11-24T02:30:38.000Z
|
from rl.online_learners.online_optimizer import rlOnlineOptimizer
from rl.online_learners.online_optimizer import BasicOnlineOptimizer
| 45
| 68
| 0.911111
| 16
| 135
| 7.4375
| 0.5
| 0.10084
| 0.201681
| 0.336134
| 0.689076
| 0.689076
| 0.689076
| 0
| 0
| 0
| 0
| 0
| 0.059259
| 135
| 2
| 69
| 67.5
| 0.937008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1424e141b73154c45c20c42b2137a231077e11df
| 37
|
py
|
Python
|
src/lib/email/__init__.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/email/__init__.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/email/__init__.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
import _sk_fail; _sk_fail._("email")
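# Skulpt ships this stub so that `import email` fails fast: the _sk_fail
# helper raises an exception reporting that the module is not implemented in
# Skulpt (the helper itself lives in src/lib/_sk_fail.py, not shown here).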
| 18.5
| 36
| 0.756757
| 6
| 37
| 3.833333
| 0.666667
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.676471
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
147c66967d4425be4eb6fe4c88aba64d37c98de2
| 10,832
|
py
|
Python
|
tests/test_hyperband_stopping.py
|
ashzblum/sweeps
|
b05b9ead042569c157b95fae53177f8fabbf9760
|
[
"MIT"
] | null | null | null |
tests/test_hyperband_stopping.py
|
ashzblum/sweeps
|
b05b9ead042569c157b95fae53177f8fabbf9760
|
[
"MIT"
] | null | null | null |
tests/test_hyperband_stopping.py
|
ashzblum/sweeps
|
b05b9ead042569c157b95fae53177f8fabbf9760
|
[
"MIT"
] | null | null | null |
from .. import stop_runs, next_run, RunState, SweepRun
def test_hyperband_min_iter_bands():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"min_iter": 3,
"eta": 3,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
run = next_run(sweep_config, [])
run.state = RunState.running
run.history = [{"loss": 10} for _ in range(4)]
run2 = next_run(sweep_config, [run])
run2.state = RunState.running
run2.history = [{"loss": 10 - i} for i in range(10)]
to_stop = stop_runs(sweep_config, [run, run2])
assert to_stop[0] is run
assert to_stop[0].early_terminate_info["bands"][:3] == [3, 9, 27]
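# The expected bands are consistent with band_k = min_iter * eta**k, inferred
# from the assertions in these tests rather than from the sweeps implementation:
assert [3 * 3 ** k for k in range(3)] == [3, 9, 27]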
def test_hyperband_min_iter_bands_max():
sweep_config = {
"method": "grid",
"metric": {"name": "accuracy", "goal": "maximize"},
"early_terminate": {
"type": "hyperband",
"min_iter": 3,
"eta": 3,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
run = next_run(sweep_config, [])
run.state = RunState.running
run.history = [{"accuracy": 10} for _ in range(4)]
run2 = next_run(sweep_config, [run])
run2.state = RunState.running
run2.history = [{"accuracy": 10 + i} for i in range(10)]
to_stop = stop_runs(sweep_config, [run, run2])
assert to_stop[0] is run
assert to_stop[0].early_terminate_info["bands"][:3] == [3, 9, 27]
def test_hyperband_max_iter_bands():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 81,
"eta": 3,
"s": 3,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
run = next_run(sweep_config, [])
run.state = RunState.running
run.history = [{"loss": 10} for _ in range(4)]
run2 = next_run(sweep_config, [run])
run2.state = RunState.running
run2.history = [{"loss": 10 - i} for i in range(10)]
to_stop = stop_runs(sweep_config, [run, run2])
assert to_stop[0] is run
assert to_stop[0].early_terminate_info["bands"][:3] == [3, 9, 27]
def test_init_from_max_iter():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 18,
"eta": 3,
"s": 2,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
run = next_run(sweep_config, [])
run.state = RunState.running
run.history = [{"loss": 10} for _ in range(4)]
run2 = next_run(sweep_config, [run])
run2.state = RunState.running
run2.history = [{"loss": 10 - i} for i in range(10)]
to_stop = stop_runs(sweep_config, [run, run2])
assert to_stop[0] is run
assert to_stop[0].early_terminate_info["bands"] == [2, 6]
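# With max_iter M, eta, and s brackets, the asserted bands match
# band_k = M // eta**(s - k) for k in range(s), again inferred from the test
# expectations (18 // 3**2 == 2, 18 // 3**1 == 6; the max_iter=81, s=3 case
# above yields 3, 9, 27 the same way):
assert [18 // 3 ** (2 - k) for k in range(2)] == [2, 6]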
def test_single_run():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 18,
"eta": 3,
"s": 2,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
run = next_run(sweep_config, [])
run.state = RunState.running
run.history = [{"loss": 10 - i} for i in range(10)]
to_stop = stop_runs(sweep_config, [run])
assert len(to_stop) == 0
def test_2runs_band1_pass():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 18,
"eta": 3,
"s": 2,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
run = next_run(sweep_config, [])
run.state = RunState.running
run.history = [{"loss": 10}, {"loss": 10}, {"loss": 6}]
run2 = next_run(sweep_config, [run])
run2.state = RunState.running
run2.history = [{"loss": 10 - i} for i in range(10)]
to_stop = stop_runs(sweep_config, [run, run2])
assert len(to_stop) == 0
def test_5runs_band1_stop_2():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 5,
"eta": 2,
"s": 2,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
runs = [
SweepRun(
name="a",
state=RunState.finished, # This won't be stopped because it has already finished
history=[
{"loss": 10},
{"loss": 9},
],
),
SweepRun(
name="b",
state=RunState.running, # This should be stopped
history=[
{"loss": 10},
{"loss": 10},
],
),
SweepRun(
name="c",
state=RunState.running, # This passes band 1 but not band 2
history=[
{"loss": 10},
{"loss": 8},
{"loss": 8},
],
),
SweepRun(
name="d",
state=RunState.running,
history=[
{"loss": 10},
{"loss": 7},
{"loss": 7},
],
),
SweepRun(
name="e",
state=RunState.finished,
history=[
{"loss": 10},
{"loss": 6},
{"loss": 6},
],
),
]
to_stop = stop_runs(sweep_config, runs)
assert to_stop == runs[1:3]
def test_5runs_band1_stop_2_1stnoband():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 5,
"eta": 2,
"s": 2,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
runs = [
SweepRun(
name="a",
state=RunState.finished, # This won't be stopped because it has already finished
history=[
{"loss": 10},
],
),
SweepRun(
name="b",
state=RunState.running, # This should be stopped
history=[
{"loss": 10},
{"loss": 10},
],
),
SweepRun(
name="c",
state=RunState.running, # This passes band 1 but not band 2
history=[
{"loss": 10},
{"loss": 8},
{"loss": 8},
],
),
SweepRun(
name="d",
state=RunState.running,
history=[
{"loss": 10},
{"loss": 7},
{"loss": 7},
],
),
SweepRun(
name="e",
state=RunState.finished,
history=[
{"loss": 10},
{"loss": 6},
{"loss": 6},
],
),
]
to_stop = stop_runs(sweep_config, runs)
assert to_stop == runs[1:3]
def test_eta_3():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "minimize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 9,
"eta": 3,
"s": 2,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
runs = [
SweepRun(
name="a",
state=RunState.finished, # This won't be stopped because it has already finished
history=[
{"loss": 10},
{"loss": 9},
],
),
SweepRun(
name="b",
state=RunState.running, # This should be stopped
history=[
{"loss": 10},
{"loss": 10},
],
),
SweepRun(
name="c",
state=RunState.running, # This fails the first threshold but snuck in, so we won't kill it
history=[
{"loss": 10},
{"loss": 8},
{"loss": 8},
{"loss": 3},
],
),
SweepRun(
name="d",
state=RunState.running,
history=[
{"loss": 10},
{"loss": 7},
{"loss": 7},
{"loss": 4},
],
),
SweepRun(
name="e",
state=RunState.running, # This passes band 1 but doesn't pass band 2
history=[
{"loss": 10},
{"loss": 6},
{"loss": 6},
{"loss": 6},
],
),
]
# bands are at 1 and 3, thresholds are 7 and 4
to_stop = stop_runs(sweep_config, runs)
assert to_stop == [runs[1], runs[-1]]
def test_eta_3_max():
sweep_config = {
"method": "grid",
"metric": {"name": "loss", "goal": "maximize"},
"early_terminate": {
"type": "hyperband",
"max_iter": 9,
"eta": 3,
"s": 2,
},
"parameters": {"a": {"values": [1, 2, 3]}},
}
runs = [
SweepRun(
name="a",
state=RunState.finished, # This won't be stopped because it has already finished
history=[
{"loss": -10},
{"loss": -9},
],
),
SweepRun(
name="b",
state=RunState.running, # This should be stopped
history=[
{"loss": -10},
{"loss": -10},
],
),
SweepRun(
name="c",
state=RunState.running, # This fails the first threshold but snuck in, so we won't kill it
history=[
{"loss": -10},
{"loss": -8},
{"loss": -8},
{"loss": -3},
],
),
SweepRun(
name="d",
state=RunState.running,
history=[
{"loss": -10},
{"loss": -7},
{"loss": -7},
{"loss": -4},
],
),
SweepRun(
name="e",
state=RunState.running, # This passes band 1 but doesn't pass band 2
history=[
{"loss": -10},
{"loss": -6},
{"loss": -6},
{"loss": -6},
],
),
]
# bands are at 1 and 3, thresholds are 7 and 4
to_stop = stop_runs(sweep_config, runs)
assert to_stop == [runs[1], runs[-1]]
| 26.54902
| 99
| 0.426699
| 1,089
| 10,832
| 4.110193
| 0.089991
| 0.045576
| 0.084227
| 0.075961
| 0.966265
| 0.964924
| 0.946828
| 0.929625
| 0.920912
| 0.920912
| 0
| 0.041575
| 0.409343
| 10,832
| 407
| 100
| 26.614251
| 0.658018
| 0.058807
| 0
| 0.861496
| 0
| 0
| 0.123796
| 0
| 0
| 0
| 0
| 0
| 0.038781
| 1
| 0.027701
| false
| 0.00277
| 0.00277
| 0
| 0.030471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14aa938784f32d09230b31734bffa0e89b116aab
| 14,892
|
py
|
Python
|
lib/installed_clients/AbstractHandleClient.py
|
Tianhao-Gu/kb_gtdbtk
|
41ea2c98f2553e6ef795ea703ffa34b703058720
|
[
"MIT"
] | 3
|
2020-03-27T09:55:53.000Z
|
2021-12-08T07:44:57.000Z
|
lib/installed_clients/AbstractHandleClient.py
|
Tianhao-Gu/kb_gtdbtk
|
41ea2c98f2553e6ef795ea703ffa34b703058720
|
[
"MIT"
] | 32
|
2020-02-06T01:10:08.000Z
|
2021-10-15T05:44:07.000Z
|
lib/installed_clients/AbstractHandleClient.py
|
Tianhao-Gu/kb_gtdbtk
|
41ea2c98f2553e6ef795ea703ffa34b703058720
|
[
"MIT"
] | 5
|
2020-02-04T22:22:35.000Z
|
2020-10-30T19:07:54.000Z
|
# -*- coding: utf-8 -*-
############################################################
#
# Autogenerated by the KBase type compiler -
# any changes made here will be overwritten
#
############################################################
from __future__ import print_function
from .baseclient import BaseClient as _BaseClient # @UnusedImport
class AbstractHandle(object):
def __init__(
self, url=None, timeout=30 * 60, user_id=None,
password=None, token=None, ignore_authrc=False,
trust_all_ssl_certificates=False,
auth_svc='https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login'):
if url is None:
raise ValueError('A url is required')
self._service_ver = None
self._client = _BaseClient(
url, timeout=timeout, user_id=user_id, password=password,
token=token, ignore_authrc=ignore_authrc,
trust_all_ssl_certificates=trust_all_ssl_certificates,
auth_svc=auth_svc)
def persist_handle(self, handle, context=None):
"""
The persist_handle function writes the handle to a persistent store so that it can later be retrieved using the list_handles function.
:param handle: instance of type "Handle" -> structure: parameter
"hid" of type "HandleId" (Handle provides a unique reference that
enables access to the data files through functions provided as
part of the HandleService. In the case of using shock, the id is
the node id. In the case of using shock the value of type is
shock. In the future these values should be enumerated. The value of
url is the http address of the shock server, including the
protocol (http or https) and if necessary the port. The values of
remote_md5 and remote_sha1 are those computed on the file in the
remote data store. These can be used to verify uploads and
downloads.), parameter "file_name" of String, parameter "id" of
type "NodeId", parameter "type" of String, parameter "url" of
String, parameter "remote_md5" of String, parameter "remote_sha1"
of String
:returns: instance of String
"""
return self._client.call_method('AbstractHandle.persist_handle',
[handle], self._service_ver, context)
def hids_to_handles(self, hids, context=None):
"""
Given a list of handle ids, this function returns a list of handles.
This method is replaced by fetch_handles_by.
:param hids: instance of list of type "HandleId" (Handle provides a
unique reference that enables access to the data files through
functions provided as part of the HandleService. In the case of
using shock, the id is the node id. In the case of using shock the
value of type is shock. In the future these values should be
enumerated. The value of url is the http address of the shock
server, including the protocol (http or https) and if necessary
the port. The values of remote_md5 and remote_sha1 are those
computed on the file in the remote data store. These can be used
to verify uploads and downloads.)
:returns: instance of list of type "Handle" -> structure: parameter
"hid" of type "HandleId" (Handle provides a unique reference that
enables access to the data files through functions provided as
part of the HandleService. In the case of using shock, the id is
the node id. In the case of using shock the value of type is
shock. In the future these values should be enumerated. The value of
url is the http address of the shock server, including the
protocol (http or https) and if necessary the port. The values of
remote_md5 and remote_sha1 are those computed on the file in the
remote data store. These can be used to verify uploads and
downloads.), parameter "file_name" of String, parameter "id" of
type "NodeId", parameter "type" of String, parameter "url" of
String, parameter "remote_md5" of String, parameter "remote_sha1"
of String
"""
return self._client.call_method('AbstractHandle.hids_to_handles',
[hids], self._service_ver, context)
def ids_to_handles(self, ids, context=None):
"""
Given a list of ids, this function returns a list of handles.
In the case of Shock, the list of ids is a list of shock node ids.
This method is replaced by fetch_handles_by.
:param ids: instance of list of type "NodeId"
:returns: instance of list of type "Handle" -> structure: parameter
"hid" of type "HandleId" (Handle provides a unique reference that
enables access to the data files through functions provided as
part of the HandleService. In the case of using shock, the id is
the node id. In the case of using shock the value of type is
shock. In the future these values should be enumerated. The value of
url is the http address of the shock server, including the
protocol (http or https) and if necessary the port. The values of
remote_md5 and remote_sha1 are those computed on the file in the
remote data store. These can be used to verify uploads and
downloads.), parameter "file_name" of String, parameter "id" of
type "NodeId", parameter "type" of String, parameter "url" of
String, parameter "remote_md5" of String, parameter "remote_sha1"
of String
"""
return self._client.call_method('AbstractHandle.ids_to_handles',
[ids], self._service_ver, context)
def fetch_handles_by(self, params, context=None):
"""
This function selects records whose field column entry is in elements and returns a list of handles.
:param params: instance of type "FetchHandlesParams" -> structure:
parameter "elements" of list of String, parameter "field_name" of
String
:returns: instance of list of type "Handle" -> structure: parameter
"hid" of type "HandleId" (Handle provides a unique reference that
enables access to the data files through functions provided as
part of the HandleService. In the case of using shock, the id is
the node id. In the case of using shock the value of type is
shock. In the future these values should be enumerated. The value of
url is the http address of the shock server, including the
protocol (http or https) and if necessary the port. The values of
remote_md5 and remote_sha1 are those computed on the file in the
remote data store. These can be used to verify uploads and
downloads.), parameter "file_name" of String, parameter "id" of
type "NodeId", parameter "type" of String, parameter "url" of
String, parameter "remote_md5" of String, parameter "remote_sha1"
of String
"""
return self._client.call_method('AbstractHandle.fetch_handles_by',
[params], self._service_ver, context)
def is_owner(self, hids, context=None):
"""
Given a list of handle ids, this function determines if the underlying data is owned by the caller.
If any one of the handle ids references unreadable data, this function returns false.
:param hids: instance of list of type "HandleId" (Handle provides a
unique reference that enables access to the data files through
functions provided as part of the HandleService. In the case of
using shock, the id is the node id. In the case of using shock the
value of type is shock. In the future these values should be
enumerated. The value of url is the http address of the shock
server, including the protocol (http or https) and if necessary
the port. The values of remote_md5 and remote_sha1 are those
computed on the file in the remote data store. These can be used
to verify uploads and downloads.)
:returns: instance of Long
"""
return self._client.call_method('AbstractHandle.is_owner',
[hids], self._service_ver, context)
def delete_handles(self, handles, context=None):
"""
The delete_handles function takes a list of handles and deletes them on the handle service server.
:param handles: instance of list of type "Handle" -> structure:
parameter "hid" of type "HandleId" (Handle provides a unique
reference that enables access to the data files through functions
provided as part of the HandleService. In the case of using shock,
the id is the node id. In the case of using shock the value of
type is shock. In the future these values should be enumerated. The
value of url is the http address of the shock server, including
the protocol (http or https) and if necessary the port. The values
of remote_md5 and remote_sha1 are those computed on the file in
the remote data store. These can be used to verify uploads and
downloads.), parameter "file_name" of String, parameter "id" of
type "NodeId", parameter "type" of String, parameter "url" of
String, parameter "remote_md5" of String, parameter "remote_sha1"
of String
:returns: instance of Long
"""
return self._client.call_method('AbstractHandle.delete_handles',
[handles], self._service_ver, context)
def are_readable(self, hids, context=None):
"""
Given a list of handle ids, this function determines if the underlying data is readable by the caller.
If any one of the handle ids references unreadable data, this function returns false.
:param hids: instance of list of type "HandleId" (Handle provides a
unique reference that enables access to the data files through
functions provided as part of the HandleService. In the case of
using shock, the id is the node id. In the case of using shock the
value of type is shock. In the future these values should be
enumerated. The value of url is the http address of the shock
server, including the protocol (http or https) and if necessary
the port. The values of remote_md5 and remote_sha1 are those
computed on the file in the remote data store. These can be used
to verify uploads and downloads.)
:returns: instance of Long
"""
return self._client.call_method('AbstractHandle.are_readable',
[hids], self._service_ver, context)
def is_readable(self, hid, context=None):
"""
Given a handle id, this function queries the underlying data store to see if the data being referred to is readable by the caller.
:param hid: instance of type "HandleId" (Handle provides a unique
reference that enables access to the data files through functions
provided as part of the HandleService. In the case of using shock,
the id is the node id. In the case of using shock the value of
type is shock. In the future these values should be enumerated. The
value of url is the http address of the shock server, including
the protocol (http or https) and if necessary the port. The values
of remote_md5 and remote_sha1 are those computed on the file in
the remote data store. These can be used to verify uploads and
downloads.)
:returns: instance of Long
"""
return self._client.call_method('AbstractHandle.is_readable',
[hid], self._service_ver, context)
def add_read_acl(self, hids, username, context=None):
"""
The add_read_acl function will update the acl of the shock node that the handle references.
The function is only accessible to a specific list of users specified at startup time.
The underlying shock node will be made readable to the user requested.
:param hids: instance of list of type "HandleId" (Handle provides a
unique reference that enables access to the data files through
functions provided as part of the HandleService. In the case of
using shock, the id is the node id. In the case of using shock the
value of type is shock. In the future these values should be
enumerated. The value of url is the http address of the shock
server, including the protocol (http or https) and if necessary
the port. The values of remote_md5 and remote_sha1 are those
computed on the file in the remote data store. These can be used
to verify uploads and downloads.)
:param username: instance of String
:returns: instance of Long
"""
return self._client.call_method('AbstractHandle.add_read_acl',
[hids, username], self._service_ver, context)
def set_public_read(self, hids, context=None):
"""
The set_public_read function will update the acl of the shock node that the handle references to make the node globally readable.
The function is only accessible to a specific list of users specified at startup time.
:param hids: instance of list of type "HandleId" (Handle provides a
unique reference that enables access to the data files through
functions provided as part of the HandleService. In the case of
using shock, the id is the node id. In the case of using shock the
value of type is shock. In the future these values should be
enumerated. The value of url is the http address of the shock
server, including the protocol (http or https) and if necessary
the port. The values of remote_md5 and remote_sha1 are those
computed on the file in the remote data store. These can be used
to verify uploads and downloads.)
:returns: instance of Long
"""
return self._client.call_method('AbstractHandle.set_public_read',
[hids], self._service_ver, context)
def status(self, context=None):
return self._client.call_method('AbstractHandle.status',
[], self._service_ver, context)
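# A minimal usage sketch, assuming a reachable handle service; the url, token,
# and handle id below are hypothetical placeholders, and the parameter
# structure follows the fetch_handles_by docstring above:
#
#   client = AbstractHandle(url="https://<host>/services/handle_service", token="<token>")
#   handles = client.fetch_handles_by({"elements": ["KBH_68020"], "field_name": "hid"})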
| 59.095238
| 141
| 0.648402
| 2,049
| 14,892
| 4.636896
| 0.090776
| 0.023155
| 0.02084
| 0.025471
| 0.820124
| 0.802337
| 0.78097
| 0.78097
| 0.770129
| 0.762025
| 0
| 0.003523
| 0.294722
| 14,892
| 251
| 142
| 59.330677
| 0.901076
| 0.690975
| 0
| 0.081633
| 1
| 0.020408
| 0.126067
| 0.099146
| 0
| 0
| 0
| 0
| 0
| 1
| 0.244898
| false
| 0.040816
| 0.040816
| 0.020408
| 0.530612
| 0.020408
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
210e2bb2574a26237312c87319ce6a7e243b4fdb
| 153
|
py
|
Python
|
sendfilestome/utils.py
|
gpocentek/sendfilestome
|
ea41b220ad702350c20690a475c1de0a00702d32
|
[
"Apache-2.0"
] | null | null | null |
sendfilestome/utils.py
|
gpocentek/sendfilestome
|
ea41b220ad702350c20690a475c1de0a00702d32
|
[
"Apache-2.0"
] | 1
|
2019-05-01T16:28:38.000Z
|
2019-05-01T16:28:38.000Z
|
sendfilestome/utils.py
|
gpocentek/sendfilestome
|
ea41b220ad702350c20690a475c1de0a00702d32
|
[
"Apache-2.0"
] | null | null | null |
from django.conf import settings
def auth_enabled():
return (settings.SFTM_UPLOAD_AUTH_ENABLED or
settings.SFTM_DOWNLOAD_AUTH_ENABLED)
| 21.857143
| 48
| 0.764706
| 20
| 153
| 5.5
| 0.65
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183007
| 153
| 6
| 49
| 25.5
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
21143d56199f7bd4245ae25e033f2bd8ed606782
| 118
|
py
|
Python
|
integers/tests/test_reverse_number.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
integers/tests/test_reverse_number.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
integers/tests/test_reverse_number.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | 3
|
2020-10-07T20:24:45.000Z
|
2020-12-16T04:53:19.000Z
|
from integers.reverse_number import reverse_number


def test_reverse_number():
    assert reverse_number(123) == 321
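# --- Hedged sketch: one implementation consistent with the test above ---
# The real integers/reverse_number.py may differ; the sign handling for
# negative input is an assumption the test does not exercise.
def reverse_number_sketch(n: int) -> int:
    sign = -1 if n < 0 else 1
    return sign * int(str(abs(n))[::-1])


assert reverse_number_sketch(123) == 321
assert reverse_number_sketch(-450) == -54  # assumed: drops trailing zeros, keeps sign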
| 19.666667
| 50
| 0.79661
| 16
| 118
| 5.5625
| 0.625
| 0.58427
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.135593
| 118
| 5
| 51
| 23.6
| 0.813725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2121446b3fde005fd228f7804fda24fb08695744
| 1,628
|
py
|
Python
|
tests/test_dataloader.py
|
chris4540/DT2119-Final-Project
|
a9e665d2fcc91442bcd80171fe557b09fcd71d00
|
[
"MIT"
] | 1
|
2020-08-19T17:29:53.000Z
|
2020-08-19T17:29:53.000Z
|
tests/test_dataloader.py
|
chris4540/DT2119-Final-Project
|
a9e665d2fcc91442bcd80171fe557b09fcd71d00
|
[
"MIT"
] | null | null | null |
tests/test_dataloader.py
|
chris4540/DT2119-Final-Project
|
a9e665d2fcc91442bcd80171fe557b09fcd71d00
|
[
"MIT"
] | null | null | null |
from utils.dataloader import pad_seqs_to_batch
from utils.dataloader import TIMITDataset
from torch.utils.data import DataLoader
from torch.nn.utils.rnn import pad_packed_sequence
if __name__ == "__main__":
    # make data set
    print("==============================================")
    dataset = TIMITDataset(root="./data", split="test")
    dataloader = DataLoader(dataset=dataset, batch_size=100, shuffle=False, collate_fn=pad_seqs_to_batch)
    for packed_features, packed_labels in dataloader:
        features, _ = pad_packed_sequence(packed_features)
        labels, _ = pad_packed_sequence(packed_labels, padding_value=-1)
        print(features.shape)
        print(labels.shape)
    print("==============================================")
    dataset = TIMITDataset(root="./data", split="valid")
    dataloader = DataLoader(dataset=dataset, batch_size=100, shuffle=False, collate_fn=pad_seqs_to_batch)
    for packed_features, packed_labels in dataloader:
        features, _ = pad_packed_sequence(packed_features)
        labels, _ = pad_packed_sequence(packed_labels, padding_value=-1)
        print(features.shape)
        print(labels.shape)
    print("==============================================")
    dataset = TIMITDataset(root="./data", split="train")
    dataloader = DataLoader(dataset=dataset, batch_size=100, shuffle=False, collate_fn=pad_seqs_to_batch)
    for packed_features, packed_labels in dataloader:
        features, _ = pad_packed_sequence(packed_features)
        labels, _ = pad_packed_sequence(packed_labels, padding_value=-1)
        print(features.shape)
        print(labels.shape)
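# --- Self-contained illustration of the packing round-trip used above ---
# Independent of TIMITDataset; assumes only that torch is installed. Shapes
# follow the default (T_max, batch, features) layout of pad_packed_sequence.
import torch
from torch.nn.utils.rnn import pack_sequence

demo_seqs = [torch.ones(3, 2), torch.ones(1, 2)]             # two (T, F) sequences
demo_packed = pack_sequence(demo_seqs, enforce_sorted=True)  # lengths must be descending
demo_padded, demo_lengths = pad_packed_sequence(demo_packed, padding_value=-1)
print(demo_padded.shape)  # torch.Size([3, 2, 2])
print(demo_lengths)       # tensor([3, 1])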
| 50.875
| 105
| 0.668305
| 184
| 1,628
| 5.581522
| 0.222826
| 0.061344
| 0.115871
| 0.134372
| 0.817916
| 0.817916
| 0.781889
| 0.781889
| 0.781889
| 0.781889
| 0
| 0.008734
| 0.15602
| 1,628
| 32
| 106
| 50.875
| 0.738719
| 0.007985
| 0
| 0.724138
| 0
| 0
| 0.110285
| 0.085502
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.137931
| 0
| 0.137931
| 0.310345
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2155152ffc769c09ffe15bf9b0dd13912fc5703e
| 185
|
py
|
Python
|
plugins/confluence/komand_confluence/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/confluence/komand_confluence/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/confluence/komand_confluence/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .get_page.action import GetPage
from .get_page_content.action import GetPageContent
from .store_page_content.action import StorePageContent
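# --- Hedged consumer sketch (assumed, not part of the generated file) ---
# The re-exports above let plugin code import actions from the package root
# rather than from the per-action modules:
from komand_confluence.actions import GetPage, GetPageContent, StorePageContent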
| 37
| 55
| 0.843243
| 27
| 185
| 5.592593
| 0.62963
| 0.238411
| 0.145695
| 0.304636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113514
| 185
| 4
| 56
| 46.25
| 0.920732
| 0.2
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2161caa71244149c02317a5d3859412cf83a99a9
| 35,209
|
py
|
Python
|
tests/test_configuration.py
|
sergey-aganezov-jr/gos
|
fb4d210284f3037c5321250cb95f3901754feb6b
|
[
"MIT"
] | null | null | null |
tests/test_configuration.py
|
sergey-aganezov-jr/gos
|
fb4d210284f3037c5321250cb95f3901754feb6b
|
[
"MIT"
] | null | null | null |
tests/test_configuration.py
|
sergey-aganezov-jr/gos
|
fb4d210284f3037c5321250cb95f3901754feb6b
|
[
"MIT"
] | null | null | null |
import os
import unittest
from copy import deepcopy
from gos.configuration import Configuration
class ConfigurationTestCase(unittest.TestCase):
def setUp(self):
self.init_config = Configuration()
def test_initialization_top_level(self):
""" in simple initialization the top level section must be properly configured """
config = Configuration()
self.assertIn(config.DIR, config)
self.assertIn(config.LOGGER, config)
self.assertIn(config.IOSF, config)
self.assertIn(config.INPUT, config)
self.assertIn(config.ALGORITHM, config)
self.assertIn(config.OUTPUT, config)
self.assertIsInstance(config[config.LOGGER], dict)
self.assertIsInstance(config[config.INPUT], dict)
self.assertIsInstance(config[config.ALGORITHM], dict)
self.assertIsInstance(config[config.OUTPUT], dict)
def test_initialization_input_section(self):
""" input section of the overall configuration must have some default init values and is predefined with them """
config = Configuration()
input_section = config[config.INPUT]
self.assertIn(config.DIR, input_section)
self.assertIn(config.LOGGER, input_section)
self.assertIn(config.IOSF, input_section)
self.assertIn(config.SOURCE, input_section)
self.assertIsInstance(input_section[config.SOURCE], list)
self.assertIsInstance(input_section[config.LOGGER], dict)
def test_initialization_logger_section(self):
""" logger section is a top level configuration for GOS wide logger """
config = Configuration()
logger_section = config[config.LOGGER]
self.assertIn(config.NAME, logger_section)
self.assertIn(config.LEVEL, logger_section)
self.assertIn(config.FORMAT, logger_section)
self.assertIn(config.DESTINATION, logger_section)
def test_initialization_output_section(self):
""" output section configuration for GOS results to be put in"""
config = Configuration()
output_section = config[config.OUTPUT]
self.assertIn(config.DIR, output_section)
self.assertIn(config.LOGGER, output_section)
self.assertIn(config.IOSF, output_section)
self.assertIn(config.ASSEMBLY_POINTS, output_section)
self.assertIn(config.GENOMES, output_section)
self.assertIn(config.STATS, output_section)
self.assertIsInstance(output_section[config.STATS], dict)
self.assertIsInstance(output_section[config.ASSEMBLY_POINTS], dict)
self.assertIsInstance(output_section[config.GENOMES], dict)
def test_initialization_algorithm_section_executable_containers(self):
config = Configuration()
algorithm_section = config[config.ALGORITHM]
self.assertIn(config.EXECUTABLE_CONTAINERS, algorithm_section)
def test_initialization_algorithm_section(self):
""" algorithm section configuration for GOS workflow """
config = Configuration()
algorithm_section = config[config.ALGORITHM]
self.assertIn(config.IOSF, algorithm_section)
self.assertIn(config.LOGGER, algorithm_section)
self.assertIn(config.TASKS, algorithm_section)
self.assertIn(config.PIPELINE, algorithm_section)
self.assertIsInstance(algorithm_section[config.TASKS], dict)
self.assertIsInstance(algorithm_section[config.PIPELINE], dict)
def test_update_with_default_top_level_dir_empty(self):
""" top level configuration field "dir" default fallback when it is not specified """
self.init_config[self.init_config.DIR] = None
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.DIR], os.getcwd())
self.init_config[self.init_config.DIR] = ""
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.DIR], os.getcwd())
def test_update_with_default_to_level_dir_predefined(self):
""" top level configuration field "dir" default fallback when it is specified """
self.init_config[self.init_config.DIR] = os.path.join("dir1", "dir2")
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.DIR], os.path.join("dir1", "dir2"))
def test_update_with_default_top_level_io_silent_fail_empty(self):
""" top level configuration field "io_silent_fail" default fallback when its not specified """
self.init_config[self.init_config.IOSF] = None
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.IOSF], self.init_config.DEFAULT_IOSF)
self.init_config[self.init_config.IOSF] = ""
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.IOSF], self.init_config.DEFAULT_IOSF)
def test_update_with_default_top_level_io_silent_fail_predefined(self):
""" top level configuration field "io_silent_fail" default fallback when its specified """
self.init_config[self.init_config.IOSF] = True
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.IOSF], True)
self.init_config[self.init_config.IOSF] = False
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.IOSF], False)
self.init_config[self.init_config.IOSF] = "CustomValue" # anything that works for if
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.IOSF], "CustomValue")
def test_update_with_default_logger_name_empty(self):
self.init_config[self.init_config.LOGGER][self.init_config.NAME] = ""
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.NAME],
self.init_config.DEFAULT_LOGGER_NAME)
self.init_config[self.init_config.LOGGER][self.init_config.NAME] = None
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.NAME],
self.init_config.DEFAULT_LOGGER_NAME)
def test_update_with_default_logger_name_predefined(self):
self.init_config[self.init_config.LOGGER][self.init_config.NAME] = True
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.NAME],
str(True))
self.init_config[self.init_config.LOGGER][self.init_config.NAME] = "MyName"
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.NAME],
"MyName")
def test_update_with_default_logger_level_empty(self):
self.init_config[self.init_config.LOGGER][self.init_config.LEVEL] = ""
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.LEVEL],
self.init_config.DEFAULT_LOGGER_LEVEL)
self.init_config[self.init_config.LOGGER][self.init_config.LEVEL] = None
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.LEVEL],
self.init_config.DEFAULT_LOGGER_LEVEL)
def test_update_with_default_logger_level_predefined(self):
self.init_config[self.init_config.LOGGER][self.init_config.LEVEL] = "MyLevel"
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.LEVEL],
"MyLevel")
self.init_config[self.init_config.LOGGER][self.init_config.LEVEL] = True
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.LEVEL],
str(True))
def test_update_with_default_logger_format_empty(self):
self.init_config[self.init_config.LOGGER][self.init_config.FORMAT] = ""
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.FORMAT],
self.init_config.DEFAULT_LOGGER_FORMAT)
self.init_config[self.init_config.LOGGER][self.init_config.FORMAT] = None
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.FORMAT],
self.init_config.DEFAULT_LOGGER_FORMAT)
def test_update_with_default_logger_format_predefined(self):
self.init_config[self.init_config.LOGGER][self.init_config.FORMAT] = "MyFormat"
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.FORMAT],
"MyFormat")
self.init_config[self.init_config.LOGGER][self.init_config.FORMAT] = True
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.LOGGER][self.init_config.FORMAT],
str(True))
def test_update_with_default_input_source_empty(self):
for empty_value in (None, ""):
self.init_config[self.init_config.INPUT][self.init_config.SOURCE] = empty_value
self.init_config.update_with_default_values()
self.assertListEqual(self.init_config[self.init_config.INPUT][self.init_config.SOURCE],
[])
def test_update_with_default_input_source_predefined(self):
for source_value in [["path1", "path2"], ["path3", "path4", "path5"]]:
self.init_config[self.init_config.INPUT][self.init_config.SOURCE] = source_value
self.init_config.update_with_default_values()
self.assertListEqual(source_value,
self.init_config[self.init_config.INPUT][self.init_config.SOURCE])
def test_update_with_default_input_dir_empty(self):
for empty_value in (None, ""):
self.init_config[self.init_config.INPUT][self.init_config.DIR] = empty_value
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.INPUT][self.init_config.DIR],
self.init_config.DEFAULT_INPUT_DIR)
def test_update_with_default_input_io_silent_fail_empty(self):
for empty_value in (None, ""):
for top_level_iosf_value in (True, False):
self.init_config[self.init_config.INPUT][self.init_config.IOSF] = empty_value
self.init_config[self.init_config.IOSF] = top_level_iosf_value
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.INPUT][self.init_config.IOSF],
top_level_iosf_value)
def get_list_of_logger_configurations(self):
return [{
self.init_config.NAME: "Logger Name 1",
self.init_config.LEVEL: "info 1",
self.init_config.FORMAT: "format 1",
self.init_config.DESTINATION: "destination 1"
}, {
self.init_config.NAME: "Logger Name 2",
self.init_config.LEVEL: "info 2",
self.init_config.FORMAT: "format 2",
self.init_config.DESTINATION: "destination 2"
}]
def test_update_with_default_input_logger_empty(self):
top_level_loggers = self.get_list_of_logger_configurations()
for logger_config in top_level_loggers:
self.init_config = Configuration()
self.init_config[self.init_config.INPUT][self.init_config.LOGGER] = {}
self.init_config[self.init_config.LOGGER] = logger_config
self.init_config.update_with_default_values()
self.assertDictEqual(self.init_config[self.init_config.INPUT][self.init_config.LOGGER],
logger_config)
def test_update_with_default_input_logger_partially_predefined(self):
partial_logger_configs = [
{self.init_config.NAME: "My name",
self.init_config.LEVEL: "My level"},
{self.init_config.LEVEL: "My level 2"},
{self.init_config.FORMAT: "My format",
self.init_config.DESTINATION: "My destination"}
]
for partial_logger_config in partial_logger_configs:
for full_logger_config in self.get_list_of_logger_configurations():
self.init_config[self.init_config.INPUT][self.init_config.LOGGER] = deepcopy(partial_logger_config)
self.init_config[self.init_config.LOGGER] = full_logger_config
self.init_config.update_with_default_values()
for key, value in full_logger_config.items():
if key not in partial_logger_config:
self.assertEqual(full_logger_config[key],
self.init_config[self.init_config.INPUT][self.init_config.LOGGER][key])
else:
self.assertEqual(partial_logger_config[key],
self.init_config[self.init_config.INPUT][self.init_config.LOGGER][key])
def test_update_with_default_input_logger_specified(self):
for full_logger_spec in self.get_list_of_logger_configurations():
self.init_config[self.init_config.INPUT][self.init_config.LOGGER] = deepcopy(full_logger_spec)
self.init_config.update_with_default_values()
self.assertDictEqual(full_logger_spec,
self.init_config[self.init_config.INPUT][self.init_config.LOGGER])
def test_update_with_default_output_dir_empty(self):
for empty_value in (None, ""):
self.init_config[self.init_config.OUTPUT][self.init_config.DIR] = empty_value
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.DIR],
os.path.join(self.init_config[self.init_config.DIR],
self.init_config.DEFAULT_OUTPUT_DIR))
def test_update_with_default_output_io_silent_fail_empty(self):
for empty_value in (None, ""):
for top_level_iosf_value in (True, False):
self.init_config[self.init_config.OUTPUT][self.init_config.IOSF] = empty_value
self.init_config[self.init_config.IOSF] = top_level_iosf_value
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.IOSF],
top_level_iosf_value)
def test_update_with_default_output_logger_empty(self):
for empty_value in (None, "", {}):
top_level_loggers = self.get_list_of_logger_configurations()
for logger_config in top_level_loggers:
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER] = empty_value
self.init_config[self.init_config.LOGGER] = logger_config
self.init_config.update_with_default_values()
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER],
logger_config)
def test_update_with_default_output_logger_partially_predefined(self):
partial_logger_configs = [
{self.init_config.NAME: "My name",
self.init_config.LEVEL: "My level"},
{self.init_config.LEVEL: "My level 2"},
{self.init_config.FORMAT: "My format",
self.init_config.DESTINATION: "My destination"}
]
for partial_logger_config in partial_logger_configs:
for full_logger_config in self.get_list_of_logger_configurations():
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER] = deepcopy(partial_logger_config)
self.init_config[self.init_config.LOGGER] = full_logger_config
self.init_config.update_with_default_values()
for key, value in full_logger_config.items():
if key not in partial_logger_config:
self.assertEqual(full_logger_config[key],
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER][key])
else:
self.assertEqual(partial_logger_config[key],
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER][key])
def test_update_with_default_output_logger_specified(self):
for full_logger_spec in self.get_list_of_logger_configurations():
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER] = deepcopy(full_logger_spec)
self.init_config.update_with_default_values()
self.assertDictEqual(full_logger_spec,
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER])
def test_update_with_default_output_stats_empty(self):
for dir_name in ("output_dir1", "output_dir2", "output_dir3"):
for iosf_value in (True, False):
for logger_value in self.get_list_of_logger_configurations():
self.init_config[self.init_config.OUTPUT][self.init_config.STATS] = {}
self.init_config[self.init_config.OUTPUT][self.init_config.DIR] = dir_name
self.init_config[self.init_config.OUTPUT][self.init_config.IOSF] = iosf_value
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER] = logger_value
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.STATS][self.init_config.FILE],
self.init_config.DEFAULT_OUTPUT_STATS_FILE)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.STATS][self.init_config.DIR],
self.init_config.DEFAULT_OUTPUT_STATS_DIR)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.STATS][self.init_config.IOSF],
iosf_value)
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.STATS][self.init_config.LOGGER],
logger_value)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.STATS][self.init_config.FILE],
self.init_config.DEFAULT_OUTPUT_STATS_FILE)
def get_full_stats_configs(self):
return [
{self.init_config.DIR: "stat_dir_predefined_1",
self.init_config.FILE: "file_predefined_1.txt",
self.init_config.LOGGER: self.get_list_of_logger_configurations()[0],
self.init_config.IOSF: True},
{self.init_config.DIR: "stat_dir_predefined_2",
self.init_config.FILE: "file_predefined_2.txt",
self.init_config.LOGGER: self.get_list_of_logger_configurations()[0],
self.init_config.IOSF: False},
]
def test_update_with_default_output_stats_partially_predefined(self):
partial_stats_configs = [
{self.init_config.DIR: "stats_dir",
self.init_config.FILE: "my_file_name.txt"},
{self.init_config.IOSF: True},
{self.init_config.DIR: "my_dir",
self.init_config.FILE: "my_file_name2.txt",
self.init_config.LOGGER: self.get_list_of_logger_configurations()[0]}
]
for partial_stats_config in partial_stats_configs:
for full_stats_config in self.get_full_stats_configs():
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER] = full_stats_config[self.init_config.LOGGER]
self.init_config[self.init_config.OUTPUT][self.init_config.STATS] = deepcopy(partial_stats_config)
self.init_config[self.init_config.OUTPUT][self.init_config.IOSF] = full_stats_config[self.init_config.IOSF]
self.init_config.update_with_default_values()
for key, value in partial_stats_config.items():
self.assertEqual(partial_stats_config[key],
self.init_config[self.init_config.OUTPUT][self.init_config.STATS][key])
def test_update_with_default_output_stats_predefined(self):
for full_stats_config in self.get_full_stats_configs():
self.init_config[self.init_config.OUTPUT][self.init_config.STATS] = deepcopy(full_stats_config)
self.init_config.update_with_default_values()
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.STATS],
full_stats_config)
def test_update_with_default_output_assembly_points_empty(self):
self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS] = {}
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.FILE],
self.init_config.DEFAULT_OUTPUT_AP_FILE)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.DIR],
self.init_config.DEFAULT_OUTPUT_AP_DIR)
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.LOGGER],
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER])
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.GENOME_SPECIFIC],
self.init_config.DEFAULT_OUTPUT_AP_GENOME_SPECIFIC)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.GENOME_SPECIFIC_FNP],
self.init_config.DEFAULT_OUTPUT_AP_GSFNP)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.IOSF],
self.init_config[self.init_config.OUTPUT][self.init_config.IOSF])
def test_update_with_default_output_assembly_points_partially_predefined(self):
partial_ap_config = {
self.init_config.DIR: "my_ap_dir",
self.init_config.GENOME_SPECIFIC: True,
self.init_config.IOSF: False
}
self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS] = partial_ap_config
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.FILE],
self.init_config.DEFAULT_OUTPUT_AP_FILE)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.DIR],
partial_ap_config[self.init_config.DIR])
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.LOGGER],
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER])
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.GENOME_SPECIFIC],
partial_ap_config[self.init_config.GENOME_SPECIFIC])
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.GENOME_SPECIFIC_FNP],
self.init_config.DEFAULT_OUTPUT_AP_GSFNP)
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][self.init_config.IOSF],
partial_ap_config[self.init_config.IOSF])
def test_update_with_default_output_assembly_points_predefined(self):
full_ap_config = {
self.init_config.FILE: "my_ap_file.txt",
self.init_config.DIR: "my_ap_dir",
self.init_config.IOSF: True,
self.init_config.GENOME_SPECIFIC: True,
self.init_config.GENOME_SPECIFIC_FNP: "my_patter_string_{genome_name}.txt",
self.init_config.LOGGER: self.get_list_of_logger_configurations()[0]
}
self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS] = full_ap_config
self.init_config.update_with_default_values()
for key, value in full_ap_config.items():
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.ASSEMBLY_POINTS][key],
full_ap_config[key])
def test_update_with_default_output_genomes_empty(self):
self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES] = {}
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.DIR],
self.init_config.DEFAULT_OUTPUT_GENOMES_DIR)
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.LOGGER],
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER])
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.IOSF],
self.init_config[self.init_config.OUTPUT][self.init_config.IOSF])
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.OUTPUT_NG_FRAGMENTS],
self.init_config.DEFAULT_OUTPUT_GENOMES_ONGF)
def test_update_with_default_output_genomes_partially_predefined(self):
partial_genomes_config = {
self.init_config.OUTPUT_NG_FRAGMENTS: True,
self.init_config.IOSF: False
}
self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES] = partial_genomes_config
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.DIR],
self.init_config.DEFAULT_OUTPUT_GENOMES_DIR)
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.LOGGER],
self.init_config[self.init_config.OUTPUT][self.init_config.LOGGER])
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.IOSF],
partial_genomes_config[self.init_config.IOSF])
self.assertEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES][self.init_config.OUTPUT_NG_FRAGMENTS],
partial_genomes_config[self.init_config.OUTPUT_NG_FRAGMENTS])
def test_update_with_default_output_genomes_predefined(self):
predefined_genome_config = {
self.init_config.OUTPUT_NG_FRAGMENTS: True,
self.init_config.IOSF: False,
self.init_config.LOGGER: self.get_list_of_logger_configurations()[0],
self.init_config.DIR: "my_genome_dir"
}
self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES] = predefined_genome_config
self.init_config.update_with_default_values()
self.assertDictEqual(self.init_config[self.init_config.OUTPUT][self.init_config.GENOMES],
predefined_genome_config)
def test_update_with_default_algorithm_empty(self):
self.init_config[self.init_config.ALGORITHM] = {}
self.init_config.update_with_default_values()
self.assertDictEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.LOGGER],
self.init_config[self.init_config.LOGGER])
self.assertDictEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.TASKS], {
self.init_config.PATHS: [self.init_config.DEFAULT_ALGORITHM_TASKS_PATH]})
self.assertEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.EXECUTABLE_CONTAINERS], [])
expected_pipeline_config = {
self.init_config.LOGGER: self.init_config[self.init_config.ALGORITHM][self.init_config.LOGGER],
self.init_config.SELF_LOOP: self.init_config.DEFAULT_ALGORITHM_PIPELINE_SELF_LOOP,
self.init_config.ENTRIES: [],
self.init_config.IOSF: self.init_config[self.init_config.ALGORITHM][self.init_config.IOSF]
}
self.assertDictEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.PIPELINE],
expected_pipeline_config)
def test_update_with_default_algorithm_predefined_tasks_paths(self):
my_path_list = ["my_path1", "my_path2"]
self.init_config[self.init_config.ALGORITHM] = {
self.init_config.TASKS: {
self.init_config.PATHS: deepcopy(my_path_list)
}
}
self.init_config.update_with_default_values()
self.assertIn(self.init_config.DEFAULT_ALGORITHM_TASKS_PATH,
self.init_config[self.init_config.ALGORITHM][self.init_config.TASKS][self.init_config.PATHS])
for my_path in my_path_list:
self.assertIn(my_path,
self.init_config[self.init_config.ALGORITHM][self.init_config.TASKS][self.init_config.PATHS])
def test_update_with_default_algorithm_pipeline_logger(self):
self.init_config[self.init_config.ALGORITHM] = {
self.init_config.PIPELINE: {
self.init_config.ENTRIES: []
}
}
self.init_config.update_with_default_values()
self.assertDictEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.PIPELINE][self.init_config.LOGGER],
self.init_config[self.init_config.ALGORITHM][self.init_config.LOGGER])
def test_update_with_default_algorithm_predefined(self):
predefined_algorithm_config = {
self.init_config.IOSF: False,
self.init_config.LOGGER: self.get_list_of_logger_configurations()[0],
self.init_config.TASKS: {
self.init_config.PATHS: ["my_path_1", "my_path_2"]
},
self.init_config.PIPELINE: {
self.init_config.LOGGER: self.get_list_of_logger_configurations()[1],
self.init_config.SELF_LOOP: False,
self.init_config.ENTRIES: ["round1", "round2"]
}
}
self.init_config[self.init_config.ALGORITHM] = deepcopy(predefined_algorithm_config)
self.init_config.update_with_default_values()
self.assertEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.IOSF],
predefined_algorithm_config[self.init_config.IOSF])
self.assertDictEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.LOGGER],
predefined_algorithm_config[self.init_config.LOGGER])
self.assertListEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.TASKS][self.init_config.PATHS],
[self.init_config.DEFAULT_ALGORITHM_TASKS_PATH] +
predefined_algorithm_config[self.init_config.TASKS][self.init_config.PATHS])
predefined_algorithm_config[self.init_config.PIPELINE][self.init_config.IOSF] = self.init_config[self.init_config.ALGORITHM][self.init_config.IOSF]
self.assertDictEqual(self.init_config[self.init_config.ALGORITHM][self.init_config.PIPELINE],
predefined_algorithm_config[self.init_config.PIPELINE])
def test_update_with_default_algorithm_specified_executable_container_instantiation(self):
self.set_up_executable_containers_for_algorithm_section()
self.init_config.update_with_default_values()
self.assertIsInstance(self.init_config[self.init_config.ALGORITHM]["stages"], list)
def set_up_executable_containers_for_algorithm_section(self):
ecs = [
{
"name": "stage",
"reference": "stages",
"entry_type_name": "task"
}
]
self.init_config[self.init_config.ALGORITHM][self.init_config.EXECUTABLE_CONTAINERS] = ecs
def test_update_with_default_algorithm_automatically_generated_reference_for_executable_container(self):
self.init_config[self.init_config.ALGORITHM][self.init_config.EXECUTABLE_CONTAINERS] = [{
"name": "stage",
"entry_type_name": "task"
}]
self.init_config.update_with_default_values()
ecs = self.init_config[self.init_config.ALGORITHM][self.init_config.EXECUTABLE_CONTAINERS]
self.assertEqual(ecs[0]["reference"], "stages")
def test_update_with_default_algorithm_specified_executable_container_partially_specification(self):
self.set_up_executable_containers_for_algorithm_section()
self.init_config[self.init_config.ALGORITHM]["stages"] = [
{
self.init_config.NAME: "stage1",
},
{
self.init_config.NAME: "stage2",
self.init_config.ENTRIES: ["task1", "task2"]
},
{
self.init_config.NAME: "stage3",
self.init_config.SELF_LOOP: False,
self.init_config.ENTRIES: ["task1", "task2", "task3"]
}
]
self.init_config.update_with_default_values()
stages = self.init_config[self.init_config.ALGORITHM]["stages"]
self.assertIsInstance(stages, list)
self.assertEqual(len(stages), 3)
stage1, stage2, stage3 = stages
self.assertEqual(stage1[self.init_config.NAME], "stage1")
self.assertEqual(stage1[self.init_config.SELF_LOOP], self.init_config.DEFAULT_ALGORITHM_EC_SELF_LOOP)
self.assertListEqual(stage1[self.init_config.ENTRIES], [])
self.assertEqual(stage2[self.init_config.NAME], "stage2")
self.assertEqual(stage2[self.init_config.SELF_LOOP], self.init_config.DEFAULT_ALGORITHM_EC_SELF_LOOP)
self.assertListEqual(stage2[self.init_config.ENTRIES], ["task1", "task2"])
self.assertEqual(stage3[self.init_config.NAME], "stage3")
self.assertEqual(stage3[self.init_config.SELF_LOOP], False)
self.assertListEqual(stage3[self.init_config.ENTRIES], ["task1", "task2", "task3"])
if __name__ == '__main__':
unittest.main()
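# --- Hedged sketch of the pattern these tests exercise (assumed names) ---
# gos.configuration.Configuration is far richer; this mini version only shows
# the dict-plus-update_with_default_values() fallback idiom the tests rely on.
import os


class MiniConfig(dict):
    DIR, IOSF = "dir", "io_silent_fail"
    DEFAULT_IOSF = True

    def __init__(self):
        super().__init__({self.DIR: None, self.IOSF: None})

    def update_with_default_values(self):
        if not self[self.DIR]:             # None/"" fall back to cwd
            self[self.DIR] = os.getcwd()
        if self[self.IOSF] in (None, ""):  # empty falls back; False is kept
            self[self.IOSF] = self.DEFAULT_IOSF


mini = MiniConfig()
mini.update_with_default_values()
assert mini[mini.DIR] == os.getcwd() and mini[mini.IOSF] is True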
| 59.879252
| 155
| 0.68616
| 4,318
| 35,209
| 5.243168
| 0.039138
| 0.216608
| 0.379064
| 0.158127
| 0.895495
| 0.841299
| 0.798277
| 0.742624
| 0.715283
| 0.690857
| 0
| 0.00254
| 0.217274
| 35,209
| 587
| 156
| 59.981261
| 0.81897
| 0.020137
| 0
| 0.408829
| 0
| 0
| 0.022325
| 0.003426
| 0
| 0
| 0
| 0
| 0.236084
| 1
| 0.092131
| false
| 0
| 0.007678
| 0.003839
| 0.105566
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0d013e57ff4d47d38f1e451d5a25719e25f5b941
| 147,934
|
py
|
Python
|
tests/testflows/datetime64_extended_range/requirements/requirements.py
|
mga-chka/ClickHouse
|
4fe722d1a460aea63de883299790ae8671424fae
|
[
"Apache-2.0"
] | 5
|
2021-05-14T02:46:44.000Z
|
2021-11-23T04:58:20.000Z
|
tests/testflows/datetime64_extended_range/requirements/requirements.py
|
mga-chka/ClickHouse
|
4fe722d1a460aea63de883299790ae8671424fae
|
[
"Apache-2.0"
] | 5
|
2021-05-21T06:26:01.000Z
|
2021-08-04T04:57:36.000Z
|
tests/testflows/datetime64_extended_range/requirements/requirements.py
|
mga-chka/ClickHouse
|
4fe722d1a460aea63de883299790ae8671424fae
|
[
"Apache-2.0"
] | 8
|
2021-05-12T01:38:18.000Z
|
2022-02-10T06:08:41.000Z
|
# These requirements were auto generated
# from software requirements specification (SRS)
# document by TestFlows v1.6.210505.1133630.
# Do not edit by hand but re-generate instead
# using 'tfs requirements generate' command.
from testflows.core import Specification
from testflows.core import Requirement
Heading = Specification.Heading
RQ_SRS_010_DateTime64_ExtendedRange = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support extended range for the [DateTime64] data type that includes dates from the year **1925** to **2238**.\n'
'\n'
),
link=None,
level=4,
num='4.1.0.1')
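# --- Hedged sketch: how a testflows test typically links to a requirement ---
# The decorator usage below is assumed from common testflows suites; this
# file only defines the Requirement objects themselves.
from testflows.core import TestScenario, Requirements

@TestScenario
@Requirements(RQ_SRS_010_DateTime64_ExtendedRange("1.0"))
def extended_range_declared(self):
    pass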
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_Start = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper time handling around the normal date range that starts at `1970-01-01 00:00:00.000`\n'
'expressed using the [ISO 8601 format].\n'
'\n'
),
link=None,
level=4,
num='4.1.0.2')
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_Start_BeforeEpochForTimeZone = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start.BeforeEpochForTimeZone',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper time handling around the start of the [normal date range]\n'
'when this time for the time zone is before the start of the [normal date range].\n'
'\n'
),
link=None,
level=4,
num='4.1.0.3')
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_End = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper time handling around the normal date range that ends at `2105-12-31T23:59:59.99999`\n'
'expressed using the [ISO 8601 format].\n'
'\n'
),
link=None,
level=4,
num='4.1.0.4')
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_End_AfterEpochForTimeZone = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End.AfterEpochForTimeZone',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper time handling around the end of the [normal date range]\n'
'when this time for the time zone is after the end of the [normal date range].\n'
'\n'
),
link=None,
level=4,
num='4.1.0.5')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper conversion to and from [DateTime64] data type from other data types.\n'
'\n'
),
link=None,
level=4,
num='4.1.0.6')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [Dates and Times Functions] with the [DateTime64] data type\n'
'when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=4,
num='4.1.0.7')
RQ_SRS_010_DateTime64_ExtendedRange_TimeZones = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TimeZones',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation with the [DateTime64] extended range data type\n'
'when combined with a supported time zone.\n'
'\n'
),
link=None,
level=4,
num='4.1.0.8')
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of non-existent times when using [DateTime64] extended range data type.\n'
'\n'
),
link=None,
level=4,
num='4.1.0.9')
RQ_SRS_010_DateTime64_ExtendedRange_Comparison = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.Comparison',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of time comparison when using [DateTime64] extended range data type.\n'
"For example, `SELECT toDateTime64('2019-05-05 20:20:12.050', 3) < now()`.\n"
'\n'
),
link=None,
level=4,
num='4.1.0.10')
RQ_SRS_010_DateTime64_ExtendedRange_SpecificTimestamps = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.SpecificTimestamps',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL properly work with the following timestamps in all supported timezones:\n'
'```\n'
'[9961200,73476000,325666800,354675600,370400400,386125200,388566010,401850000,417574811,496803600,528253200,624423614,636516015,671011200,717555600,752047218,859683600,922582800,1018173600,1035705600,1143334800,1162105223,1174784400,1194156000,1206838823,1224982823,1236495624,1319936400,1319936424,1425798025,1459040400,1509872400,2090451627,2140668000]\n'
'```\n'
'\n'
'\n'
),
link=None,
level=4,
num='4.1.0.11')
RQ_SRS_010_DateTime64_ExtendedRange_Start = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.Start',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support extended range for the [DateTime64] data type that starts at `1925-01-01T00:00:00.000000`\n'
'expressed using the [ISO 8601 format].\n'
'\n'
),
link=None,
level=4,
num='4.2.0.1')
RQ_SRS_010_DateTime64_ExtendedRange_End = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.End',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support extended range for the [DateTime64] data type that ends at `2238-12-31T23:59:59.999999`\n'
'expressed using the [ISO 8601 format].\n'
'\n'
),
link=None,
level=4,
num='4.2.0.2')
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_InvalidDate = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidDate',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of invalid dates when using [DateTime64] extended range data type,\n'
'such as:\n'
'\n'
'* `YYYY-04-31, YYYY-06-31, YYYY-09-31, YYYY-11-31`\n'
'* `1990-02-30 00:00:02`\n'
'\n'
),
link=None,
level=5,
num='4.2.0.3.1')
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_InvalidTime = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidTime',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of invalid time for a timezone\n'
'when using [DateTime64] extended range data type, for example,\n'
'\n'
'* `2002-04-07 02:30:00` never happened at all in the US/Eastern timezone ([Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime))\n'
'\n'
'\n'
),
link=None,
level=5,
num='4.2.0.3.2')
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_TimeZoneSwitch = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.TimeZoneSwitch',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of invalid time when using [DateTime64] extended range data type\n'
'when the invalid time is caused when *countries switch timezone definitions with no\n'
'daylight savings time switch* [Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime).\n'
'\n'
'>\n'
'> For example, in 1915 Warsaw switched from Warsaw time to Central European time with\n'
'> no daylight savings transition. So at the stroke of midnight on August 5th 1915 the clocks\n'
'> were wound back 24 minutes creating an ambiguous time period that cannot be specified without\n'
'> referring to the timezone abbreviation or the actual UTC offset. In this case midnight happened twice,\n'
'> neither time during a daylight saving time period. pytz handles this transition by treating the ambiguous\n'
'> period before the switch as daylight savings time, and the ambiguous period after as standard time.\n'
'>\n'
'> [Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime)\n'
'\n'
),
link=None,
level=5,
num='4.2.0.3.3')
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_DaylightSavingTime = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of invalid time when using [DateTime64] extended range data type\n'
'when for a given timezone time switches from standard to daylight saving.\n'
'\n'
'> For example, in the US/Eastern timezone on the last Sunday morning in October, the following sequence happens:\n'
'>\n'
'> 01:00 EDT occurs\n'
'> 1 hour later, instead of 2:00am the clock is turned back 1 hour and 01:00 happens again (this time 01:00 EST)\n'
'> In fact, every instant between 01:00 and 02:00 occurs twice.\n'
'> [Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime)\n'
'\n'
),
link=None,
level=5,
num='4.2.0.3.4')
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_DaylightSavingTime_Disappeared = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime.Disappeared',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of invalid time when using [DateTime64] extended range data type\n'
'for a given timezone when transition from the standard to daylight saving time causes an hour to disappear.\n'
'\n'
"Expected behavior: if DateTime64 initialized by a skipped time value, it is being treated as DST and resulting value will be an hour earlier, e.g. `SELECT toDateTime64('2020-03-08 02:34:00', 0, 'America/Denver')` returns `2020-03-08 01:34:00`.\n"
'\n'
),
link=None,
level=5,
num='4.2.0.3.5')
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_LeapSeconds = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.LeapSeconds',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support proper handling of leap seconds adjustments when using [DateTime64] extended range data type.\n'
'\n'
),
link=None,
level=5,
num='4.2.0.3.6')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toTimeZone = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTimeZone',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toTimeZone](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#totimezone)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.1')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYear = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYear',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyear)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.2')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toQuarter = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toQuarter',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toQuarter](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toquarter)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.3')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toMonth = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonth',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toMonth](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tomonth)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.4')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toDayOfYear = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfYear',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toDayOfYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#todayofyear)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.5')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toDayOfMonth = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfMonth',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toDayOfMonth](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#todayofmonth)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.6')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toDayOfWeek = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfWeek',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toDayOfWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#todayofweek)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.7')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toHour = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toHour',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toHour](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tohour)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.8')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toMinute = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMinute',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toMinute](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tominute)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.9')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toSecond = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toSecond',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toSecond](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tosecond)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.10')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toUnixTimestamp = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toUnixTimestamp',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toUnixTimestamp](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#to-unix-timestamp)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'The timestamp value is expected to be negative when the DateTime64 value is prior to `1970-01-01` and positive otherwise.\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.11')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfYear = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfYear',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofyear)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.12')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfISOYear = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfISOYear',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfISOYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofisoyear)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.13')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfQuarter = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfQuarter',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfQuarter](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofquarter)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.14')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfMonth = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMonth',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfMonth](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofmonth)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.15')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toMonday = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonday',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toMonday](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tomonday)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.16')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfWeek = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfWeek',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofweektmode)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.17')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfDay = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfDay',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfDay](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofday)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.18')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfHour = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfHour',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfHour](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofhour)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.19')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfMinute = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMinute',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfMinute](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofminute)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.20')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfSecond = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfSecond',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfSecond](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofsecond)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.21')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfFiveMinute = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFiveMinute',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfFiveMinute](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartoffiveminute)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.22')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfTenMinutes = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfTenMinutes',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfTenMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartoftenminutes)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.23')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfFifteenMinutes = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFifteenMinutes',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfFifteenMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartoffifteenminutes)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.24')
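# Illustrative sketch, not generated content: the toStartOf* rounding family
# should round down consistently even for pre-epoch values inside the extended
# date range, e.g. in clickhouse-client:
#
#   SELECT toStartOfFifteenMinutes(toDateTime64('1925-05-17 10:22:30', 0))
#   -- expected: 1925-05-17 10:15:00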
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfInterval = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfInterval',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toStartOfInterval](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofintervaltime-or-data-interval-x-unit-time-zone)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'More detailed description can be found [here](https://github.com/ClickHouse/ClickHouse/issues/1201).\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.25')
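# Illustrative sketch, not generated content: toStartOfInterval generalizes the
# fixed toStartOf* functions to an arbitrary interval, e.g.:
#
#   SELECT toStartOfInterval(toDateTime64('1925-05-17 10:22:30', 0), INTERVAL 15 MINUTE)
#   -- expected to match toStartOfFifteenMinutes for the same value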
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toTime = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTime',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toTime](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#totime)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.26')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeYearNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeYearNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeYearNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativeyearnum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.27')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeQuarterNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeQuarterNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeQuarterNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativequarternum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.28')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeMonthNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMonthNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeMonthNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativemonthnum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.29')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeWeekNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeWeekNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeWeekNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativeweeknum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.30')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeDayNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeDayNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeDayNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativedaynum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.31')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeHourNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeHourNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeHourNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativehournum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.32')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeMinuteNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMinuteNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeMinuteNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativeminutenum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.33')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeSecondNum = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeSecondNum',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toRelativeSecondNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativesecondnum)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.34')
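# Illustrative sketch, not generated content: the toRelative*Num functions return
# monotonically increasing counters, so differences between two DateTime64 values
# should stay correct across the epoch boundary, e.g.:
#
#   SELECT toRelativeDayNum(toDateTime64('1970-01-02 00:00:00', 0))
#        - toRelativeDayNum(toDateTime64('1969-12-31 00:00:00', 0))
#   -- expected: 2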
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toISOYear = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOYear',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toISOYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toisoyear)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.35')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toISOWeek = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOWeek',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toISOWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toisoweek)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.36')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toWeek = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toWeek',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toweekdatemode)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.37')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYearWeek = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYearWeek',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toYearWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyearweekdatemode)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.38')
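# Illustrative sketch, not generated content: toWeek and toYearWeek accept a mode
# argument selecting the week-numbering convention, which should behave the same
# for extended-range values, e.g.:
#
#   SELECT toWeek(toDateTime64('1925-01-01 00:00:00', 0), 1),
#          toYearWeek(toDateTime64('1925-01-01 00:00:00', 0), 1)
#   -- mode 1: weeks start on Monday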
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_now = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.now',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support conversion of output from the [now](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#now)\n'
'function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.39')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_today = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.today',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support conversion of output from the [today](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#today)\n'
'function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.40')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_yesterday = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.yesterday',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support conversion of output from the [yesterday](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#yesterday)\n'
'function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.41')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_timeSlot = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlot',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support conversion of output from the [timeSlot](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#timeslot)\n'
'function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.42')
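# Illustrative sketch, not generated content: now/today/yesterday/timeSlot return
# Date/DateTime values whose output should convert cleanly to DateTime64, e.g.:
#
#   SELECT toDateTime64(now(), 3), toDateTime64(timeSlot(now()), 0)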
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYYYYMM = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMM',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toYYYYMM](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyyyymm)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.43')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYYYYMMDD = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDD',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toYYYYMMDD](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyyyymmdd)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.44')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYYYYMMDDhhmmss = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDDhhmmss',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [toYYYYMMDDhhmmss](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyyyymmddhhmmss)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.45')
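# Illustrative sketch, not generated content: the toYYYYMM* functions encode the
# date parts as an unsigned integer, which should also hold for extended-range
# dates, e.g.:
#
#   SELECT toYYYYMMDDhhmmss(toDateTime64('1925-05-17 10:22:30', 0))
#   -- expected: 19250517102230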
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addYears = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addYears',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addYears](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.46')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addMonths = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMonths',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addMonths](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.47')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addWeeks = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addWeeks',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addWeeks](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.48')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addDays = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addDays',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addDays](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.49')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addHours = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addHours',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addHours](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.50')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addMinutes = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMinutes',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.51')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addSeconds = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addSeconds',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addSeconds](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.52')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addQuarters = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addQuarters',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [addQuarters](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.53')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractYears = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractYears',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractYears](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.54')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractMonths = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMonths',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractMonths](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.55')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractWeeks = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractWeeks',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractWeeks](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.56')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractDays = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractDays',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractDays](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.57')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractHours = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractHours',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractHours](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.58')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractMinutes = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMinutes',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.59')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractSeconds = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractSeconds',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractSeconds](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.60')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractQuarters = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractQuarters',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [subtractQuarters](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.61')
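# Illustrative sketch, not generated content: the add*/subtract* pairs should be
# inverses of each other, including when the result crosses the 1970 epoch, e.g.:
#
#   SELECT addYears(toDateTime64('1965-06-01 00:00:00', 0), 10)       -- 1975-06-01 00:00:00
#   SELECT subtractYears(toDateTime64('1975-06-01 00:00:00', 0), 10)  -- 1965-06-01 00:00:00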
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_dateDiff = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.dateDiff',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [dateDiff](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#datediff)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.62')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_timeSlots = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlots',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [timeSlots](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#timeslotsstarttime-duration-size)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.63')
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_formatDateTime = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.formatDateTime',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct operation of the [formatDateTime](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#formatdatetime)\n'
'function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
'\n'
),
link=None,
level=5,
num='4.2.0.4.64')
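# Illustrative sketch, not generated content: formatDateTime should render
# extended-range values with the usual format specifiers, e.g.:
#
#   SELECT formatDateTime(toDateTime64('1925-05-17 10:22:30', 0), '%Y-%m-%d')
#   -- expected: '1925-05-17'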
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toInt_8_16_32_64_128_256_ = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toInt(8|16|32|64|128|256)',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to integer types using [toInt(8|16|32|64|128|256)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#toint8163264128256) functions.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.1')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUInt_8_16_32_64_256_ = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUInt(8|16|32|64|256)',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to unsigned integer types using [toUInt(8|16|32|64|256)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#touint8163264256) functions.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.2')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toFloat_32_64_ = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toFloat(32|64)',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to float types using [toFloat(32|64)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tofloat3264) functions.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.3')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDate = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDate',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range]\n'
'to the [Date](https://clickhouse.tech/docs/en/sql-reference/data-types/date/) type using the [toDate](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#todate) function.\n'
'Note that this function is expected to work correctly only within the [normal date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.4')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDateTime = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to the [DateTime](https://clickhouse.tech/docs/en/sql-reference/data-types/datetime/) type using the [toDateTime](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#todatetime) function.\n'
'Note that this function is expected to work correctly only within the [normal date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.5')
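# Illustrative sketch, not generated content: toDate/toDateTime are only expected
# to produce correct results for values inside the normal date range, e.g.:
#
#   SELECT toDate(toDateTime64('2020-01-01 10:20:30', 0))  -- 2020-01-01 (normal range)
#   -- extended-range inputs such as 1925-01-01 are out of scope for these functions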
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDateTime64 = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion from any of the input data types supported by the [toDateTime64](https://clickhouse.tech/docs/en/sql-reference/data-types/datetime64/) function\n'
'to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.6')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDateTime64_FromString_MissingTime = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64.FromString.MissingTime',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion from the [String](https://clickhouse.tech/docs/en/sql-reference/data-types/string/)\n'
'data type to the [DateTime64](https://clickhouse.tech/docs/en/sql-reference/data-types/datetime64/) data type\n'
'when the value of the string is missing the `hh:mm:ss.sss` part.\n'
"For example, `toDateTime64('2020-01-01', 3)`.\n"
'\n'
),
link=None,
level=5,
num='4.2.1.4.7')
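# Illustrative sketch, not generated content: a date-only string should parse with
# the time part defaulting to midnight at the requested precision, e.g.:
#
#   SELECT toDateTime64('2020-01-01', 3)
#   -- expected: 2020-01-01 00:00:00.000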
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDecimal_32_64_128_256_ = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDecimal(32|64|128|256)',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to [Decimal](https://clickhouse.tech/docs/en/sql-reference/data-types/decimal/) types using [toDecimal(32|64|128|256)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#todecimal3264128256) functions.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.8')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toString = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toString',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to the [String](https://clickhouse.tech/docs/en/sql-reference/data-types/string/) type using the [toString](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tostring) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.9')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_CAST_x_T_ = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.CAST(x,T)',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to one of the supported data types using the [CAST(x,T)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#type_conversion_function-cast) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.10')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUnixTimestamp64Milli = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Milli',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type using the [toUnixTimestamp64Milli](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tounixtimestamp64milli) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.11')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUnixTimestamp64Micro = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Micro',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type using the [toUnixTimestamp64Micro](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tounixtimestamp64micro) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.12')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUnixTimestamp64Nano = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Nano',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'to the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type using the [toUnixTimestamp64Nano](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tounixtimestamp64nano) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.13')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64Milli = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Milli',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion from the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type\n'
'to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'using the [fromUnixTimestamp64Milli](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#fromunixtimestamp64milli) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.14')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64Micro = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Micro',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion from the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type\n'
'to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'using the [fromUnixTimestamp64Micro](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#fromunixtimestamp64micro) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.15')
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64Nano = Requirement(
name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Nano',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'[ClickHouse] SHALL support correct conversion from the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type\n'
'to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]\n'
'using the [fromUnixTimestamp64Nano](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#fromunixtimestamp64nano) function.\n'
'\n'
),
link=None,
level=5,
num='4.2.1.4.16')
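# Illustrative sketch, not generated content: toUnixTimestamp64* and
# fromUnixTimestamp64* should round-trip, with pre-epoch values mapping to
# negative Int64 timestamps, e.g.:
#
#   WITH toDateTime64('1925-05-17 10:22:30.123', 3, 'UTC') AS dt64
#   SELECT fromUnixTimestamp64Milli(toUnixTimestamp64Milli(dt64), 'UTC') = dt64
#   -- expected: 1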
SRS_010_ClickHouse_DateTime64_Extended_Range = Specification(
name='SRS-010 ClickHouse DateTime64 Extended Range',
description=None,
author=None,
date=None,
status=None,
approved_by=None,
approved_date=None,
approved_version=None,
version=None,
group=None,
type=None,
link=None,
uid=None,
parent=None,
children=None,
headings=(
Heading(name='Revision History', level=1, num='1'),
Heading(name='Introduction', level=1, num='2'),
Heading(name='Terminology', level=1, num='3'),
Heading(name='SRS', level=2, num='3.1'),
Heading(name='Normal Date Range', level=2, num='3.2'),
Heading(name='Extended Date Range', level=2, num='3.3'),
Heading(name='Requirements', level=1, num='4'),
Heading(name='Generic', level=2, num='4.1'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange', level=4, num='4.1.0.1'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start', level=4, num='4.1.0.2'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start.BeforeEpochForTimeZone', level=4, num='4.1.0.3'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End', level=4, num='4.1.0.4'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End.AfterEpochForTimeZone', level=4, num='4.1.0.5'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions', level=4, num='4.1.0.6'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions', level=4, num='4.1.0.7'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TimeZones', level=4, num='4.1.0.8'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime', level=4, num='4.1.0.9'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.Comparison', level=4, num='4.1.0.10'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.SpecificTimestamps', level=4, num='4.1.0.11'),
Heading(name='Specific', level=2, num='4.2'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.Start', level=4, num='4.2.0.1'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.End', level=4, num='4.2.0.2'),
Heading(name='Non-Existent Time', level=4, num='4.2.0.3'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidDate', level=5, num='4.2.0.3.1'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidTime', level=5, num='4.2.0.3.2'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.TimeZoneSwitch', level=5, num='4.2.0.3.3'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime', level=5, num='4.2.0.3.4'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime.Disappeared', level=5, num='4.2.0.3.5'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.LeapSeconds', level=5, num='4.2.0.3.6'),
Heading(name='Dates And Times Functions', level=4, num='4.2.0.4'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTimeZone', level=5, num='4.2.0.4.1'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYear', level=5, num='4.2.0.4.2'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toQuarter', level=5, num='4.2.0.4.3'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonth', level=5, num='4.2.0.4.4'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfYear', level=5, num='4.2.0.4.5'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfMonth', level=5, num='4.2.0.4.6'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfWeek', level=5, num='4.2.0.4.7'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toHour', level=5, num='4.2.0.4.8'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMinute', level=5, num='4.2.0.4.9'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toSecond', level=5, num='4.2.0.4.10'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toUnixTimestamp', level=5, num='4.2.0.4.11'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfYear', level=5, num='4.2.0.4.12'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfISOYear', level=5, num='4.2.0.4.13'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfQuarter', level=5, num='4.2.0.4.14'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMonth', level=5, num='4.2.0.4.15'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonday', level=5, num='4.2.0.4.16'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfWeek', level=5, num='4.2.0.4.17'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfDay', level=5, num='4.2.0.4.18'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfHour', level=5, num='4.2.0.4.19'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMinute', level=5, num='4.2.0.4.20'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfSecond', level=5, num='4.2.0.4.21'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFiveMinute', level=5, num='4.2.0.4.22'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfTenMinutes', level=5, num='4.2.0.4.23'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFifteenMinutes', level=5, num='4.2.0.4.24'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfInterval', level=5, num='4.2.0.4.25'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTime', level=5, num='4.2.0.4.26'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeYearNum', level=5, num='4.2.0.4.27'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeQuarterNum', level=5, num='4.2.0.4.28'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMonthNum', level=5, num='4.2.0.4.29'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeWeekNum', level=5, num='4.2.0.4.30'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeDayNum', level=5, num='4.2.0.4.31'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeHourNum', level=5, num='4.2.0.4.32'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMinuteNum', level=5, num='4.2.0.4.33'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeSecondNum', level=5, num='4.2.0.4.34'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOYear', level=5, num='4.2.0.4.35'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOWeek', level=5, num='4.2.0.4.36'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toWeek', level=5, num='4.2.0.4.37'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYearWeek', level=5, num='4.2.0.4.38'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.now', level=5, num='4.2.0.4.39'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.today', level=5, num='4.2.0.4.40'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.yesterday', level=5, num='4.2.0.4.41'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlot', level=5, num='4.2.0.4.42'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMM', level=5, num='4.2.0.4.43'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDD', level=5, num='4.2.0.4.44'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDDhhmmss', level=5, num='4.2.0.4.45'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addYears', level=5, num='4.2.0.4.46'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMonths', level=5, num='4.2.0.4.47'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addWeeks', level=5, num='4.2.0.4.48'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addDays', level=5, num='4.2.0.4.49'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addHours', level=5, num='4.2.0.4.50'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMinutes', level=5, num='4.2.0.4.51'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addSeconds', level=5, num='4.2.0.4.52'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addQuarters', level=5, num='4.2.0.4.53'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractYears', level=5, num='4.2.0.4.54'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMonths', level=5, num='4.2.0.4.55'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractWeeks', level=5, num='4.2.0.4.56'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractDays', level=5, num='4.2.0.4.57'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractHours', level=5, num='4.2.0.4.58'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMinutes', level=5, num='4.2.0.4.59'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractSeconds', level=5, num='4.2.0.4.60'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractQuarters', level=5, num='4.2.0.4.61'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.dateDiff', level=5, num='4.2.0.4.62'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlots', level=5, num='4.2.0.4.63'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.formatDateTime', level=5, num='4.2.0.4.64'),
Heading(name='Type Conversion Functions', level=3, num='4.2.1'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toInt(8|16|32|64|128|256)', level=5, num='4.2.1.4.1'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUInt(8|16|32|64|256)', level=5, num='4.2.1.4.2'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toFloat(32|64)', level=5, num='4.2.1.4.3'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDate', level=5, num='4.2.1.4.4'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime', level=5, num='4.2.1.4.5'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64', level=5, num='4.2.1.4.6'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64.FromString.MissingTime', level=5, num='4.2.1.4.7'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDecimal(32|64|128|256)', level=5, num='4.2.1.4.8'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toString', level=5, num='4.2.1.4.9'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.CAST(x,T)', level=5, num='4.2.1.4.10'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Milli', level=5, num='4.2.1.4.11'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Micro', level=5, num='4.2.1.4.12'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Nano', level=5, num='4.2.1.4.13'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Milli', level=5, num='4.2.1.4.14'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Micro', level=5, num='4.2.1.4.15'),
Heading(name='RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Nano', level=5, num='4.2.1.4.16'),
Heading(name='References', level=1, num='5'),
),
requirements=(
RQ_SRS_010_DateTime64_ExtendedRange,
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_Start,
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_Start_BeforeEpochForTimeZone,
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_End,
RQ_SRS_010_DateTime64_ExtendedRange_NormalRange_End_AfterEpochForTimeZone,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions,
RQ_SRS_010_DateTime64_ExtendedRange_TimeZones,
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime,
RQ_SRS_010_DateTime64_ExtendedRange_Comparison,
RQ_SRS_010_DateTime64_ExtendedRange_SpecificTimestamps,
RQ_SRS_010_DateTime64_ExtendedRange_Start,
RQ_SRS_010_DateTime64_ExtendedRange_End,
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_InvalidDate,
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_InvalidTime,
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_TimeZoneSwitch,
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_DaylightSavingTime,
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_DaylightSavingTime_Disappeared,
RQ_SRS_010_DateTime64_ExtendedRange_NonExistentTime_LeapSeconds,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toTimeZone,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYear,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toQuarter,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toMonth,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toDayOfYear,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toDayOfMonth,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toDayOfWeek,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toHour,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toMinute,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toSecond,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toUnixTimestamp,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfYear,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfISOYear,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfQuarter,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfMonth,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toMonday,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfWeek,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfDay,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfHour,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfMinute,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfSecond,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfFiveMinute,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfTenMinutes,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfFifteenMinutes,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toStartOfInterval,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toTime,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeYearNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeQuarterNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeMonthNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeWeekNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeDayNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeHourNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeMinuteNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toRelativeSecondNum,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toISOYear,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toISOWeek,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toWeek,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYearWeek,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_now,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_today,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_yesterday,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_timeSlot,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYYYYMM,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYYYYMMDD,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_toYYYYMMDDhhmmss,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addYears,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addMonths,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addWeeks,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addDays,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addHours,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addMinutes,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addSeconds,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_addQuarters,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractYears,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractMonths,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractWeeks,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractDays,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractHours,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractMinutes,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractSeconds,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_subtractQuarters,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_dateDiff,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_timeSlots,
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_formatDateTime,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toInt_8_16_32_64_128_256_,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUInt_8_16_32_64_256_,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toFloat_32_64_,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDate,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDateTime,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDateTime64,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDateTime64_FromString_MissingTime,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toDecimal_32_64_128_256_,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toString,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_CAST_x_T_,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUnixTimestamp64Milli,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUnixTimestamp64Micro,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_toUnixTimestamp64Nano,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64Milli,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64Micro,
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64Nano,
),
content='''
# SRS-010 ClickHouse DateTime64 Extended Range
# Software Requirements Specification
## Table of Contents
* 1 [Revision History](#revision-history)
* 2 [Introduction](#introduction)
* 3 [Terminology](#terminology)
* 3.1 [SRS](#srs)
* 3.2 [Normal Date Range](#normal-date-range)
* 3.3 [Extended Date Range](#extended-date-range)
* 4 [Requirements](#requirements)
* 4.1 [Generic](#generic)
* 4.1.0.1 [RQ.SRS-010.DateTime64.ExtendedRange](#rqsrs-010datetime64extendedrange)
* 4.1.0.2 [RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start](#rqsrs-010datetime64extendedrangenormalrangestart)
* 4.1.0.3 [RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start.BeforeEpochForTimeZone](#rqsrs-010datetime64extendedrangenormalrangestartbeforeepochfortimezone)
* 4.1.0.4 [RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End](#rqsrs-010datetime64extendedrangenormalrangeend)
* 4.1.0.5 [RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End.AfterEpochForTimeZone](#rqsrs-010datetime64extendedrangenormalrangeendafterepochfortimezone)
* 4.1.0.6 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions](#rqsrs-010datetime64extendedrangetypeconversionfunctions)
* 4.1.0.7 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions](#rqsrs-010datetime64extendedrangedatesandtimesfunctions)
* 4.1.0.8 [RQ.SRS-010.DateTime64.ExtendedRange.TimeZones](#rqsrs-010datetime64extendedrangetimezones)
* 4.1.0.9 [RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime](#rqsrs-010datetime64extendedrangenonexistenttime)
* 4.1.0.10 [RQ.SRS-010.DateTime64.ExtendedRange.Comparison](#rqsrs-010datetime64extendedrangecomparison)
* 4.1.0.11 [RQ.SRS-010.DateTime64.ExtendedRange.SpecificTimestamps](#rqsrs-010datetime64extendedrangespecifictimestamps)
* 4.2 [Specific](#specific)
* 4.2.0.1 [RQ.SRS-010.DateTime64.ExtendedRange.Start](#rqsrs-010datetime64extendedrangestart)
* 4.2.0.2 [RQ.SRS-010.DateTime64.ExtendedRange.End](#rqsrs-010datetime64extendedrangeend)
* 4.2.0.3 [Non-Existent Time](#non-existent-time)
* 4.2.0.3.1 [RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidDate](#rqsrs-010datetime64extendedrangenonexistenttimeinvaliddate)
* 4.2.0.3.2 [RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidTime](#rqsrs-010datetime64extendedrangenonexistenttimeinvalidtime)
* 4.2.0.3.3 [RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.TimeZoneSwitch](#rqsrs-010datetime64extendedrangenonexistenttimetimezoneswitch)
* 4.2.0.3.4 [RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime](#rqsrs-010datetime64extendedrangenonexistenttimedaylightsavingtime)
* 4.2.0.3.5 [RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime.Disappeared](#rqsrs-010datetime64extendedrangenonexistenttimedaylightsavingtimedisappeared)
* 4.2.0.3.6 [RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.LeapSeconds](#rqsrs-010datetime64extendedrangenonexistenttimeleapseconds)
* 4.2.0.4 [Dates And Times Functions](#dates-and-times-functions)
* 4.2.0.4.1 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTimeZone](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstotimezone)
* 4.2.0.4.2 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYear](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoyear)
* 4.2.0.4.3 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toQuarter](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoquarter)
* 4.2.0.4.4 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonth](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstomonth)
* 4.2.0.4.5 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfYear](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstodayofyear)
* 4.2.0.4.6 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfMonth](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstodayofmonth)
* 4.2.0.4.7 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfWeek](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstodayofweek)
* 4.2.0.4.8 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toHour](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstohour)
* 4.2.0.4.9 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMinute](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstominute)
* 4.2.0.4.10 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toSecond](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstosecond)
* 4.2.0.4.11 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toUnixTimestamp](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstounixtimestamp)
* 4.2.0.4.12 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfYear](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofyear)
* 4.2.0.4.13 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfISOYear](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofisoyear)
* 4.2.0.4.14 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfQuarter](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofquarter)
* 4.2.0.4.15 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMonth](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofmonth)
* 4.2.0.4.16 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonday](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstomonday)
* 4.2.0.4.17 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfWeek](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofweek)
* 4.2.0.4.18 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfDay](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofday)
* 4.2.0.4.19 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfHour](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofhour)
* 4.2.0.4.20 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMinute](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofminute)
* 4.2.0.4.21 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfSecond](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofsecond)
* 4.2.0.4.22 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFiveMinute](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartoffiveminute)
* 4.2.0.4.23 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfTenMinutes](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartoftenminutes)
* 4.2.0.4.24 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFifteenMinutes](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartoffifteenminutes)
* 4.2.0.4.25 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfInterval](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstostartofinterval)
* 4.2.0.4.26 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTime](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstotime)
* 4.2.0.4.27 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeYearNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativeyearnum)
* 4.2.0.4.28 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeQuarterNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativequarternum)
* 4.2.0.4.29 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMonthNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativemonthnum)
* 4.2.0.4.30 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeWeekNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativeweeknum)
* 4.2.0.4.31 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeDayNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativedaynum)
* 4.2.0.4.32 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeHourNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativehournum)
* 4.2.0.4.33 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMinuteNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativeminutenum)
* 4.2.0.4.34 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeSecondNum](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstorelativesecondnum)
* 4.2.0.4.35 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOYear](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoisoyear)
* 4.2.0.4.36 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOWeek](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoisoweek)
* 4.2.0.4.37 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toWeek](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoweek)
* 4.2.0.4.38 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYearWeek](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoyearweek)
* 4.2.0.4.39 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.now](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsnow)
* 4.2.0.4.40 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.today](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoday)
* 4.2.0.4.41 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.yesterday](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsyesterday)
* 4.2.0.4.42 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlot](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstimeslot)
* 4.2.0.4.43 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMM](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoyyyymm)
* 4.2.0.4.44 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDD](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoyyyymmdd)
* 4.2.0.4.45 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDDhhmmss](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstoyyyymmddhhmmss)
* 4.2.0.4.46 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addYears](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsaddyears)
* 4.2.0.4.47 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMonths](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsaddmonths)
* 4.2.0.4.48 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addWeeks](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsaddweeks)
* 4.2.0.4.49 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addDays](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsadddays)
* 4.2.0.4.50 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addHours](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsaddhours)
* 4.2.0.4.51 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMinutes](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsaddminutes)
* 4.2.0.4.52 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addSeconds](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsaddseconds)
* 4.2.0.4.53 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addQuarters](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsaddquarters)
* 4.2.0.4.54 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractYears](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtractyears)
* 4.2.0.4.55 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMonths](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtractmonths)
* 4.2.0.4.56 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractWeeks](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtractweeks)
* 4.2.0.4.57 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractDays](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtractdays)
* 4.2.0.4.58 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractHours](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtracthours)
* 4.2.0.4.59 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMinutes](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtractminutes)
* 4.2.0.4.60 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractSeconds](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtractseconds)
* 4.2.0.4.61 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractQuarters](#rqsrs-010datetime64extendedrangedatesandtimesfunctionssubtractquarters)
* 4.2.0.4.62 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.dateDiff](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsdatediff)
* 4.2.0.4.63 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlots](#rqsrs-010datetime64extendedrangedatesandtimesfunctionstimeslots)
* 4.2.0.4.64 [RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.formatDateTime](#rqsrs-010datetime64extendedrangedatesandtimesfunctionsformatdatetime)
* 4.2.1 [Type Conversion Functions](#type-conversion-functions)
* 4.2.1.4.1 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toInt(8|16|32|64|128|256)](#rqsrs-010datetime64extendedrangetypeconversionfunctionstoint8163264128256)
* 4.2.1.4.2 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUInt(8|16|32|64|256)](#rqsrs-010datetime64extendedrangetypeconversionfunctionstouint8163264256)
* 4.2.1.4.3 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toFloat(32|64)](#rqsrs-010datetime64extendedrangetypeconversionfunctionstofloat3264)
* 4.2.1.4.4 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDate](#rqsrs-010datetime64extendedrangetypeconversionfunctionstodate)
* 4.2.1.4.5 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime](#rqsrs-010datetime64extendedrangetypeconversionfunctionstodatetime)
* 4.2.1.4.6 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64](#rqsrs-010datetime64extendedrangetypeconversionfunctionstodatetime64)
* 4.2.1.4.7 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64.FromString.MissingTime](#rqsrs-010datetime64extendedrangetypeconversionfunctionstodatetime64fromstringmissingtime)
* 4.2.1.4.8 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDecimal(32|64|128|256)](#rqsrs-010datetime64extendedrangetypeconversionfunctionstodecimal3264128256)
* 4.2.1.4.9 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toString](#rqsrs-010datetime64extendedrangetypeconversionfunctionstostring)
* 4.2.1.4.10 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.CAST(x,T)](#rqsrs-010datetime64extendedrangetypeconversionfunctionscastxt)
* 4.2.1.4.11 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Milli](#rqsrs-010datetime64extendedrangetypeconversionfunctionstounixtimestamp64milli)
* 4.2.1.4.12 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Micro](#rqsrs-010datetime64extendedrangetypeconversionfunctionstounixtimestamp64micro)
* 4.2.1.4.13 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Nano](#rqsrs-010datetime64extendedrangetypeconversionfunctionstounixtimestamp64nano)
* 4.2.1.4.14 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Milli](#rqsrs-010datetime64extendedrangetypeconversionfunctionsfromunixtimestamp64milli)
* 4.2.1.4.15 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Micro](#rqsrs-010datetime64extendedrangetypeconversionfunctionsfromunixtimestamp64micro)
* 4.2.1.4.16 [RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Nano](#rqsrs-010datetime64extendedrangetypeconversionfunctionsfromunixtimestamp64nano)
* 5 [References](#references)
## Revision History
This document is stored in an electronic form using [Git] source control management software
hosted in a [GitHub Repository].
All the updates are tracked using the [Revision History].
## Introduction
This document covers the requirements for supporting an extended range for the [DateTime64] data type
that is outside the normal **1970** (1970-01-02 00:00:00 UTC) to **2105** (2105-12-31 23:59:59.99999 UTC) date range.
## Terminology
### SRS
Software Requirements Specification
### Normal Date Range
**1970** `1970-01-02T00:00:00.000000` to **2105** `2105-12-31T23:59:59.99999`
### Extended Date Range
**1925** `1925-01-01T00:00:00.000000` to **2238** `2238-12-31T23:59:59.999999`
## Requirements
### Generic
##### RQ.SRS-010.DateTime64.ExtendedRange
version: 1.0
[ClickHouse] SHALL support extended range for the [DateTime64] data type that includes dates from the year **1925** to **2238**.
##### RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start
version: 1.0
[ClickHouse] SHALL support proper time handling around the normal date range that starts at `1970-01-01 00:00:00.000`
expressed using the [ISO 8601 format].
##### RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.Start.BeforeEpochForTimeZone
version: 1.0
[ClickHouse] SHALL support proper time handling around the start of the [normal date range]
when this time for the time zone is before the start of the [normal date range].
##### RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End
version: 1.0
[ClickHouse] SHALL support proper time handling around the normal date range that ends at `2105-12-31T23:59:59.99999`
expressed using the [ISO 8601 format].
##### RQ.SRS-010.DateTime64.ExtendedRange.NormalRange.End.AfterEpochForTimeZone
version: 1.0
[ClickHouse] SHALL support proper time handling around the end of the [normal date range]
when this time for the time zone is after the end of the [normal date range].
##### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions
version: 1.0
[ClickHouse] SHALL support proper conversion to and from [DateTime64] data type from other data types.
##### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions
version: 1.0
[ClickHouse] SHALL support correct operation of the [Dates and Times Functions] with the [DateTime64] data type
when it stores dates within the [normal date range] and the [extended date range].
##### RQ.SRS-010.DateTime64.ExtendedRange.TimeZones
version: 1.0
[ClickHouse] SHALL support correct operation with the [DateTime64] extended range data type
when combined with a supported time zone.
##### RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime
version: 1.0
[ClickHouse] SHALL support proper handling of non-existent times when using [DateTime64] extended range data type.
##### RQ.SRS-010.DateTime64.ExtendedRange.Comparison
version: 1.0
[ClickHouse] SHALL support proper handling of time comparison when using [DateTime64] extended range data type.
For example, `SELECT toDateTime64('2019-05-05 20:20:12.050', 3) < now()`.
##### RQ.SRS-010.DateTime64.ExtendedRange.SpecificTimestamps
version: 1.0
[ClickHouse] SHALL properly work with the following timestamps in all supported timezones:
```
[9961200,73476000,325666800,354675600,370400400,386125200,388566010,401850000,417574811,496803600,528253200,624423614,636516015,671011200,717555600,752047218,859683600,922582800,1018173600,1035705600,1143334800,1162105223,1174784400,1194156000,1206838823,1224982823,1236495624,1319936400,1319936424,1425798025,1459040400,1509872400,2090451627,2140668000]
```
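For example (illustrative only), any of these timestamps can be checked by converting it to [DateTime64] in a specific timezone:
```
SELECT toDateTime64(1162105223, 0, 'US/Eastern')
```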
### Specific
##### RQ.SRS-010.DateTime64.ExtendedRange.Start
version: 1.0
[ClickHouse] SHALL support extended range for the [DateTime64] data type that starts at `1925-01-01T00:00:00.000000`
expressed using the [ISO 8601 format].
##### RQ.SRS-010.DateTime64.ExtendedRange.End
version: 1.0
[ClickHouse] SHALL support extended range for the [DateTime64] data type that ends at `2238-12-31T23:59:59.999999`
expressed using the [ISO 8601 format].
##### Non-Existent Time
###### RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidDate
version: 1.0
[ClickHouse] SHALL support proper handling of invalid dates when using [DateTime64] extended range data type,
such as:
* `YYYY-04-31, YYYY-06-31, YYYY-09-31, YYYY-11-31`
* `1990-02-30 00:00:02`
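For example (illustrative only), the handling of such a value can be exercised with:
```
SELECT toDateTime64('1990-02-30 00:00:02', 0)
```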
###### RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.InvalidTime
version: 1.0
[ClickHouse] SHALL support proper handling of invalid time for a timezone
when using [DateTime64] extended range data type, for example,
* `2002-04-07 02:30:00` never happened at all in the US/Eastern timezone ([Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime))
###### RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.TimeZoneSwitch
version: 1.0
[ClickHouse] SHALL support proper handling of invalid time when using the [DateTime64] extended range data type
when the invalid time arises because *countries switch timezone definitions with no
daylight savings time switch* [Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime).
>
> For example, in 1915 Warsaw switched from Warsaw time to Central European time with
> no daylight savings transition. So at the stroke of midnight on August 5th 1915 the clocks
> were wound back 24 minutes creating an ambiguous time period that cannot be specified without
> referring to the timezone abbreviation or the actual UTC offset. In this case midnight happened twice,
> neither time during a daylight saving time period. pytz handles this transition by treating the ambiguous
> period before the switch as daylight savings time, and the ambiguous period after as standard time.
>
> [Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime)
###### RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime
version: 1.0
[ClickHouse] SHALL support proper handling of invalid time when using [DateTime64] extended range data type
when for a given timezone time switches from standard to daylight saving.
> For example, in the US/Eastern timezone on the last Sunday morning in October, the following sequence happens:
>
> 01:00 EDT occurs
> 1 hour later, instead of 2:00am the clock is turned back 1 hour and 01:00 happens again (this time 01:00 EST)
> In fact, every instant between 01:00 and 02:00 occurs twice.
> [Stuart Bishop: pytz library](http://pytz.sourceforge.net/#problems-with-localtime)
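For example (illustrative only), a time inside the repeated hour of that transition can be expressed as:
```
SELECT toDateTime64('2004-10-31 01:30:00', 0, 'US/Eastern')
```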
###### RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.DaylightSavingTime.Disappeared
version: 1.0
[ClickHouse] SHALL support proper handling of invalid time when using [DateTime64] extended range data type
for a given timezone when the transition from standard to daylight saving time causes an hour to disappear.
Expected behavior: if a [DateTime64] value is initialized with a skipped time value, it is treated as DST, and the resulting value will be an hour earlier, e.g. `SELECT toDateTime64('2020-03-08 02:34:00', 0, 'America/Denver')` returns `2020-03-08 01:34:00`.
###### RQ.SRS-010.DateTime64.ExtendedRange.NonExistentTime.LeapSeconds
version: 1.0
[ClickHouse] SHALL support proper handling of leap seconds adjustments when using [DateTime64] extended range data type.
##### Dates And Times Functions
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTimeZone
version: 1.0
[ClickHouse] SHALL support correct operation of the [toTimeZone](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#totimezone)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYear
version: 1.0
[ClickHouse] SHALL support correct operation of the [toYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyear)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toQuarter
version: 1.0
[ClickHouse] SHALL support correct operation of the [toQuarter](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toquarter)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonth
version: 1.0
[ClickHouse] SHALL support correct operation of the [toMonth](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tomonth)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfYear
version: 1.0
[ClickHouse] SHALL support correct operation of the [toDayOfYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#todayofyear)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfMonth
version: 1.0
[ClickHouse] SHALL support correct operation of the [toDayOfMonth](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#todayofmonth)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toDayOfWeek
version: 1.0
[ClickHouse] SHALL support correct operation of the [toDayOfWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#todayofweek)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toHour
version: 1.0
[ClickHouse] SHALL support correct operation of the [toHour](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tohour)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMinute
version: 1.0
[ClickHouse] SHALL support correct operation of the [toMinute](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tominute)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toSecond
version: 1.0
[ClickHouse] SHALL support correct operation of the [toSecond](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tosecond)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toUnixTimestamp
version: 1.0
[ClickHouse] SHALL support correct operation of the [toUnixTimestamp](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#to-unix-timestamp)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
The timestamp value is expected to be negative when the [DateTime64] value is prior to `1970-01-01` and positive otherwise.
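For example (illustrative only), per this requirement the following query is expected to return a negative value:
```
SELECT toUnixTimestamp(toDateTime64('1969-12-31 23:59:59', 0, 'UTC'))
```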
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfYear
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofyear)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfISOYear
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfISOYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofisoyear)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfQuarter
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfQuarter](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofquarter)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMonth
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfMonth](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofmonth)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toMonday
version: 1.0
[ClickHouse] SHALL support correct operation of the [toMonday](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tomonday)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfWeek
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofweektmode)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfDay
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfDay](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofday)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfHour
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfHour](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofhour)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfMinute
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfMinute](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofminute)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfSecond
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfSecond](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofsecond)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFiveMinute
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfFiveMinute](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartoffiveminute)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfTenMinutes
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfTenMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartoftenminutes)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfFifteenMinutes
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfFifteenMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartoffifteenminutes)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toStartOfInterval
version: 1.0
[ClickHouse] SHALL support correct operation of the [toStartOfInterval](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#tostartofintervaltime-or-data-interval-x-unit-time-zone)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
More detailed description can be found [here](https://github.com/ClickHouse/ClickHouse/issues/1201).
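For example (illustrative only), rounding an extended range value down to a 15-minute boundary:
```
SELECT toStartOfInterval(toDateTime64('1928-12-31 21:50:00', 0), INTERVAL 15 MINUTE)
```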
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toTime
version: 1.0
[ClickHouse] SHALL support correct operation of the [toTime](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#totime)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeYearNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeYearNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativeyearnum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeQuarterNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeQuarterNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativequarternum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMonthNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeMonthNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativemonthnum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeWeekNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeWeekNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativeweeknum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeDayNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeDayNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativedaynum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeHourNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeHourNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativehournum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeMinuteNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeMinuteNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativeminutenum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toRelativeSecondNum
version: 1.0
[ClickHouse] SHALL support correct operation of the [toRelativeSecondNum](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#torelativesecondnum)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOYear
version: 1.0
[ClickHouse] SHALL support correct operation of the [toISOYear](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toisoyear)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toISOWeek
version: 1.0
[ClickHouse] SHALL support correct operation of the [toISOWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toisoweek)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toWeek
version: 1.0
[ClickHouse] SHALL support correct operation of the [toWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toweekdatemode)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYearWeek
version: 1.0
[ClickHouse] SHALL support correct operation of the [toYearWeek](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyearweekdatemode)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.now
version: 1.0
[ClickHouse] SHALL support conversion of output from the [now](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#now)
function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.today
version: 1.0
[ClickHouse] SHALL support conversion of output from the [today](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#today)
function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.yesterday
version: 1.0
[ClickHouse] SHALL support conversion of output from the [yesterday](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#yesterday)
function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlot
version: 1.0
[ClickHouse] SHALL support conversion of output from the [timeSlot](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#timeslot)
function to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMM
version: 1.0
[ClickHouse] SHALL support correct operation of the [toYYYYMM](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyyyymm)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDD
version: 1.0
[ClickHouse] SHALL support correct operation of the [toYYYYMMDD](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyyyymmdd)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.toYYYYMMDDhhmmss
version: 1.0
[ClickHouse] SHALL support correct operation of the [toYYYYMMDDhhmmss](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#toyyyymmddhhmmss)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addYears
version: 1.0
[ClickHouse] SHALL support correct operation of the [addYears](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
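For example (illustrative only), adding years can move a value from the [normal date range] into the [extended date range]:
```
SELECT addYears(toDateTime64('2100-01-01 00:00:00', 0), 100)
```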
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMonths
version: 1.0
[ClickHouse] SHALL support correct operation of the [addMonths](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addWeeks
version: 1.0
[ClickHouse] SHALL support correct operation of the [addWeeks](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addDays
version: 1.0
[ClickHouse] SHALL support correct operation of the [addDays](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addHours
version: 1.0
[ClickHouse] SHALL support correct operation of the [addHours](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addMinutes
version: 1.0
[ClickHouse] SHALL support correct operation of the [addMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addSeconds
version: 1.0
[ClickHouse] SHALL support correct operation of the [addSeconds](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.addQuarters
version: 1.0
[ClickHouse] SHALL support correct operation of the [addQuarters](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#addyears-addmonths-addweeks-adddays-addhours-addminutes-addseconds-addquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractYears
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractYears](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMonths
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractMonths](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractWeeks
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractWeeks](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractDays
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractDays](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractHours
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractHours](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractMinutes
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractMinutes](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractSeconds
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractSeconds](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.subtractQuarters
version: 1.0
[ClickHouse] SHALL support correct operation of the [subtractQuarters](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#subtractyears-subtractmonths-subtractweeks-subtractdays-subtracthours-subtractminutes-subtractseconds-subtractquarters)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.dateDiff
version: 1.0
[ClickHouse] SHALL support correct operation of the [dateDiff](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#datediff)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
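For example (illustrative only), computing the difference across the full [extended date range]:
```
SELECT dateDiff('year', toDateTime64('1925-01-01 00:00:00', 0), toDateTime64('2238-12-31 23:59:59', 0))
```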
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.timeSlots
version: 1.0
[ClickHouse] SHALL support correct operation of the [timeSlots](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#timeslotsstarttime-duration-size)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.DatesAndTimesFunctions.formatDateTime
version: 1.0
[ClickHouse] SHALL support correct operation of the [formatDateTime](https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/#formatdatetime)
function used with the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
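For example (illustrative only), formatting an extended range value:
```
SELECT formatDateTime(toDateTime64('1929-09-16 19:20:17', 0), '%Y-%m-%d %H:%M:%S')
```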
#### Type Conversion Functions
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toInt(8|16|32|64|128|256)
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to integer types using [toInt(8|16|32|64|128|256)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#toint8163264128256) functions.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUInt(8|16|32|64|256)
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to unsigned integer types using [toUInt(8|16|32|64|256)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#touint8163264256) functions.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toFloat(32|64)
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to float types using [toFloat(32|64)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tofloat3264) functions.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDate
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range]
to the [Date](https://clickhouse.tech/docs/en/sql-reference/data-types/date/) type using the [toDate](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#todate) function.
This function is only expected to work within the [normal date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to the [DateTime](https://clickhouse.tech/docs/en/sql-reference/data-types/datetime/) type using the [toDateTime](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#todatetime) function.
This function is only expected to work within the [normal date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64
version: 1.0
[ClickHouse] SHALL support correct conversion from the data types supported by the [toDateTime64](https://clickhouse.tech/docs/en/sql-reference/data-types/datetime64/) function
to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range].
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDateTime64.FromString.MissingTime
version: 1.0
[ClickHouse] SHALL support correct conversion from the [String](https://clickhouse.tech/docs/en/sql-reference/data-types/string/)
data type to the [DateTime64](https://clickhouse.tech/docs/en/sql-reference/data-types/datetime64/) data type
when the value of the string is missing the `hh:mm:ss.sss` part.
For example, `toDateTime64('2020-01-01', 3)`.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toDecimal(32|64|128|256)
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to [Decimal](https://clickhouse.tech/docs/en/sql-reference/data-types/decimal/) types using [toDecimal(32|64|128|256)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#todecimal3264128256) functions.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toString
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to the [String](https://clickhouse.tech/docs/en/sql-reference/data-types/string/) type using the [toString](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tostring) function.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.CAST(x,T)
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to one of the supported data type using the [CAST(x,T)](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#type_conversion_function-cast) function.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Milli
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type using the [toUnixTimestamp64Milli](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tounixtimestamp64milli) function.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Micro
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type using the [toUnixTimestamp64Micro](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tounixtimestamp64micro) function.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.toUnixTimestamp64Nano
version: 1.0
[ClickHouse] SHALL support correct conversion of the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
to the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type using the [toUnixTimestamp64Nano](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#tounixtimestamp64nano) function.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Milli
version: 1.0
[ClickHouse] SHALL support correct conversion from the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type
to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
using the [fromUnixTimestamp64Milli](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#fromunixtimestamp64milli) function.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Micro
version: 1.0
[ClickHouse] SHALL support correct conversion from the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type
to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
using the [fromUnixTimestamp64Micro](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#fromunixtimestamp64micro) function.
###### RQ.SRS-010.DateTime64.ExtendedRange.TypeConversionFunctions.fromUnixTimestamp64Nano
version: 1.0
[ClickHouse] SHALL support correct conversion from the [Int64](https://clickhouse.tech/docs/en/sql-reference/data-types/int-uint/) type
to the [DateTime64] data type when it stores dates within the [normal date range] and the [extended date range]
using the [fromUnixTimestamp64Nano](https://clickhouse.tech/docs/en/sql-reference/functions/type-conversion-functions/#fromunixtimestamp64nano) function.
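For illustration only (not part of the requirements), a minimal Python sketch of the millisecond round-trip described above, assuming a reachable ClickHouse server that supports the extended range and the third-party `clickhouse_driver` package:
from clickhouse_driver import Client  # third-party package, assumed installed

client = Client('localhost')  # assumed local ClickHouse server
query = (
    "SELECT toUnixTimestamp64Milli(dt64) AS ms, "
    "fromUnixTimestamp64Milli(toUnixTimestamp64Milli(dt64)) AS round_trip "
    "FROM (SELECT toDateTime64('2150-01-01 00:00:00.123', 3, 'UTC') AS dt64)"
)
# round_trip should equal the original extended-range value
print(client.execute(query))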
## References
* **DateTime64**: https://clickhouse.tech/docs/en/sql-reference/data-types/datetime64/
* **ISO 8601 format**: https://en.wikipedia.org/wiki/ISO_8601
* **ClickHouse**: https://clickhouse.tech
* **GitHub Repository**: https://github.com/ClickHouse/ClickHouse/blob/master/tests/testflows/datetime64_extended_range/requirements/requirements.md
* **Revision History**: https://github.com/ClickHouse/ClickHouse/commits/master/tests/testflows/datetime64_extended_range/requirements/requirements.md
* **Git**: https://git-scm.com/
[SRS]: #srs
[normal date range]: #normal-date-range
[extended date range]: #extended-date-range
[Dates and Times Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/date-time-functions/
[DateTime64]: https://clickhouse.tech/docs/en/sql-reference/data-types/datetime64/
[ISO 8601 format]: https://en.wikipedia.org/wiki/ISO_8601
[ClickHouse]: https://clickhouse.tech
[GitHub Repository]: https://github.com/ClickHouse/ClickHouse/blob/master/tests/testflows/datetime64_extended_range/requirements/requirements.md
[Revision History]: https://github.com/ClickHouse/ClickHouse/commits/master/tests/testflows/datetime64_extended_range/requirements/requirements.md
[Git]: https://git-scm.com/
[GitHub]: https://github.com
''')
| 55.426752
| 366
| 0.755344
| 18,245
| 147,934
| 6.05607
| 0.03464
| 0.032418
| 0.043007
| 0.096766
| 0.92748
| 0.926231
| 0.918004
| 0.847176
| 0.776945
| 0.741024
| 0
| 0.062948
| 0.130808
| 147,934
| 2,668
| 367
| 55.447526
| 0.796365
| 0.001453
| 0
| 0.554222
| 1
| 0.210459
| 0.718962
| 0.242964
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000857
| 0
| 0.000857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b48f30405ca98473352a06f11c02c20bb12c0e5a
| 1,869
|
py
|
Python
|
Full_Project/main.py
|
RGGH/API-1
|
210f5f1349693f64e1301812492fed4ff5ddd036
|
[
"MIT"
] | null | null | null |
Full_Project/main.py
|
RGGH/API-1
|
210f5f1349693f64e1301812492fed4ff5ddd036
|
[
"MIT"
] | null | null | null |
Full_Project/main.py
|
RGGH/API-1
|
210f5f1349693f64e1301812492fed4ff5ddd036
|
[
"MIT"
] | null | null | null |
import uvicorn
from fastapi import FastAPI
from pydantic import BaseModel
from sklearn.neural_network import MLPRegressor
import numpy as np

app = FastAPI()
output = {}

# questionnaire data from csv:
# study hours per week, years, books, projects, earnings, rating
data = np.loadtxt(open("questions.csv", "rb"), delimiter=",", skiprows=1)

# fit all but the last column (features) against the last column (rating)
neural_net = MLPRegressor(max_iter=10000).fit(data[:, :-1], data[:, -1])

class RequestBody(BaseModel):
    weeks: int
    years: int
    books: int
    projects: int
    earn: int

@app.post("/vars")
def predict_rating(body: RequestBody) -> dict:
    # predict a rating for the submitted questionnaire answers
    rating = neural_net.predict([[body.weeks, body.years, body.books, body.projects, body.earn]])
    output['rating'] = str(rating)
    return output

if __name__ == "__main__":
    # this file is main.py, so the import string must be "main:app" (not "app.api:app")
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
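For illustration, a hypothetical client call against the /vars endpoint above, assuming the server is running locally via `uvicorn main:app` and the `requests` package is installed:
import requests  # assumed installed

answers = {"weeks": 10, "years": 2, "books": 5, "projects": 3, "earn": 40000}
resp = requests.post("http://localhost:8000/vars", json=answers)
print(resp.json())  # e.g. {'rating': '[...]'}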
| 22.792683
| 93
| 0.695024
| 267
| 1,869
| 4.745318
| 0.29588
| 0.009471
| 0.047356
| 0.037885
| 0.992897
| 0.992897
| 0.992897
| 0.92502
| 0.92502
| 0.92502
| 0
| 0.035533
| 0.156768
| 1,869
| 81
| 94
| 23.074074
| 0.768401
| 0.127876
| 0
| 0.956522
| 0
| 0
| 0.065716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.217391
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b49b9cc5aaf652fe6d471a6a37ba756d3e7e38b9
| 160
|
py
|
Python
|
changelogs/custom/pypi/django_haystack.py
|
cclauss/changelogs
|
734763fa320d5fbf71016f74f188c5a51e60c45f
|
[
"MIT"
] | 54
|
2017-01-12T09:44:49.000Z
|
2022-02-01T18:15:07.000Z
|
changelogs/custom/pypi/django_haystack.py
|
cclauss/changelogs
|
734763fa320d5fbf71016f74f188c5a51e60c45f
|
[
"MIT"
] | 254
|
2016-12-23T12:53:52.000Z
|
2021-11-23T14:59:01.000Z
|
changelogs/custom/pypi/django_haystack.py
|
cclauss/changelogs
|
734763fa320d5fbf71016f74f188c5a51e60c45f
|
[
"MIT"
] | 26
|
2017-02-25T08:21:05.000Z
|
2022-01-10T15:46:24.000Z
|
URL = 'https://raw.githubusercontent.com/django-haystack/django-haystack/master/docs/changelog.rst'
def get_urls(releases, **kwargs):
return {URL}, set()
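A hypothetical direct call of the changelog hook above; in the `changelogs` project the caller is assumed to supply the known releases and consume the returned (urls, ignored) pair:
urls, ignored = get_urls(releases=None)  # releases is unused by this custom hook
print(urls)  # {'https://raw.githubusercontent.com/django-haystack/django-haystack/master/docs/changelog.rst'}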
| 26.666667
| 99
| 0.7375
| 21
| 160
| 5.571429
| 0.857143
| 0.239316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 160
| 5
| 100
| 32
| 0.806897
| 0
| 0
| 0
| 0
| 0.333333
| 0.56875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b4fb728bb7894204f4f705c32ea3ead327f413ba
| 6,908
|
py
|
Python
|
model_tf.py
|
sontung/vacuum_world
|
17684f120e5c6935168c52001dfc8b1256b42375
|
[
"MIT"
] | null | null | null |
model_tf.py
|
sontung/vacuum_world
|
17684f120e5c6935168c52001dfc8b1256b42375
|
[
"MIT"
] | null | null | null |
model_tf.py
|
sontung/vacuum_world
|
17684f120e5c6935168c52001dfc8b1256b42375
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
slim = tf.contrib.slim
class DoubleQnetwork:
def __init__(self, h_size=512, nb_actions=5):
# The network receives a frame from the game, flattened into an array.
# It then resizes it and processes it through four convolutional layers.
self.scalarInput = tf.placeholder(shape=[None, 21168], dtype=tf.float32)
        self.imageIn = tf.reshape(self.scalarInput, shape=[-1, 84, 84, 3])
self.conv1 = slim.conv2d(inputs=self.imageIn, num_outputs=32, kernel_size=[8, 8],
stride=[4, 4], padding='VALID', biases_initializer=None)
self.conv2 = slim.conv2d(inputs=self.conv1, num_outputs=64, kernel_size=[4, 4],
stride=[2, 2], padding='VALID', biases_initializer=None)
self.conv3 = slim.conv2d(inputs=self.conv2, num_outputs=64, kernel_size=[3, 3],
stride=[1, 1], padding='VALID', biases_initializer=None)
self.conv4 = slim.conv2d(inputs=self.conv3, num_outputs=h_size, kernel_size=[7, 7],
stride=[1, 1], padding='VALID', biases_initializer=None)
# We take the output from the final convolutional layer and split it into separate advantage and value streams.
self.streamAC, self.streamVC = tf.split(self.conv4, 2, 3)
self.streamA = slim.flatten(self.streamAC)
self.streamV = slim.flatten(self.streamVC)
xavier_init = tf.contrib.layers.xavier_initializer()
self.AW = tf.Variable(xavier_init([h_size // 2, nb_actions]))
self.VW = tf.Variable(xavier_init([h_size // 2, 1]))
self.Advantage = tf.matmul(self.streamA, self.AW)
self.Value = tf.matmul(self.streamV, self.VW)
# Then combine them together to get our final Q-values.
self.Q_out = self.Value + tf.subtract(self.Advantage, tf.reduce_mean(self.Advantage, axis=1, keep_dims=True))
self.predict = tf.argmax(self.Q_out, 1)
# Below we obtain the loss by taking the sum of squares difference between the target and prediction Q values.
self.targetQ = tf.placeholder(shape=[None], dtype=tf.float32)
self.actions = tf.placeholder(shape=[None], dtype=tf.int32)
self.actions_onehot = tf.one_hot(self.actions, nb_actions, dtype=tf.float32)
self.Q = tf.reduce_sum(tf.multiply(self.Q_out, self.actions_onehot), axis=1)
self.td_error = tf.square(self.targetQ - self.Q)
self.loss = tf.reduce_mean(self.td_error)
self.trainer = tf.train.AdamOptimizer(learning_rate=0.0001)
self.updateModel = self.trainer.minimize(self.loss)
class DoubleQnetwork2:
def __init__(self, h_size=512, nb_actions=5):
# The network receives a frame from the game, flattened into an array.
# It then resizes it and processes it through four convolutional layers.
self.scalarInput = tf.placeholder(shape=[None, 21168], dtype=tf.float32)
        self.imageIn = tf.reshape(self.scalarInput, shape=[-1, 84, 84, 3])
self.conv1 = slim.conv2d(inputs=self.imageIn, num_outputs=32, kernel_size=[8, 8],
stride=[2, 2], padding='VALID', biases_initializer=None)
self.conv2 = slim.conv2d(inputs=self.conv1, num_outputs=64, kernel_size=[4, 4],
stride=[2, 2], padding='VALID', biases_initializer=None)
self.conv3 = slim.conv2d(inputs=self.conv2, num_outputs=64, kernel_size=[3, 3],
stride=[1, 1], padding='VALID', biases_initializer=None)
self.conv3a = slim.conv2d(inputs=self.conv3, num_outputs=64, kernel_size=[2, 2],
stride=[2, 2], padding='VALID', biases_initializer=None)
self.conv3b = slim.conv2d(inputs=self.conv3a, num_outputs=64, kernel_size=[2, 2],
stride=[1, 1], padding='VALID', biases_initializer=None)
self.conv4 = slim.conv2d(inputs=self.conv3b, num_outputs=h_size, kernel_size=[7, 7],
stride=[1, 1], padding='VALID', biases_initializer=None)
# We take the output from the final convolutional layer and split it into separate advantage and value streams.
self.streamAC, self.streamVC = tf.split(self.conv4, 2, 3)
self.streamA = slim.flatten(self.streamAC)
self.streamV = slim.flatten(self.streamVC)
xavier_init = tf.contrib.layers.xavier_initializer()
self.AW = tf.Variable(xavier_init([h_size // 2, nb_actions]))
self.VW = tf.Variable(xavier_init([h_size // 2, 1]))
self.Advantage = tf.matmul(self.streamA, self.AW)
self.Value = tf.matmul(self.streamV, self.VW)
# Then combine them together to get our final Q-values.
self.Q_out = self.Value + tf.subtract(self.Advantage, tf.reduce_mean(self.Advantage, axis=1, keep_dims=True))
self.predict = tf.argmax(self.Q_out, 1)
# Below we obtain the loss by taking the sum of squares difference between the target and prediction Q values.
self.targetQ = tf.placeholder(shape=[None], dtype=tf.float32)
self.actions = tf.placeholder(shape=[None], dtype=tf.int32)
self.actions_onehot = tf.one_hot(self.actions, nb_actions, dtype=tf.float32)
self.Q = tf.reduce_sum(tf.multiply(self.Q_out, self.actions_onehot), axis=1)
self.td_error = tf.square(self.targetQ - self.Q)
self.loss = tf.reduce_mean(self.td_error)
self.trainer = tf.train.AdamOptimizer(learning_rate=0.0001)
self.updateModel = self.trainer.minimize(self.loss)
class deepQnet:
def __init__(self, lr, nb_actions=5):
self.input = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.float32)
self.target = tf.placeholder(shape=[None, nb_actions], dtype=tf.float32)
self.action = tf.placeholder(shape=[None], dtype=tf.int32)
self.conv1 = tf.layers.conv2d(inputs=self.input, kernel_size=[8, 8], strides=[4, 4],
padding="VALID", filters=32, activation=tf.nn.relu)
self.conv2 = tf.layers.conv2d(inputs=self.conv1, kernel_size=[4, 4], strides=[2, 2],
padding="VALID", filters=64, activation=tf.nn.relu)
self.conv3 = tf.layers.conv2d(inputs=self.conv2, kernel_size=[3, 3], strides=[1, 1],
padding="VALID", filters=32, activation=tf.nn.relu)
self.fc1 = tf.layers.dense(inputs=tf.layers.flatten(self.conv3), units=512, activation=tf.nn.relu)
self.output = tf.layers.dense(inputs=self.fc1, units=nb_actions)
self.q_value = tf.nn.embedding_lookup(self.output, self.action)
self.loss = tf.losses.mean_squared_error(labels=self.target, predictions=self.q_value)
self.optimizer = tf.train.RMSPropOptimizer(learning_rate=lr, decay=0.95, epsilon=0.01).minimize(self.loss)
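A minimal usage sketch for the dueling network above, assuming TensorFlow 1.x (where `tf.placeholder`, `tf.contrib`, and `tf.Session` are available); the batch shape and action count follow the constructor defaults:
import numpy as np

net = DoubleQnetwork(h_size=512, nb_actions=5)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    frames = np.random.rand(4, 21168).astype(np.float32)  # 4 flattened 84x84x3 frames
    q_values, greedy_actions = sess.run([net.Q_out, net.predict],
                                        feed_dict={net.scalarInput: frames})
    print(q_values.shape, greedy_actions)  # (4, 5) and one action index per frame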
| 58.542373
| 119
| 0.646642
| 958
| 6,908
| 4.556367
| 0.17119
| 0.032073
| 0.047652
| 0.045819
| 0.85063
| 0.824055
| 0.817869
| 0.810997
| 0.791294
| 0.760825
| 0
| 0.039161
| 0.227417
| 6,908
| 117
| 120
| 59.042735
| 0.778715
| 0.119427
| 0
| 0.719512
| 0
| 0
| 0.010707
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036585
| false
| 0
| 0.012195
| 0
| 0.085366
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
370ec8ef96a7c55b77ddd4eed9860c5668f23b8e
| 200
|
py
|
Python
|
calculator.py
|
bhavnesh2211/Calculator
|
2f08e85b09c9445b94a52640f3b8b9243b327551
|
[
"MIT"
] | 2
|
2019-03-03T06:08:35.000Z
|
2019-03-03T06:11:49.000Z
|
calculator.py
|
bhavnesh2211/Calculator
|
2f08e85b09c9445b94a52640f3b8b9243b327551
|
[
"MIT"
] | null | null | null |
calculator.py
|
bhavnesh2211/Calculator
|
2f08e85b09c9445b94a52640f3b8b9243b327551
|
[
"MIT"
] | null | null | null |
# the `operation` argument is unused by the functions themselves;
# see the dispatch sketch after these definitions
def add(num1, num2, operation):
    return num1 + num2

def sub(num1, num2, operation):
    return num1 - num2

def mul(num1, num2, operation):
    return num1 * num2

def div(num1, num2, operation):
    return num1 / num2
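A hypothetical dispatch sketch showing how the (currently unused) `operation` argument could select one of the functions above:
OPERATIONS = {'+': add, '-': sub, '*': mul, '/': div}

def calculate(num1, num2, operation):
    # look up the handler by symbol and forward the arguments
    return OPERATIONS[operation](num1, num2, operation)

print(calculate(6, 3, '/'))  # 2.0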
| 25
| 29
| 0.74
| 32
| 200
| 4.625
| 0.28125
| 0.432432
| 0.459459
| 0.621622
| 0.898649
| 0.898649
| 0.689189
| 0
| 0
| 0
| 0
| 0.093023
| 0.14
| 200
| 8
| 30
| 25
| 0.767442
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
2eaa5af3de4e2f0c715c48698ebe75b34ddc7f4a
| 29,110
|
py
|
Python
|
test-common/integrationtest/testcase/test_ns_client_changeleader.py
|
lotabout/OpenMLDB
|
432da3afbed240eb0b8d0571c05f233b1a5a1cd4
|
[
"Apache-2.0"
] | 2,659
|
2021-06-07T12:59:15.000Z
|
2022-03-30T15:29:37.000Z
|
test-common/integrationtest/testcase/test_ns_client_changeleader.py
|
wei20024/OpenMLDB
|
16b426bcba18f70e083179f82db51e71e65d1bf6
|
[
"Apache-2.0"
] | 1,396
|
2021-05-28T09:50:13.000Z
|
2022-03-31T16:37:49.000Z
|
test-common/integrationtest/testcase/test_ns_client_changeleader.py
|
wei20024/OpenMLDB
|
16b426bcba18f70e083179f82db51e71e65d1bf6
|
[
"Apache-2.0"
] | 499
|
2021-05-31T07:36:48.000Z
|
2022-03-31T15:10:12.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021 4Paradigm
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
from testcasebase import TestCaseBase
import time
import os
from libs.test_loader import load
import libs.utils as utils
from libs.logger import infoLogger
from libs.deco import multi_dimension
import libs.ddt as ddt
import libs.conf as conf
@ddt.ddt
@multi_dimension(False)
class TestChangeLeader(TestCaseBase):
def test_changeleader_master_disconnect(self):
"""
        changeleader works as expected: after the leader node loses its network connection, a manual
        failover can be performed, and once the switch succeeds the followers can replicate data
:return:
"""
self.start_client(self.leader)
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
infoLogger.info(name)
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('table_partition', '"{}"'.format(self.slave2), '"0-2"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader, name)
        tid = list(rs1.keys())[0][1]
rs = self.disconnectzk(self.leader)
self.assertIn('disconnect zk ok', rs)
rs = self.updatetablealive(self.ns_leader, name, '*', self.leader, 'no')
self.assertIn('update ok', rs)
time.sleep(3)
rs = self.changeleader(self.ns_leader, name, 0, 'auto')
self.assertIn('change leader ok', rs)
time.sleep(2)
rs = self.connectzk(self.leader)
self.assertIn('connect zk ok', rs)
for repeat in range(20):
time.sleep(2)
rs = self.ns_showopstatus(self.ns_leader)
tablestatus = self.parse_tb(rs, ' ', [0, 1, 2, 3], [4, 5, 6, 7])
kDone_cnt = 0
cnt = 0
flag = False
for status in tablestatus:
if status[2] == name:
cnt = cnt + 1
infoLogger.info('{} = {}'.format(status, tablestatus[status]))
if tablestatus[status][0] == 'kFailed':
infoLogger.error('{} = {}'.format(status, tablestatus[status]))
flag = True
break
if tablestatus[status][0] == 'kDone':
infoLogger.info('{} = {}'.format(status, tablestatus[status]))
kDone_cnt = kDone_cnt + 1
if flag == True:
break
if kDone_cnt == cnt:
break
rs2 = self.showtable(self.ns_leader)
act1 = rs2[(name, tid, '0', self.slave1)]
act2 = rs2[(name, tid, '0', self.slave2)]
roles = [x[0] for x in [act1, act2]]
for repeat in range(20):
rs = self.showtable(self.ns_leader, name)
act1 = rs2[(name, tid, '0', self.slave1)]
act2 = rs2[(name, tid, '0', self.slave2)]
roles = [x[0] for x in [act1, act2]]
if roles.count('leader') == 1 and roles.count('follower') == 1:
self.assertEqual(roles.count('leader'), 1)
self.assertEqual(roles.count('follower'), 1)
break
time.sleep(2)
self.assertEqual(rs2[(name, tid, '0', self.leader)], ['leader', '144000min', 'no', 'kNoCompress'])
self.assertEqual(rs2[(name, tid, '1', self.leader)], ['leader', '144000min', 'no', 'kNoCompress'])
self.assertEqual(rs2[(name, tid, '2', self.leader)], ['leader', '144000min', 'no', 'kNoCompress'])
if roles.count('leader') != 1:
rs = self.ns_showopstatus(self.ns_leader)
infoLogger.debug(rs)
self.assertEqual(roles.count('leader'), 1)
self.assertEqual(roles.count('leader'), 1)
self.assertEqual(roles.count('follower'), 1)
leader_new = self.slave1 if 'leader' in act1 else self.slave2
follower = self.slave1 if 'follower' in act1 else self.slave2
rs2 = self.put(self.leader, tid, 1, 'testkey0', self.now(), 'testvalue0')
self.assertIn('Put ok', rs2)
rs3 = self.put(self.slave1, tid, 1, 'testkey0', self.now(), 'testvalue0')
self.assertIn('Put failed', rs3)
rs4 = self.put(leader_new, tid, 0, 'testkey0', self.now(), 'testvalue0')
self.assertIn('Put ok', rs4)
time.sleep(1)
self.assertIn('testvalue0', self.scan(follower, tid, 0, 'testkey0', self.now(), 1))
self.ns_drop(self.ns_leader, name)
def test_changeleader_master_killed(self):
"""
        changeleader works as expected: after the leader node is killed, a manual failover can be
        performed, and once the switch succeeds the followers can replicate data;
        after the original leader is restarted, a manual recoversnapshot succeeds
:return:
"""
self.start_client(self.leader)
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('table_partition', '"{}"'.format(self.slave2), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader, name)
        tid = list(rs1.keys())[0][1]
self.stop_client(self.leader)
self.updatetablealive(self.ns_leader, name, '*', self.leader, 'no')
time.sleep(5)
self.changeleader(self.ns_leader, name, 0)
time.sleep(5)
rs2 = self.showtable(self.ns_leader)
self.start_client(self.leader)
time.sleep(1)
self.assertEqual(rs2[(name, tid, '0', self.leader)], ['leader', '144000min', 'no', 'kNoCompress'])
self.assertEqual(rs2[(name, tid, '1', self.leader)], ['leader', '144000min', 'no', 'kNoCompress'])
self.assertEqual(rs2[(name, tid, '2', self.leader)], ['leader', '144000min', 'no', 'kNoCompress'])
act1 = rs2[(name, tid, '0', self.slave1)]
act2 = rs2[(name, tid, '0', self.slave2)]
roles = [x[0] for x in [act1, act2]]
self.assertEqual(roles.count('leader'), 1)
self.assertEqual(roles.count('follower'), 1)
leader_new = self.slave1 if 'leader' in act1 else self.slave2
follower = self.slave1 if 'follower' in act1 else self.slave2
rs2 = self.put(self.leader, tid, 1, 'testkey0', self.now(), 'testvalue0')
rs3 = self.put(self.slave1, tid, 1, 'testkey0', self.now(), 'testvalue0')
rs4 = self.put(leader_new, tid, 0, 'testkey0', self.now(), 'testvalue0')
self.assertFalse('Put ok' in rs2)
self.assertFalse('Put ok' in rs3)
self.assertIn('Put ok', rs4)
time.sleep(1)
self.assertIn('testvalue0', self.scan(follower, tid, 0, 'testkey0', self.now(), 1))
self.ns_drop(self.ns_leader, name)
def test_changeleader_master_alive(self):
"""
        changeleader fails when given a table whose leader is still alive; adding the auto argument makes it succeed
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
)
utils.gen_table_metadata_file(m, metadata_path)
rs1 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs1)
rs2 = self.changeleader(self.ns_leader, name, 0)
self.assertIn('failed to change leader', rs2)
rs3 = self.changeleader(self.ns_leader, name, 0, 'auto')
self.assertIn('change leader ok', rs3)
time.sleep(3)
self.ns_drop(self.ns_leader, name)
def test_changeleader_candidate_leader(self):
"""
        specify a candidate_leader explicitly
:return:
"""
self.get_new_ns_leader()
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('table_partition', '"{}"'.format(self.slave2), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs1 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs1)
result = self.showtable(self.ns_leader)
        tid = list(result.keys())[0][1]
rs2 = self.put(self.leader, tid, 0, 'testkey0', self.now(), 'testvalue0')
self.assertIn('Put ok', rs2)
rs3 = self.changeleader(self.ns_leader, name, 0, self.slave1)
self.assertIn('change leader ok', rs3)
time.sleep(3)
rs4 = self.showtable(self.ns_leader)
self.assertEqual(rs4[(name, tid, '0', self.leader)], ['leader', '144000min', 'no', 'kNoCompress'])
self.assertEqual(rs4[(name, tid, '0', self.slave1)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs4[(name, tid, '0', self.slave2)], ['follower', '144000min', 'yes', 'kNoCompress'])
rs5 = self.put(self.slave1, tid, 0, 'testkey1', self.now(), 'testvalue1')
self.assertIn('Put ok', rs5)
time.sleep(1)
self.multidimension_scan_vk = {'k1': 'testvalue1'}
self.assertIn('testvalue1', self.scan(self.slave2, tid, 0, 'testkey1', self.now(), 1))
self.ns_drop(self.ns_leader, name)
def test_changeleader_tname_notexist(self):
"""
        changeleader fails when given a table name that does not exist
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = '"{}"'.format('tname{}'.format(time.time()))
m = utils.gen_table_metadata(
name, None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
)
utils.gen_table_metadata_file(m, metadata_path)
rs1 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs1)
rs2 = self.changeleader(self.ns_leader, 'nullnullnull', 0)
self.assertIn('failed to change leader', rs2)
self.ns_drop(self.ns_leader, name)
@ddt.data(
(0, 'auto', 'no'),
(1, 'auto', 'no'),
)
@ddt.unpack
def test_changeleader_auto_without_offline(self, pid, switch,rsp_msg):
"""
        change the leader without taking it offline, auto mode (1 leader, 2 followers); with three
        replicas, run changeleader with the auto argument and verify the automatic switch succeeds
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('table_partition', '"{}"'.format(self.slave2), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader)
        tid = list(rs1.keys())[0][1]
self.assertEqual(rs1[(name, tid, str(pid), self.leader)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave1)], ['follower', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave2)], ['follower', '144000min', 'yes', 'kNoCompress'])
self.changeleader(self.ns_leader, name, pid, switch)
time.sleep(1)
rs2 = self.showtable(self.ns_leader)
self.assertEqual(rs2[(name, tid, str(pid), self.leader)], ['leader', '144000min', rsp_msg, 'kNoCompress'])
flag = 'false'
if (rs2[(name, tid, str(pid), self.slave1)] == ['leader', '144000min', 'yes', 'kNoCompress'] and
rs2[(name, tid, str(pid), self.slave2)] == ['follower', '144000min', 'yes', 'kNoCompress']):
flag = 'true'
if (rs2[(name, tid, str(pid), self.slave1)] == ['follower', '144000min', 'yes', 'kNoCompress'] and
rs2[(name, tid, str(pid), self.slave2)] == ['leader', '144000min', 'yes', 'kNoCompress']):
flag = 'true'
self.assertEqual(flag, 'true')
self.ns_drop(self.ns_leader, name)
@multi_dimension(False)
@ddt.data(
(0, conf.tb_endpoints[1], 'no'),
(0, conf.tb_endpoints[2], 'no'),
(1, conf.tb_endpoints[1], 'no'),
(1, conf.tb_endpoints[2], 'no'),
(0, '', 'no'),
(1, '', 'no')
)
@ddt.unpack
def test_changeleader_endpoint_without_offline(self, pid, switch, rsp_msg):
"""
        change the leader without taking it offline, specifying an endpoint; also verify whether
        data put to the original leader is still replicated to the other nodes
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
infoLogger.info(name)
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('table_partition', '"{}"'.format(self.slave2), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader)
        tid = list(rs1.keys())[0][1]
data_time = self.now()
put_rs0 = self.put(self.leader, tid, str(pid), 'before', data_time, 'beforevalue')
self.assertTrue('Put ok' in put_rs0)
time.sleep(3)
put_rsleader = self.scan(self.leader, tid, str(pid), 'before', self.now(), 1)
self.assertTrue('beforevalue' in put_rsleader)
put_rsslave = self.scan(self.slave1, tid, str(pid), 'before', self.now(), 1)
self.assertTrue('beforevalue' in put_rsslave)
put_rsslave = self.scan(self.slave2, tid, str(pid), 'before', self.now(), 1)
self.assertTrue('beforevalue' in put_rsslave)
self.assertEqual(rs1[(name, tid, str(pid), self.leader)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave1)], ['follower', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave2)], ['follower', '144000min', 'yes', 'kNoCompress'])
rs0 = self.changeleader(self.ns_leader, name, pid, switch)
time.sleep(1)
        if switch == '':
            # assertIn(member, container): the expected message is the member, the response is the container
            self.assertIn('failed to change leader. error msg: leader is alive', rs0)
else:
rs2 = self.showtable(self.ns_leader)
self.assertEqual(rs2[(name, tid, str(pid), self.leader)], ['leader', '144000min', rsp_msg, 'kNoCompress'])
self.assertEqual(rs2[(name, tid, str(pid), switch)], ['leader', '144000min', 'yes', 'kNoCompress'])
put_rs0 = self.put(self.leader, tid, str(pid), 'after', self.now(), 'aftervalue')
self.assertTrue('Put ok' in put_rs0)
time.sleep(1)
put_rsleader = self.scan(self.leader, tid, str(pid), 'after', self.now(), 1)
self.assertTrue('aftervalue' in put_rsleader)
put_rsslave = self.scan(self.slave1, tid, str(pid), 'after', self.now(), 1)
self.assertFalse('aftervalue' in put_rsslave)
put_rsslave = self.scan(self.slave2, tid, str(pid), 'after', self.now(), 1)
self.assertFalse('aftervalue' in put_rsslave)
put_rs0 = self.put(switch, tid, str(pid), 'newleader', self.now(), 'newleadervalue')
self.assertTrue('Put ok' in put_rs0)
time.sleep(1)
put_rsleader = self.scan(switch, tid, str(pid), 'newleader', self.now(), 1)
self.assertTrue('newleadervalue' in put_rsleader)
put_rsslave = self.scan(self.leader, tid, str(pid), 'newleader', self.now(), 1)
self.assertFalse('newleadervalue' in put_rsslave)
put_rsslave = self.scan(self.slave1, tid, str(pid), 'newleader', self.now(), 1)
self.assertTrue('newleadervalue' in put_rsslave)
put_rsslave = self.scan(self.slave2, tid, str(pid), 'newleader', self.now(), 1)
self.assertTrue('newleadervalue' in put_rsslave)
self.ns_drop(self.ns_leader, name)
@ddt.data(
(0, conf.tb_endpoints[1], 'no'),
(0, conf.tb_endpoints[2], 'no'),
(1, conf.tb_endpoints[1], 'no'),
(1, conf.tb_endpoints[2], 'no'),
(0, '', 'no'),
(1, '', 'no')
)
@ddt.unpack
def test_changeleader_with_illegal_parameter(self, pid, switch, rsp_msg):
"""
        change the leader without taking it offline; exercise the changeleader call with illegal argument values
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('table_partition', '"{}"'.format(self.slave2), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader)
        tid = list(rs1.keys())[0][1]
self.assertEqual(rs1[(name, tid, str(pid), self.leader)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave1)], ['follower', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave2)], ['follower', '144000min', 'yes', 'kNoCompress'])
rs = self.changeleader(self.ns_leader, name+'wrong', pid, switch)
self.assertTrue('failed to change leader. error msg: table is not exist' in rs)
rs = self.changeleader(self.ns_leader, name, pid+10, switch)
self.assertTrue('failed to change leader. error msg: pid is not exist' in rs)
rs = self.changeleader(self.ns_leader, name, pid, '199.199.233.21:21')
self.assertTrue('failed to change leader. error msg: create op failed' in rs)
self.ns_drop(self.ns_leader, name)
@ddt.data(
(0, conf.tb_endpoints[1], conf.tb_endpoints[2], 'no'),
(1, conf.tb_endpoints[1], conf.tb_endpoints[2], 'no')
)
@ddt.unpack
def test_changeleader_with_many_times(self, pid, switch, switch1, rsp_msg):
"""
        change the leader without taking it offline, calling changeleader several times; with an
        alive follower the change succeeds, without one it fails
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-2"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('table_partition', '"{}"'.format(self.slave2), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader)
        tid = list(rs1.keys())[0][1]
self.assertEqual(rs1[(name, tid, str(pid), self.leader)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave1)], ['follower', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave2)], ['follower', '144000min', 'yes', 'kNoCompress'])
rs_change1 = self.changeleader(self.ns_leader, name, pid, switch)
time.sleep(1)
self.assertTrue('change leader ok' in rs_change1)
rs_change2 = self.changeleader(self.ns_leader, name, pid, switch1)
time.sleep(1)
self.assertTrue('change leader ok' in rs_change2)
rs_change3 = self.changeleader(self.ns_leader, name, pid, 'auto')
self.assertTrue('failed to change leader. error msg: no alive follower' in rs_change3)
rs_change4 = self.changeleader(self.ns_leader, name, pid, 'auto')
self.assertTrue('failed to change leader. error msg: no alive follower' in rs_change4)
rs_change5 = self.changeleader(self.ns_leader, name, pid, switch)
self.assertTrue('failed to change leader. error msg: no alive follower' in rs_change5)
rs_change6 = self.changeleader(self.ns_leader, name, pid, self.leader)
self.assertTrue('failed to change leader. error msg: no alive follower' in rs_change6)
self.ns_drop(self.ns_leader, name)
@ddt.data(
(0, conf.tb_endpoints[1], 'no'),
(1, conf.tb_endpoints[1], 'no'),
(0, '', 'no'),
(1, '', 'no')
)
@ddt.unpack
def test_changeleader_endpoint_without_offline_with_one_follower_and_endpoint(self, pid, switch,rsp_msg):
"""
        change the leader without taking it offline, specifying an endpoint; one leader and one
        follower, verify the result of changeleader
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-1"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader)
        tid = list(rs1.keys())[0][1]
self.assertEqual(rs1[(name, tid, str(pid), self.leader)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave1)], ['follower', '144000min', 'yes', 'kNoCompress'])
rs0 = self.changeleader(self.ns_leader, name, pid, switch)
time.sleep(1)
        if switch == '':
            self.assertIn('failed to change leader. error msg: leader is alive', rs0)
else:
rs2 = self.showtable(self.ns_leader)
self.assertEqual(rs2[(name, tid, str(pid), self.leader)], ['leader', '144000min', rsp_msg, 'kNoCompress'])
self.assertEqual(rs2[(name, tid, str(pid), switch)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.ns_drop(self.ns_leader, name)
@ddt.data(
(0, 'auto', 'no'),
(1, 'auto', 'no'),
(0, '', 'no'),
(1, '', 'no')
)
@ddt.unpack
def test_changeleader_endpoint_without_offline_with_one_follower_and_auto(self, pid, switch,rsp_msg):
"""
        change the leader without taking it offline, auto mode; one leader and one follower,
        verify the result of changeleader
:return:
"""
metadata_path = '{}/metadata.txt'.format(self.testpath)
name = 'tname{}'.format(time.time())
m = utils.gen_table_metadata(
'"{}"'.format(name), None, 144000, 2,
('table_partition', '"{}"'.format(self.leader), '"0-1"', 'true'),
('table_partition', '"{}"'.format(self.slave1), '"0-1"', 'false'),
('column_desc', '"k1"', '"string"', 'true'),
('column_desc', '"k2"', '"string"', 'false'),
('column_desc', '"k3"', '"string"', 'false')
)
utils.gen_table_metadata_file(m, metadata_path)
rs0 = self.ns_create(self.ns_leader, metadata_path)
self.assertIn('Create table ok', rs0)
self.multidimension_vk = {'k1': ('string:index', 'testvalue0'),
'k2': ('string', 'testvalue0'),
'k3': ('string', 'testvalue0')}
self.multidimension_scan_vk = {'k1': 'testvalue0'}
rs1 = self.showtable(self.ns_leader)
        tid = list(rs1.keys())[0][1]
self.assertEqual(rs1[(name, tid, str(pid), self.leader)], ['leader', '144000min', 'yes', 'kNoCompress'])
self.assertEqual(rs1[(name, tid, str(pid), self.slave1)], ['follower', '144000min', 'yes', 'kNoCompress'])
rs0 = self.changeleader(self.ns_leader, name, pid, switch)
time.sleep(1)
        if switch == '':
            self.assertIn('failed to change leader. error msg: leader is alive', rs0)
else:
rs2 = self.showtable(self.ns_leader)
self.assertEqual(rs2[(name, tid, str(pid), self.leader)], ['leader', '144000min', rsp_msg, 'kNoCompress'])
self.assertEqual(rs2[(name, tid, str(pid), conf.tb_endpoints[1])], ['leader', '144000min', 'yes', 'kNoCompress'])
self.ns_drop(self.ns_leader, name)
if __name__ == "__main__":
load(TestChangeLeader)
| 47.799672
| 125
| 0.567537
| 3,366
| 29,110
| 4.789661
| 0.07962
| 0.032006
| 0.046148
| 0.033743
| 0.860129
| 0.840715
| 0.820494
| 0.790845
| 0.765848
| 0.730244
| 0
| 0.039689
| 0.253899
| 29,110
| 608
| 126
| 47.878289
| 0.702611
| 0.044418
| 0
| 0.719512
| 0
| 0
| 0.187892
| 0
| 0
| 0
| 0
| 0
| 0.193089
| 1
| 0.022358
| false
| 0
| 0.018293
| 0
| 0.042683
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ed8b331b0dae5b4f66b3a17f25e21939cf7cda3
| 28,641
|
py
|
Python
|
levels.py
|
StefanAvra/madlove
|
2cc2cbfcd98df7283b6425c711e4bf767d4a127b
|
[
"MIT"
] | 1
|
2021-12-15T23:18:41.000Z
|
2021-12-15T23:18:41.000Z
|
levels.py
|
StefanAvra/madlove
|
2cc2cbfcd98df7283b6425c711e4bf767d4a127b
|
[
"MIT"
] | null | null | null |
levels.py
|
StefanAvra/madlove
|
2cc2cbfcd98df7283b6425c711e4bf767d4a127b
|
[
"MIT"
] | null | null | null |
TILE_MAP = (17, 62)
TILE = (25, 7)
TILE_PADDING = 3
_levels = {
0: {
'bricks': [' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' r r ',
' rr rr ',
' rrr rrr ',
' rrr rrr ',
' rrr rrr ',
' rrrr rrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrrr rrrrrrr ',
' rrrrrr rrrrrr ',
' rr rr ',
' r r ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' '],
'name': 'The Lungs',
'powerups': {
1: {'pu_type': 'longer'},
3: {'pu_type': 'pack', 'amount': 1},
5: {'pu_type': 'shoot', 'timer': 6000},
20: {'pu_type': 'shorter'},
27: {'pu_type': 'metastasis', 'amount': 3},
33: {'pu_type': 'longer'},
40: {'pu_type': 'shoot', 'timer': 6000},
50: {'pu_type': 'hotball', 'timer': 2000},
51: {'pu_type': 'hotball', 'timer': 2000},
60: {'pu_type': 'hotball', 'timer': 2000},
65: {'pu_type': 'longer'},
70: {'pu_type': 'metastasis', 'amount': 3},
80: {'pu_type': 'shoot', 'timer': 3000},
87: {'pu_type': 'shoot', 'timer': 1000},
89: {'pu_type': 'shoot', 'timer': 1000},
92: {'pu_type': 'shoot', 'timer': 1000},
96: {'pu_type': 'metastasis', 'amount': 3},
100: {'pu_type': 'metastasis', 'amount': 2},
104: {'pu_type': 'shoot', 'timer': 3000},
106: {'pu_type': 'shoot', 'timer': 1000},
108: {'pu_type': 'shoot', 'timer': 1000},
110: {'pu_type': 'hotball', 'timer': 3000},
115: {'pu_type': 'longer'},
119: {'pu_type': 'hotball', 'timer': 3000},
122: {'pu_type': 'hotball', 'timer': 2000},
130: {'pu_type': 'metastasis', 'amount': 3},
140: {'pu_type': 'shoot', 'timer': 3000},
150: {'pu_type': 'metastasis', 'amount': 1},
160: {'pu_type': 'metastasis', 'amount': 3},
170: {'pu_type': 'shoot', 'timer': 3000},
180: {'pu_type': 'metastasis', 'amount': 3},
190: {'pu_type': 'metastasis', 'amount': 1},
200: {'pu_type': 'shorter'},
210: {'pu_type': 'shoot', 'timer': 3000},
220: {'pu_type': 'longer'},
230: {'pu_type': 'shoot', 'timer': 3000},
235: {'pu_type': 'longer'},
240: {'pu_type': 'shoot', 'timer': 3000},
250: {'pu_type': 'hotball', 'timer': 3000},
253: {'pu_type': 'hotball', 'timer': 2000},
255: {'pu_type': 'shoot', 'timer': 3000},
# 260: {'pu_type': 'metastasis', 'amount': 1},
261: {'pu_type': 'metastasis', 'amount': 2},
263: {'pu_type': 'shorter'},
268: {'pu_type': 'longer'},
270: {'pu_type': 'hotball', 'timer': 4000},
280: {'pu_type': 'shoot', 'timer': 3000},
290: {'pu_type': 'metastasis', 'amount': 2},
300: {'pu_type': 'shoot', 'timer': 3000},
310: {'pu_type': 'metastasis', 'amount': 2},
320: {'pu_type': 'shoot', 'timer': 3000},
330: {'pu_type': 'metastasis', 'amount': 2},
340: {'pu_type': 'shoot', 'timer': 3000},
# 350: {'pu_type': 'metastasis', 'amount': 2},
360: {'pu_type': 'shoot', 'timer': 3000},
370: {'pu_type': 'metastasis', 'amount': 2},
373: {'pu_type': 'shoot', 'timer': 3000},
},
'bonus_time': 300
},
1: {
'bricks': [' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' w w ',
' www www ',
' wwwwwwwww ',
' wwwwwwwww ',
' wwwwwwwww ',
' wwwwwwwww ',
' wwwwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwww ',
' wwwwwww ',
' wwwwwwwww ',
' wwwwwwwww ',
' wwwwwwwww ',
' wwwwwwwww ',
' www www ',
' w w ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' '],
'name': 'The Bone',
'powerups': {
2: {'pu_type': 'longer'},
5: {'pu_type': 'shoot', 'timer': 6000},
14: {'pu_type': 'metastasis', 'amount': 3},
20: {'pu_type': 'shorter'},
27: {'pu_type': 'metastasis', 'amount': 3},
33: {'pu_type': 'longer'},
40: {'pu_type': 'shoot', 'timer': 6000},
50: {'pu_type': 'hotball', 'timer': 3000},
55: {'pu_type': 'hotball', 'timer': 2000},
65: {'pu_type': 'longer'},
70: {'pu_type': 'metastasis', 'amount': 3},
80: {'pu_type': 'shoot', 'timer': 3000},
87: {'pu_type': 'shoot', 'timer': 1000},
89: {'pu_type': 'shoot', 'timer': 1000},
92: {'pu_type': 'shoot', 'timer': 1000},
96: {'pu_type': 'metastasis', 'amount': 3},
100: {'pu_type': 'metastasis', 'amount': 2},
104: {'pu_type': 'shoot', 'timer': 3000},
106: {'pu_type': 'shoot', 'timer': 1000},
108: {'pu_type': 'shoot', 'timer': 1000},
110: {'pu_type': 'hotball', 'timer': 3000},
115: {'pu_type': 'longer'},
119: {'pu_type': 'hotball', 'timer': 2000},
120: {'pu_type': 'hotball', 'timer': 2000},
122: {'pu_type': 'hotball', 'timer': 2000},
130: {'pu_type': 'metastasis', 'amount': 3},
140: {'pu_type': 'shoot', 'timer': 3000},
150: {'pu_type': 'metastasis', 'amount': 1},
160: {'pu_type': 'metastasis', 'amount': 3},
170: {'pu_type': 'shoot', 'timer': 3000},
180: {'pu_type': 'metastasis', 'amount': 3},
190: {'pu_type': 'metastasis', 'amount': 1},
200: {'pu_type': 'shorter'},
210: {'pu_type': 'shoot', 'timer': 3000},
216: {'pu_type': 'pack', 'amount': 1},
},
'bonus_time': 300
},
2: {
'bricks': [' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' r r ',
' rr rr rr rr ',
' rrrrrr rrrrrr ',
' rrrrrrrrrrrrrrr ',
' rr rrrrrrr rr ',
' r rrrrrrr r ',
' r rrrrrrr r ',
' rr rrrrrrr rr ',
' rr rrrrrrr rr ',
' rr rrrrrrr rr ',
' rr rrrrrrr rr ',
' r rrrrr r ',
' rrrrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' rrr ',
' r ',
' r ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' '],
'name': 'The Ovary',
'powerups': {
2: {'pu_type': 'shoot', 'timer': 4000},
5: {'pu_type': 'longer'},
14: {'pu_type': 'metastasis', 'amount': 2},
20: {'pu_type': 'shorter'},
27: {'pu_type': 'metastasis', 'amount': 2},
33: {'pu_type': 'longer'},
40: {'pu_type': 'shoot', 'timer': 6000},
50: {'pu_type': 'hotball', 'timer': 3000},
55: {'pu_type': 'hotball', 'timer': 2000},
65: {'pu_type': 'longer'},
70: {'pu_type': 'metastasis', 'amount': 2},
80: {'pu_type': 'shoot', 'timer': 3000},
# 87: {'pu_type': 'shoot', 'timer': 1000},
# 89: {'pu_type': 'shoot', 'timer': 1000},
92: {'pu_type': 'shoot', 'timer': 3000},
96: {'pu_type': 'metastasis', 'amount': 1},
100: {'pu_type': 'metastasis', 'amount': 2},
104: {'pu_type': 'shoot', 'timer': 3000},
166: {'pu_type': 'pack', 'amount': 1},
# 108: {'pu_type': 'shoot', 'timer': 1000},
110: {'pu_type': 'hotball', 'timer': 3000},
115: {'pu_type': 'longer'},
# 119: {'pu_type': 'hotball', 'timer': 2000},
120: {'pu_type': 'hotball', 'timer': 3000},
# 122: {'pu_type': 'hotball', 'timer': 2000},
130: {'pu_type': 'metastasis', 'amount': 1},
140: {'pu_type': 'shoot', 'timer': 3000},
150: {'pu_type': 'metastasis', 'amount': 1},
160: {'pu_type': 'metastasis', 'amount': 3},
},
'bonus_time': 300
},
3: {
'bricks': [' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' rrrrrrr ',
' rrrrrrrrr ',
' rrrrrrrrrrr ',
' rrrrrrrrrrr ',
' rrr rrr ',
' rrr rrr ',
' rrr rrr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rrr rrr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rrr rrr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rr ',
' rr rrr ',
' rr rrr ',
' rr rrrrrr ',
' rr rrrrrr ',
' r rrrrr ',
' rrrr ',
' rr ',
' rr ',
' r ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' '],
'name': 'The Large Bowel',
'powerups': {
2: {'pu_type': 'shoot', 'timer': 4000},
5: {'pu_type': 'longer'},
14: {'pu_type': 'metastasis', 'amount': 2},
20: {'pu_type': 'shorter'},
27: {'pu_type': 'metastasis', 'amount': 2},
33: {'pu_type': 'longer'},
40: {'pu_type': 'shoot', 'timer': 6000},
50: {'pu_type': 'hotball', 'timer': 4000},
65: {'pu_type': 'longer'},
70: {'pu_type': 'metastasis', 'amount': 2},
80: {'pu_type': 'shoot', 'timer': 3000},
92: {'pu_type': 'shoot', 'timer': 1000},
96: {'pu_type': 'metastasis', 'amount': 2},
100: {'pu_type': 'metastasis', 'amount': 2},
104: {'pu_type': 'shoot', 'timer': 3000},
110: {'pu_type': 'hotball', 'timer': 3000},
115: {'pu_type': 'longer'},
120: {'pu_type': 'hotball', 'timer': 3000},
130: {'pu_type': 'metastasis', 'amount': 1},
140: {'pu_type': 'shoot', 'timer': 3000},
150: {'pu_type': 'metastasis', 'amount': 2},
180: {'pu_type': 'pack', 'amount': 1},
190: {'pu_type': 'metastasis', 'amount': 3},
},
'bonus_time': 300
},
4: {
'bricks': [' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' rrr rrr ',
' rrrr rrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrr rrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrr rrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrrr rrrrr ',
' rrrr rrrr ',
' rrr rrr ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' '],
'name': 'The Kidneys',
'powerups': {
1: {'pu_type': 'longer'},
3: {'pu_type': 'pack', 'amount': 1},
5: {'pu_type': 'shoot', 'timer': 5000},
20: {'pu_type': 'shorter'},
27: {'pu_type': 'metastasis', 'amount': 2},
33: {'pu_type': 'longer'},
40: {'pu_type': 'shoot', 'timer': 5000},
50: {'pu_type': 'hotball', 'timer': 2000},
51: {'pu_type': 'hotball', 'timer': 2000},
60: {'pu_type': 'hotball', 'timer': 2000},
65: {'pu_type': 'longer'},
70: {'pu_type': 'metastasis', 'amount': 1},
80: {'pu_type': 'shoot', 'timer': 3000},
90: {'pu_type': 'metastasis', 'amount': 1},
100: {'pu_type': 'shoot', 'timer': 3000},
108: {'pu_type': 'shoot', 'timer': 1000},
110: {'pu_type': 'hotball', 'timer': 3000},
120: {'pu_type': 'hotball', 'timer': 4000},
130: {'pu_type': 'metastasis', 'amount': 1},
140: {'pu_type': 'shoot', 'timer': 3000},
150: {'pu_type': 'metastasis', 'amount': 1},
160: {'pu_type': 'metastasis', 'amount': 1},
170: {'pu_type': 'shoot', 'timer': 3000},
180: {'pu_type': 'metastasis', 'amount': 1},
190: {'pu_type': 'shorter'},
200: {'pu_type': 'metastasis', 'amount': 1},
210: {'pu_type': 'shoot', 'timer': 3000},
220: {'pu_type': 'longer'},
230: {'pu_type': 'shoot', 'timer': 3000},
240: {'pu_type': 'shoot', 'timer': 3000},
250: {'pu_type': 'hotball', 'timer': 3000},
260: {'pu_type': 'metastasis', 'amount': 1},
263: {'pu_type': 'shorter'},
268: {'pu_type': 'longer'},
270: {'pu_type': 'hotball', 'timer': 4000},
280: {'pu_type': 'shoot', 'timer': 3000},
290: {'pu_type': 'metastasis', 'amount': 1},
300: {'pu_type': 'shoot', 'timer': 3000},
310: {'pu_type': 'metastasis', 'amount': 1},
320: {'pu_type': 'shoot', 'timer': 3000},
330: {'pu_type': 'metastasis', 'amount': 2},
340: {'pu_type': 'shoot', 'timer': 3000},
},
'bonus_time': 300
},
5: {
'bricks': [' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' r r ',
' rrrr rrrr ',
' rrrr rrrr ',
' rrrrrr rrrrrr ',
' rrrrrr rrrrrr ',
' rrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrrrr ',
' rrrrrrrrrrrrr ',
' rrrrrrrrrrrrr ',
' rrrrrrrrrrrrr ',
' rrrrrrrrrrr ',
' rrrrrrrrrrr ',
' rrrrrrrrr ',
' rrrrrrrrr ',
' rrrrrrr ',
' rrrrrrr ',
' rrrrr ',
' rrrrr ',
' rrr ',
' rrr ',
' r ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' ',
' '],
'name': 'The Heart',
'powerups': {
1: {'pu_type': 'longer'},
3: {'pu_type': 'shorter'},
5: {'pu_type': 'shoot', 'timer': 2000},
20: {'pu_type': 'shorter'},
27: {'pu_type': 'metastasis', 'amount': 3},
33: {'pu_type': 'longer'},
40: {'pu_type': 'shoot', 'timer': 6000},
50: {'pu_type': 'hotball', 'timer': 2000},
# 51: {'pu_type': 'hotball', 'timer': 2000},
60: {'pu_type': 'hotball', 'timer': 2000},
# 65: {'pu_type': 'longer'},
70: {'pu_type': 'metastasis', 'amount': 1},
80: {'pu_type': 'shoot', 'timer': 3000},
# 87: {'pu_type': 'shoot', 'timer': 1000},
# 89: {'pu_type': 'shoot', 'timer': 1000},
92: {'pu_type': 'shoot', 'timer': 3000},
# 96: {'pu_type': 'metastasis', 'amount': 3},
100: {'pu_type': 'metastasis', 'amount': 1},
# 104: {'pu_type': 'shoot', 'timer': 3000},
# 106: {'pu_type': 'shoot', 'timer': 1000},
# 108: {'pu_type': 'shoot', 'timer': 1000},
110: {'pu_type': 'hotball', 'timer': 3000},
115: {'pu_type': 'longer'},
# 119: {'pu_type': 'hotball', 'timer': 2000},
# 120: {'pu_type': 'hotball', 'timer': 3000},
# 122: {'pu_type': 'hotball', 'timer': 2000},
130: {'pu_type': 'metastasis', 'amount': 1},
140: {'pu_type': 'shoot', 'timer': 3000},
150: {'pu_type': 'metastasis', 'amount': 1},
160: {'pu_type': 'metastasis', 'amount': 1},
# 170: {'pu_type': 'shoot', 'timer': 3000},
180: {'pu_type': 'metastasis', 'amount': 1},
190: {'pu_type': 'metastasis', 'amount': 1},
200: {'pu_type': 'shorter'},
210: {'pu_type': 'shoot', 'timer': 3000},
220: {'pu_type': 'longer'},
# 230: {'pu_type': 'shoot', 'timer': 3000},
# 235: {'pu_type': 'longer'},
240: {'pu_type': 'shoot', 'timer': 3000},
250: {'pu_type': 'hotball', 'timer': 3000},
# 253: {'pu_type': 'hotball', 'timer': 2000},
# 255: {'pu_type': 'shoot', 'timer': 3000},
260: {'pu_type': 'metastasis', 'amount': 1},
# 261: {'pu_type': 'metastasis', 'amount': 2},
263: {'pu_type': 'shorter'},
# 268: {'pu_type': 'longer'},
270: {'pu_type': 'hotball', 'timer': 4000},
# 280: {'pu_type': 'shoot', 'timer': 3000},
290: {'pu_type': 'metastasis', 'amount': 1},
# 300: {'pu_type': 'shoot', 'timer': 3000},
# 310: {'pu_type': 'metastasis', 'amount': 2},
320: {'pu_type': 'shoot', 'timer': 3000},
330: {'pu_type': 'metastasis', 'amount': 1},
340: {'pu_type': 'shoot', 'timer': 3000},
350: {'pu_type': 'hotball', 'timer': 4000},
360: {'pu_type': 'metastasis', 'amount': 4},
370: {'pu_type': 'metastasis', 'amount': 4},
380: {'pu_type': 'metastasis', 'amount': 4}
},
'bonus_time': 300
}
}
class Level:
    def __init__(self, no):
        self.no = no
        level = _levels.get(no)
        if level is None:
            # fail early with a clear message; every lookup below would otherwise raise AttributeError
            raise KeyError('Level {} not found.'.format(no))
        self.bricks = level.get('bricks')
        self.powerups = level.get('powerups')
        self.bonus_time = level.get('bonus_time')

def get_total_levels():
    return len(_levels)
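A minimal usage sketch for the Level helper above; the printed brick-row count depends on the level layout, while the level count follows directly from the `_levels` dict:
level = Level(0)  # 'The Lungs'
print(level.no, level.bonus_time, len(level.bricks))  # level number, bonus time, brick rows
print(get_total_levels())  # 6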
| 42.305761
| 59
| 0.275793
| 1,828
| 28,641
| 4.175602
| 0.075492
| 0.191799
| 0.11673
| 0.169789
| 0.881567
| 0.836368
| 0.825364
| 0.824053
| 0.800734
| 0.789598
| 0
| 0.100025
| 0.572745
| 28,641
| 676
| 60
| 42.368343
| 0.523739
| 0.040327
| 0
| 0.876369
| 0
| 0
| 0.381906
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00313
| false
| 0
| 0
| 0.001565
| 0.00626
| 0.001565
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2c31a72df76d13769b4d03f601d00ba7cfe513e9
| 3,679
|
py
|
Python
|
tests/test_egi.py
|
Open-EO/openeo-aggregator
|
88bc1ef44b700dd1b13b29e6c50eda0f197280eb
|
[
"Apache-2.0"
] | 1
|
2021-05-19T10:17:45.000Z
|
2021-05-19T10:17:45.000Z
|
tests/test_egi.py
|
Open-EO/openeo-aggregator
|
88bc1ef44b700dd1b13b29e6c50eda0f197280eb
|
[
"Apache-2.0"
] | 32
|
2021-07-19T13:11:26.000Z
|
2022-03-24T16:42:05.000Z
|
tests/test_egi.py
|
Open-EO/openeo-aggregator
|
88bc1ef44b700dd1b13b29e6c50eda0f197280eb
|
[
"Apache-2.0"
] | 1
|
2021-10-21T08:20:12.000Z
|
2021-10-21T08:20:12.000Z
|
import pytest

from openeo_aggregator.egi import parse_eduperson_entitlement, Entitlement, is_early_adopter, is_free_tier


def test_parse_eduperson_entitlement():
    assert parse_eduperson_entitlement(
        "urn:mace:egi.eu:group:vo.openeo.cloud#aai.egi.eu"
    ) == Entitlement(
        namespace="urn:mace:egi.eu", vo="vo.openeo.cloud", group=None, role=None, authority="aai.egi.eu"
    )
    assert parse_eduperson_entitlement(
        "urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu"
    ) == Entitlement(
        namespace="urn:mace:egi.eu", vo="vo.openeo.cloud", group=None, role="early_adopter", authority="aai.egi.eu"
    )
    assert parse_eduperson_entitlement(
        "urn:mace:egi.eu:group:vo.openeo.cloud:vo.openeo-sub.cloud:role=early_adopter#aai.egi.eu"
    ) == Entitlement(
        namespace="urn:mace:egi.eu", vo="vo.openeo.cloud", group="vo.openeo-sub.cloud", role="early_adopter",
        authority="aai.egi.eu"
    )
    assert parse_eduperson_entitlement(
        "urn:mace:egi-dev.eu:group:vo.openeo-dev.cloud:vo.openeo-sub.cloud:role=Early-Adop.ter#aai.egi-dev.eu"
    ) == Entitlement(
        namespace="urn:mace:egi-dev.eu", vo="vo.openeo-dev.cloud", group="vo.openeo-sub.cloud", role="Early-Adop.ter",
        authority="aai.egi-dev.eu"
    )
    assert parse_eduperson_entitlement(
        "urn:mace:egi.eu:group:openEO_test:education_package.openEO_test:admins:role=member#aai.egi.eu"
    ) == Entitlement(
        namespace="urn:mace:egi.eu", vo="openEO_test", group="education_package.openEO_test:admins", role="member",
        authority="aai.egi.eu"
    )


def test_parse_eduperson_entitlement_strict():
    with pytest.raises(ValueError, match="Failed to parse"):
        parse_eduperson_entitlement("foobar")


def test_parse_eduperson_entitlement_loose():
    e = parse_eduperson_entitlement("foobar", strict=False)
    assert e == Entitlement(None, None, None, None, None)


def test_is_early_adopter():
    assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu")
    assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=Early_Adopter#aai.egi.eu")
    assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=Early-Adopter#aai.egi.eu")
    assert is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=EarlyAdopter#aai.egi.eu")
    assert not is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud#aai.egi.eu")
    assert not is_early_adopter("urn:mace:uho.ai:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu")
    assert not is_early_adopter("urn:mace:egi.eu:group:vo.kleurenwiezen.be:role=early_adopter#aai.egi.eu")
    assert not is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=member#aai.egi.eu")
    assert not is_early_adopter("urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#ooi.egi.eu")
    assert not is_early_adopter("foobar")
    assert not is_early_adopter("")


def test_is_free_tier():
    assert is_free_tier("urn:mace:egi.eu:group:vo.openeo.cloud:role=early_adopter#aai.egi.eu")
    assert is_free_tier("urn:mace:egi.eu:group:vo.openeo.cloud:role=free_tier#aai.egi.eu")
    assert is_free_tier("urn:mace:egi.eu:group:vo.openeo.cloud:role=free-tier#aai.egi.eu")
    assert is_free_tier("urn:mace:egi.eu:group:vo.openeo.cloud#aai.egi.eu")
    assert not is_free_tier("urn:mace:uho.ai:group:vo.openeo.cloud:role=free_tier#aai.egi.eu")
    assert not is_free_tier("urn:mace:egi.eu:group:vo.kleurenwiezen.be:role=free_tier#aai.egi.eu")
    assert not is_free_tier("urn:mace:egi.eu:group:vo.openeo.cloud:role=free_tier#ooi.egi.eu")
    assert not is_free_tier("foobar")
    assert not is_free_tier("")
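The tests above pin down the eduPersonEntitlement URN grammar: a namespace, the literal "group", a VO name, an optional group path, an optional "role=..." segment, and an "#authority" suffix. As a rough illustration of that grammar (a hypothetical sketch, not the openeo_aggregator.egi implementation), a regex-based parser could look like this:

import re
from collections import namedtuple

# Hypothetical stand-in for openeo_aggregator.egi.Entitlement.
SketchEntitlement = namedtuple("SketchEntitlement", ["namespace", "vo", "group", "role", "authority"])

_ENTITLEMENT_RE = re.compile(
    r"^(?P<namespace>urn:mace:[a-z0-9._-]+)"
    r":group:(?P<vo>[a-zA-Z0-9._-]+)"
    r"(?::(?P<group>[a-zA-Z0-9._:-]+?))?"
    r"(?::role=(?P<role>[a-zA-Z0-9._-]+))?"
    r"#(?P<authority>[a-z0-9._-]+)$"
)

def parse_entitlement_sketch(value):
    """Parse an entitlement URN, raising ValueError like the strict mode tested above."""
    m = _ENTITLEMENT_RE.match(value)
    if not m:
        raise ValueError("Failed to parse {!r}".format(value))
    return SketchEntitlement(**m.groupdict())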
# =============================================================================
# File: test/test_SepiaLogLik.py | Repo: lanl/SEPIA | License: BSD-3-Clause
# =============================================================================
import unittest

import numpy as np

import generate_data
from sepia.SepiaData import SepiaData
from sepia.SepiaModel import SepiaModel

np.random.seed(42)


class SepiaLogLikTestCase(unittest.TestCase):

    def setUp(self, m=100, n=1, nt_sim=50, nt_obs=20, n_theta=3, n_basis=5, sig_n=0.1, seed=42):
        self.multi_data_dict = generate_data.generate_multi_sim_and_obs(m=m, n=n, nt_sim=nt_sim, nt_obs=nt_obs,
                                                                        n_theta=n_theta, n_basis=n_basis,
                                                                        sig_n=sig_n, seed=seed)
        self.univ_data_dict = generate_data.generate_univ_sim_and_obs(m=m, n=n, sig_n=sig_n, seed=seed)

    def test_univariate_sim_only_lik(self):
        """
        Tests log lik for univariate sim only model
        """
        d = SepiaData(t_sim=self.univ_data_dict['t_sim'], y_sim=self.univ_data_dict['y_sim'])
        print('Testing univariate sim-only SepiaLogLik...', flush=True)
        print(d, flush=True)
        d.transform_xt()
        d.standardize_y()
        model = SepiaModel(d)
        model.logLik()
        for param in model.params.mcmcList:
            for cindex in range(int(np.prod(param.val_shape))):
                model.logLik(cvar=param.name, cindex=cindex)

    def test_univariate_sim_and_obs_lik(self):
        """
        Tests log lik for univariate sim and obs model
        """
        d = SepiaData(t_sim=self.univ_data_dict['t_sim'], y_sim=self.univ_data_dict['y_sim'],
                      y_obs=self.univ_data_dict['y_obs'])
        print('Testing univariate sim and obs SepiaLogLik...', flush=True)
        print(d, flush=True)
        # Do explicit transformation
        d.transform_xt()
        d.standardize_y()
        model = SepiaModel(d)
        model.logLik()
        for param in model.params.mcmcList:
            for cindex in range(int(np.prod(param.val_shape))):
                model.logLik(cvar=param.name, cindex=cindex)

    def test_multivariate_sim_only_lik(self):
        """
        Tests log lik for multivariate sim only model
        """
        d = SepiaData(t_sim=self.multi_data_dict['t_sim'], y_sim=self.multi_data_dict['y_sim'],
                      y_ind_sim=self.multi_data_dict['y_ind_sim'])
        print('Testing multivariate sim-only SepiaLogLik...', flush=True)
        print(d, flush=True)
        d.transform_xt()
        d.standardize_y()
        d.create_K_basis(5)
        model = SepiaModel(d)
        model.logLik()
        for param in model.params.mcmcList:
            for cindex in range(int(np.prod(param.val_shape))):
                model.logLik(cvar=param.name, cindex=cindex)

    def test_multivariate_sim_and_obs_lik(self):
        """
        Tests log lik for multivariate sim and obs model
        """
        d = SepiaData(t_sim=self.multi_data_dict['t_sim'], y_sim=self.multi_data_dict['y_sim'],
                      y_ind_sim=self.multi_data_dict['y_ind_sim'], y_obs=self.multi_data_dict['y_obs'],
                      y_ind_obs=self.multi_data_dict['y_ind_obs'])
        print('Testing multivariate sim and obs SepiaLogLik...', flush=True)
        print(d, flush=True)
        d.transform_xt()
        d.standardize_y()
        d.create_K_basis(5)
        d.create_D_basis('linear')
        model = SepiaModel(d)
        model.logLik()
        for param in model.params.mcmcList:
            for cindex in range(int(np.prod(param.val_shape))):
                model.logLik(cvar=param.name, cindex=cindex)

    def test_multivariate_sim_and_obs_noD_lik(self):
        """
        Tests log lik for multivariate sim and obs model no discrep
        """
        d = SepiaData(t_sim=self.multi_data_dict['t_sim'], y_sim=self.multi_data_dict['y_sim'],
                      y_ind_sim=self.multi_data_dict['y_ind_sim'], y_obs=self.multi_data_dict['y_obs'],
                      y_ind_obs=self.multi_data_dict['y_ind_obs'])
        print('Testing multivariate sim and obs (no discrepancy) SepiaLogLik...', flush=True)
        print(d, flush=True)
        d.transform_xt()
        d.standardize_y()
        d.create_K_basis(5)
        model = SepiaModel(d)
        model.logLik()
        for param in model.params.mcmcList:
            for cindex in range(int(np.prod(param.val_shape))):
                model.logLik(cvar=param.name, cindex=cindex)
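Every test above ends with the same sweep: one full log-likelihood evaluation, then one re-evaluation per scalar component of each sampled parameter. As a hypothetical refactor (not part of the SEPIA test suite; it assumes only the SepiaModel API already used above), that repetition could be hoisted into a helper:

def check_loglik_all_components(model):
    """Evaluate logLik for the whole model, then once per parameter component."""
    model.logLik()
    for param in model.params.mcmcList:
        # val_shape may be multi-dimensional; np.prod counts the scalar components.
        for cindex in range(int(np.prod(param.val_shape))):
            model.logLik(cvar=param.name, cindex=cindex)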
# =============================================================================
# File: data/__init__.py | Repo: huangzh13/ReID.template.pytorch | License: MIT
# =============================================================================
"""
-------------------------------------------------
File Name: __init__.py.py
Author: Zhonghao Huang
Date: 2019/9/2
Description:
-------------------------------------------------
"""
from torch.utils.data import DataLoader
from data.cuhk01 import CUHK01
from data.dataset import ReIDDataset, ImageData
from data.transforms import TrainTransform, TestTransform
from data.samplers import RandomIdentitySampler
def make_loader_flip(cfg):
_data = ReIDDataset(dataset_dir=cfg.DATASETS.NAME, root=cfg.DATASETS.ROOT)
query_flip_loader = DataLoader(ImageData(_data.query, TestTransform(flip=True)),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True)
gallery_flip_loader = DataLoader(ImageData(_data.gallery, TestTransform(flip=True)),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True)
return query_flip_loader, gallery_flip_loader
def make_loader(cfg):
_data = ReIDDataset(dataset_dir=cfg.DATASETS.NAME, root=cfg.DATASETS.ROOT)
num_train_pids = _data.num_train_pids
train_loader = DataLoader(ImageData(_data.train, TrainTransform(p=0.5)),
sampler=RandomIdentitySampler(_data.train, cfg.DATALOADER.NUM_INSTANCES),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True, drop_last=True)
query_loader = DataLoader(ImageData(_data.query, TestTransform(flip=False)),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True)
gallery_loader = DataLoader(ImageData(_data.gallery, TestTransform(flip=False)),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True)
return train_loader, query_loader, gallery_loader, num_train_pids
def make_loader_cuhk01(cfg):
_data = CUHK01(root=cfg.DATASETS.ROOT)
num_train_pids = _data.num_train_pids
train_loader = DataLoader(ImageData(_data.train, TrainTransform(p=0.5)),
sampler=RandomIdentitySampler(_data.train, cfg.DATALOADER.NUM_INSTANCES),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True, drop_last=True)
query_loader = DataLoader(ImageData(_data.query, TestTransform(flip=False)),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True)
gallery_loader = DataLoader(ImageData(_data.gallery, TestTransform(flip=False)),
batch_size=cfg.DATALOADER.BATCH_SIZE, num_workers=cfg.DATALOADER.NUM_WORKERS,
pin_memory=True)
return train_loader, query_loader, gallery_loader, num_train_pids
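A minimal usage sketch of make_loader, assuming a yacs-style CfgNode with the DATASETS and DATALOADER fields read above (the concrete values here are hypothetical):

from yacs.config import CfgNode as CN

# Hypothetical configuration; only the fields make_loader actually reads are set.
cfg = CN()
cfg.DATASETS = CN()
cfg.DATASETS.NAME = 'market1501'
cfg.DATASETS.ROOT = '/data/reid'
cfg.DATALOADER = CN()
cfg.DATALOADER.BATCH_SIZE = 64
cfg.DATALOADER.NUM_WORKERS = 4
cfg.DATALOADER.NUM_INSTANCES = 4

train_loader, query_loader, gallery_loader, num_train_pids = make_loader(cfg)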
# =============================================================================
# File: 01_Language/01_Functions/python/log10.py | Repo: cliff363825/TwentyFour | License: MIT
# =============================================================================
# coding: utf-8
import math


def log10(arg):
    return math.log10(arg)
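For example, the wrapper behaves exactly like the standard-library function it forwards to:

print(log10(1000))  # 3.0
print(log10(100))   # 2.0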
# =============================================================================
# File: hourglass.py | Repo: Paul-C137/Hourglass | License: MIT
# =============================================================================
###############################################################################
# Title: hourglass.py #
# Author: Paul Lack #
# Date Created: 4/2/21 #
# #
# Purpose: The purpose of this program is to graphically simulate an hour- #
# glass on a terminal screen. The user can enter a number of #
# minutes from 1-60. Falling 'sand' shows it is working. The sand#
# levels change at quarterly intervals of the time selected. #
###############################################################################
# import system from os which allows the ability to clear the screen.
from os import system, name
# import sleep to allow 'frames' to persist for a specified time interval.
from time import sleep
# import datetime to allow storing necessary system time variables.
from datetime import datetime
###############################################################################
# Define the top of a full hourglass.
top_full= ['============================================================',
'|| ||',
'============================================================',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X .... .......X ',
' X ......... ........ ............X ',
' X .....................................X ',
' X .................................X ',
' X ...........................X ',
' X .....................X ',
' X ...............X ',
' X ...........X ',
' X .......X ',
' X ...X ',
' X .X ']
#################################################################################
# Define the top of an almost full hourglass.
top_3Q_full= ['============================================================',
'|| ||',
'============================================================',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X .. .....X ',
' X .....................................X ',
' X .................................X ',
' X ...........................X ',
' X .....................X ',
' X ...............X ',
' X ...........X ',
' X .......X ',
' X ...X ',
' X .X ']
#################################################################################
# Define the top of a half full hourglass.
top_2Q_full= ['============================================================',
'|| ||',
'============================================================',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X ..... X ',
' X .....................X ',
' X ...............X ',
' X ...........X ',
' X .......X ',
' X ...X ',
' X .X ']
#################################################################################
# Define the top of a mostly empty hourglass.
top_1Q_full= ['============================================================',
'|| ||',
'============================================================',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X ...........X ',
' X .......X ',
' X ...X ',
' X .X ']
#################################################################################
# Define the top of a completely empty hourglass.
top_finished= ['============================================================',
'|| ||',
'============================================================',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X FINISHED! X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a mostly empty hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to 'draw' it
# correctly using a list. Shows the first of three falling sand 'animations'.
bottom_empty_1= ['============================================================',
'|| ||',
'============================================================',
' X .................. X ',
' X ............ X ',
' X .... X ',
' X . X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a mostly empty hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to 'draw' it
# correctly using a list. Shows the second of three falling sand 'animations'.
bottom_empty_2= ['============================================================',
'|| ||',
'============================================================',
' X .................. X ',
' X ............ X ',
' X .... X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a mostly empty hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to 'draw' it
# correctly using a list. Shows the last of three falling sand 'animations'.
bottom_empty_3= ['============================================================',
'|| ||',
'============================================================',
' X .................. X ',
' X ............ X ',
' X .... X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a roughly one-quarter-full hourglass in an inverted
# format with falling sand. It is easier to flip it programmatically than to
# draw it correctly. Shows the first of three falling sand 'animations'.
bottom_1Q_1= ['============================================================',
'|| ||',
'============================================================',
' X .......................... X ',
' X ................. X ',
' X ........... X ',
' X ..... X ',
' X ... X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a roughly one-quarter-full hourglass in an inverted
# format with falling sand. It is easier to flip it programmatically than to
# draw it correctly. Shows the second of three falling sand 'animations'.
bottom_1Q_2= ['============================================================',
'|| ||',
'============================================================',
' X .......................... X ',
' X ................. X ',
' X ........... X ',
' X ..... X ',
' X ... X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a roughly one-quarter-full hourglass in an inverted
# format with falling sand. It is easier to flip it programmatically than to
# draw it correctly. Shows the last of three falling sand 'animations'.
bottom_1Q_3= ['============================================================',
'|| ||',
'============================================================',
' X .......................... X ',
' X ................. X ',
' X ........... X ',
' X ..... X ',
' X ... X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a half empty hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to draw it
# correctly. Shows the first of three falling sand 'animations'.
bottom_2Q_1= ['============================================================',
'|| ||',
'============================================================',
' X .......................................................X ',
' X ............................................ X ',
' X ............................... X ',
' X .................... X ',
' X ........... X ',
' X ...... X ',
' X ... X ',
' X . X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a half empty hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to draw it
# correctly. Shows the second of three falling sand 'animations'.
bottom_2Q_2= ['============================================================',
'|| ||',
'============================================================',
' X .......................................................X ',
' X ............................................ X ',
' X ............................... X ',
' X .................... X ',
' X ........... X ',
' X ...... X ',
' X ... X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a half empty hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to draw it
# correctly. Shows the last of three falling sand 'animations'.
bottom_2Q_3= ['============================================================',
'|| ||',
'============================================================',
' X .......................................................X ',
' X ............................................ X ',
' X ............................... X ',
' X .................... X ',
' X ........... X ',
' X ...... X ',
' X ... X ',
' X . X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a mostly full hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to draw it
# correctly. Shows the first of three falling sand 'animations'.
bottom_3Q_1= ['============================================================',
'|| ||',
'============================================================',
' X .......................................................X ',
' X .......................................................X ',
' X .................................................. X ',
' X ...................................... X ',
' X .......................... X ',
' X ................ X ',
' X .......... X ',
' X ...... X ',
' X ... X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a mostly full hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to draw it
# correctly. Shows the second of three falling sand 'animations'.
bottom_3Q_2= ['============================================================',
'|| ||',
'============================================================',
' X .......................................................X ',
' X .......................................................X ',
' X .................................................. X ',
' X ...................................... X ',
' X .......................... X ',
' X ................ X ',
' X .......... X ',
' X ...... X ',
' X ... X ',
' X . X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a mostly full hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to draw it
# correctly. Shows the last of three falling sand 'animations'.
bottom_3Q_3= ['============================================================',
'|| ||',
'============================================================',
' X .......................................................X ',
' X .......................................................X ',
' X .................................................. X ',
' X ...................................... X ',
' X .......................... X ',
' X ................ X ',
' X .......... X ',
' X ...... X ',
' X ... X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X . X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Define the bottom of a full hourglass in an inverted format with
# falling sand. It is easier to flip it programmatically than to draw it
# correctly.
bottom_full= ['============================================================',
'|| ||',
'============================================================',
' X .......................................................X ',
' X .......................................................X ',
' X .......................................................X ',
' X ...................................................... X ',
' X......................................................X ',
' X .......................................... X ',
' X ........................... X ',
' X ............. X ',
' X ...... X ',
' X .. X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ',
' X X ']
#################################################################################
# Function Name: clear() #
# Purpose: Clears the terminal window for Windows, Linux, or Mac users. #
#################################################################################
def clear():
    # for windows
    if name == 'nt':
        _ = system('cls')
    # for mac and linux(here, os.name is 'posix')
    else:
        _ = system('clear')
#################################################################################
# Function Name: start_frame_1() #
# Purpose: Displays the first frame of the first quarter interval. #
#################################################################################
def start_frame_1():
    # Print each string in the 'top_full' list to the console.
    for line in top_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_empty_1' list from the end of the list in reverse order.
    length=len(bottom_empty_1)
    x=0
    for line in bottom_empty_1:
        print(bottom_empty_1[length-1-x])
        x=x+1
#################################################################################
# Function Name: start_frame_2() #
# Purpose: Displays the second frame of the first quarter interval. #
#################################################################################
def start_frame_2():
    # Print each string in the 'top_full' list to the console.
    for line in top_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_empty_2' list from the end of the list in reverse order.
    length=len(bottom_empty_2)
    x=0
    for line in bottom_empty_2:
        print(bottom_empty_2[length-1-x])
        x=x+1
#################################################################################
# Function Name: start_frame_3() #
# Purpose: Displays the third frame of the first quarter interval. #
#################################################################################
def start_frame_3():
    # Print each string in the 'top_full' list to the console.
    for line in top_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_empty_3' list from the end of the list in reverse order.
    length=len(bottom_empty_3)
    x=0
    for line in bottom_empty_3:
        print(bottom_empty_3[length-1-x])
        x=x+1
#################################################################################
# Function Name: second_frame_1() #
# Purpose: Displays the first frame of the second interval. #
#################################################################################
def second_frame_1():
    # Print each string in the 'top_3Q_full' list to the console.
    for line in top_3Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_1Q_1' list from the end of the list in reverse order.
    length=len(bottom_1Q_1)
    x=0
    for line in bottom_1Q_1:
        print(bottom_1Q_1[length-1-x])
        x=x+1
#################################################################################
# Function Name: second_frame_2() #
# Purpose: Displays the second frame of the second interval. #
#################################################################################
def second_frame_2():
    # Print each string in the 'top_3Q_full' list to the console.
    for line in top_3Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_1Q_2' list from the end of the list in reverse order.
    length=len(bottom_1Q_2)
    x=0
    for line in bottom_1Q_2:
        print(bottom_1Q_2[length-1-x])
        x=x+1
#################################################################################
# Function Name: second_frame_3() #
# Purpose: Displays the third frame of the second interval. #
#################################################################################
def second_frame_3():
    # Print each string in the 'top_3Q_full' list to the console.
    for line in top_3Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_1Q_3' list from the end of the list in reverse order.
    length=len(bottom_1Q_3)
    x=0
    for line in bottom_1Q_3:
        print(bottom_1Q_3[length-1-x])
        x=x+1
#################################################################################
# Function Name: third_frame_1() #
# Purpose: Displays the first frame of the third interval. #
#################################################################################
def third_frame_1():
    # Print each string in the 'top_2Q_full' list to the console.
    for line in top_2Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_2Q_1' list from the end of the list in reverse order.
    length=len(bottom_2Q_1)
    x=0
    for line in bottom_2Q_1:
        print(bottom_2Q_1[length-1-x])
        x=x+1
#################################################################################
# Function Name: third_frame_2() #
# Purpose: Displays the second frame of the third interval. #
#################################################################################
def third_frame_2():
    # Print each string in the 'top_2Q_full' list to the console.
    for line in top_2Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_2Q_2' list from the end of the list in reverse order.
    length=len(bottom_2Q_2)
    x=0
    for line in bottom_2Q_2:
        print(bottom_2Q_2[length-1-x])
        x=x+1
#################################################################################
# Function Name: third_frame_3() #
# Purpose: Displays the third frame of the third interval. #
#################################################################################
def third_frame_3():
    # Print each string in the 'top_2Q_full' list to the console.
    for line in top_2Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_2Q_3' list from the end of the list in reverse order.
    length=len(bottom_2Q_3)
    x=0
    for line in bottom_2Q_3:
        print(bottom_2Q_3[length-1-x])
        x=x+1
#################################################################################
# Function Name: fourth_frame_1() #
# Purpose: Displays the first frame of the fourth interval. #
#################################################################################
def fourth_frame_1():
    # Print each string in the 'top_1Q_full' list to the console.
    for line in top_1Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_3Q_1' list from the end of the list in reverse order.
    length=len(bottom_3Q_1)
    x=0
    for line in bottom_3Q_1:
        print(bottom_3Q_1[length-1-x])
        x=x+1
#################################################################################
# Function Name: fourth_frame_2() #
# Purpose: Displays the second frame of the fourth interval. #
#################################################################################
def fourth_frame_2():
    # Print each string in the 'top_1Q_full' list to the console.
    for line in top_1Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_3Q_2' list from the end of the list in reverse order.
    length=len(bottom_3Q_2)
    x=0
    for line in bottom_3Q_2:
        print(bottom_3Q_2[length-1-x])
        x=x+1
#################################################################################
# Function Name: fourth_frame_3() #
# Purpose: Displays the third frame of the fourth interval. #
#################################################################################
def fourth_frame_3():
    # Print each string in the 'top_1Q_full' list to the console.
    for line in top_1Q_full:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_3Q_3' list from the end of the list in reverse order.
    length=len(bottom_3Q_3)
    x=0
    for line in bottom_3Q_3:
        print(bottom_3Q_3[length-1-x])
        x=x+1
#################################################################################
# Function Name: finished_frame() #
# Purpose: Displays the last frame of the entire sequence. #
#################################################################################
def finished_frame():
    # Print each string in the 'top_finished' list to the console.
    for line in top_finished:
        print(line)
    # Get the length of the list and use it to print each string in the
    # 'bottom_full' list from the end of the list in reverse order.
    length=len(bottom_full)
    x=0
    for line in bottom_full:
        print(bottom_full[length-1-x])
        x=x+1
###############################################################################
# Set the speed of the falling sand.
speed=.25
# Get the number of minutes from the user (1-60 are valid).
time_set = input('Enter the number of minutes from 1-60. ')
# Create variables to hold all the necessary time variables.
# 'interval_time' divides the user input time into four segments for use later.
interval_time=int(time_set)/4
now = datetime.now()
start_hour = int(now.strftime("%H"))
start_minute = int(now.strftime("%M"))
start_second = int(now.strftime("%S"))
first_quarter_end_second = int(now.strftime("%S"))
# Set all the interval end times correctly depending on the value the user
# entered. When minutes > 60, roll minutes back to 0 and increment the hour.
if start_minute+interval_time<60:
    first_quarter_end_minute=start_minute+interval_time
    first_quarter_end_hour=start_hour
else:
    first_quarter_end_minute=start_minute+interval_time-60
    first_quarter_end_hour=start_hour+1
second_quarter_end_second = int(now.strftime("%S"))
if start_minute+(interval_time*2)<60:
    second_quarter_end_minute=start_minute+(interval_time*2)
    second_quarter_end_hour=start_hour
else:
    second_quarter_end_minute=start_minute+(interval_time*2)-60
    second_quarter_end_hour=start_hour+1
third_quarter_end_second = int(now.strftime("%S"))
if start_minute+(interval_time*3)<60:
    third_quarter_end_minute=start_minute+(interval_time*3)
    third_quarter_end_hour=start_hour
else:
    third_quarter_end_minute=start_minute+(interval_time*3)-60
    third_quarter_end_hour=start_hour+1
end_second = int(now.strftime("%S"))
if start_minute+int(time_set)<60:
    end_minute=start_minute+int(time_set)
    end_hour=start_hour
else:
    end_minute=start_minute+int(time_set)-60
    end_hour=start_hour+1
if int(time_set)>=1 and int(time_set)<=60:
    now_minute = int(now.strftime("%M"))
    now_second = int(now.strftime("%S"))
    # Run the appropriate three frame sequence while the current system time is
    # within the first of the four interval segments.
    while now_minute<=first_quarter_end_minute:
        clear()
        start_frame_1()
        sleep(speed)
        clear()
        start_frame_2()
        sleep(speed)
        clear()
        start_frame_3()
        sleep(speed)
        now_minute=now_minute+(3*.25)/60
    # Run the appropriate three frame sequence while the current system time is
    # within the second of the four interval segments.
    while now_minute<=second_quarter_end_minute:
        clear()
        second_frame_1()
        sleep(speed)
        clear()
        second_frame_2()
        sleep(speed)
        clear()
        second_frame_3()
        sleep(speed)
        now_minute=now_minute+(3*.25)/60
    # Run the appropriate three frame sequence while the current system time is
    # within the third of the four interval segments.
    while now_minute<=third_quarter_end_minute:
        clear()
        third_frame_1()
        sleep(speed)
        clear()
        third_frame_2()
        sleep(speed)
        clear()
        third_frame_3()
        sleep(speed)
        now_minute=now_minute+(3*.25)/60
    # Run the appropriate three frame sequence while the current system time is
    # within the last of the four interval segments.
    while now_minute<=end_minute:
        clear()
        fourth_frame_1()
        sleep(speed)
        clear()
        fourth_frame_2()
        sleep(speed)
        clear()
        fourth_frame_3()
        sleep(speed)
        now_minute=now_minute+(3*.25)/60
    # Display the final frame.
    finished_frame()
# Tell the user the entry was out of range. (Non-numeric input would already
# fail at the int() conversion above, so this only catches integers outside 1-60.)
else:
    print('Invalid entry. Goodbye.')
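The thirteen frame functions above differ only in which top and bottom lists they print, and each re-derives the same reverse-order index loop. A compact alternative (a hypothetical refactor using only the lists and behavior defined in this file) collapses them into one parameterized helper:

def draw_frame(top, bottom):
    """Print a top half, then a bottom half flipped vertically."""
    for line in top:
        print(line)
    # The bottom halves are defined upside down, so print them in reverse.
    for line in reversed(bottom):
        print(line)

# For example, the first frame of the first quarter becomes:
# draw_frame(top_full, bottom_empty_1)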
# =============================================================================
# File: tests/test_streamingresources.py | Repo: betcode-org/betfair | License: MIT
# =============================================================================
import unittest
from betfairlightweight.resources.streamingresources import (
CricketMatch,
MarketDefinition,
MarketDefinitionRunner,
Race,
RaceProgress,
RaceChange,
)
from tests.tools import create_mock_json
class TestMarketDefinition(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json(
"tests/resources/streaming_market_definition.json"
)
self.market_definition = MarketDefinition(**self.mock_response.json())
def test_init(self):
assert len(self.market_definition.runners) == 7
assert self.market_definition.bsp_market is True
assert self.market_definition.market_base_rate == 5
assert len(self.market_definition.key_line_definitions.key_line) == 2
def test_missing_open_date(self):
response_json = dict(self.mock_response.json())
response_json.pop("openDate")
market_definition = MarketDefinition(**response_json)
assert market_definition.open_date is None
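# Aside (hypothetical sketch, not the actual tests.tools implementation): the
# create_mock_json helper imported above presumably loads a JSON fixture and
# wraps it in a mock response object, along these lines:
#
#     import json
#     from unittest import mock
#
#     def create_mock_json(path):
#         # Return a mock response whose .json() yields the fixture payload.
#         with open(path) as f:
#             payload = json.load(f)
#         response = mock.Mock()
#         response.json.return_value = payload
#         return response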
class TestMarketDefinitionRunner(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json(
"tests/resources/streaming_market_definition.json"
)
market_definition = self.mock_response.json()
runner = market_definition["runners"][0]
self.market_definition_runner = MarketDefinitionRunner(**runner)
def test_init(self):
assert self.market_definition_runner.selection_id == 11131804
assert self.market_definition_runner.adjustment_factor == 44.323
assert self.market_definition_runner.sort_priority == 1
assert self.market_definition_runner.status == "ACTIVE"
assert self.market_definition_runner.removal_date is None
def test_str(self):
assert str(self.market_definition_runner) == "MarketDefinitionRunner: 11131804"
def test_repr(self):
assert repr(self.market_definition_runner) == "<MarketDefinitionRunner>"
class TestRace(unittest.TestCase):
def setUp(self):
self.mock_response = {
"mid": "1.123",
"id": "1234.56",
"rpc": {"hi": "world"},
"rrc": [{"test": "me"}],
}
self.race = Race(**self.mock_response)
def test_init(self):
assert self.race.market_id == "1.123"
assert self.race.race_id == "1234.56"
self.assertIsInstance(self.race.race_progress, RaceProgress)
self.assertIsInstance(self.race.race_runners[0], RaceChange)
class TestRaceProgress(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json("tests/resources/streaming_rcm.json")
self.race_progress = RaceProgress(**self.mock_response.json()["rc"][0]["rpc"])
def test_init(self):
assert self.race_progress.feed_time_epoch == 1518626674
assert self.race_progress.gate_name == "1f"
assert self.race_progress.sectional_time == 10.6
assert self.race_progress.running_time == 46.7
assert self.race_progress.speed == 17.8
assert self.race_progress.progress == 87.5
assert self.race_progress.order == [
7390417,
5600338,
11527189,
6395118,
8706072,
]
class TestRaceChange(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json("tests/resources/streaming_rcm.json")
self.race_change = RaceChange(**self.mock_response.json()["rc"][0]["rrc"][0])
def test_init(self):
assert self.race_change.feed_time_epoch == 1518626674
assert self.race_change.selection_id == 7390417
assert self.race_change.lat == 51.4189543
assert self.race_change.long == -0.4058491
assert self.race_change.speed == 17.8
assert self.race_change.progress == 2051
assert self.race_change.stride_frequency == 2.07
class TestCricketMatch0(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json("tests/resources/ccms/ccm0.json")
self.cricket_match = CricketMatch(**self.mock_response.json()["cc"][0])
def test_init(self):
assert self.cricket_match.event_id == "30610310"
assert self.cricket_match.market_id == "1.179676557"
assert self.cricket_match.fixture_info.expected_start_time == 1643295600000
assert self.cricket_match.fixture_info.fixture_status == "IN_RUNNING"
assert (
self.cricket_match.fixture_info.event_description
== "Karachi Kings v Multan Sultans, Pakistan Super League Match 1, from National Stadium"
)
assert self.cricket_match.fixture_info.max_overs == 20
assert self.cricket_match.fixture_info.event_status == "BALL_IN_PROGRESS"
assert self.cricket_match.home_team is None
assert self.cricket_match.away_team is None
assert self.cricket_match.match_stats.current_innings == 1
assert self.cricket_match.match_stats.innings_stats[0].innings_num == 1
assert (
self.cricket_match.match_stats.innings_stats[0].batting_team
== "Karachi Kings"
)
assert (
self.cricket_match.match_stats.innings_stats[0].bowling_team
== "Multan Sultans"
)
assert self.cricket_match.match_stats.innings_stats[0].innings_runs == 80
assert self.cricket_match.match_stats.innings_stats[0].innings_overs == "12.5"
assert self.cricket_match.match_stats.innings_stats[0].innings_wickets == 2
assert (
self.cricket_match.match_stats.batting_team_stats.team_name
== "Karachi Kings"
)
assert self.cricket_match.match_stats.batting_team_stats.bat_1_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_runs == 4
assert self.cricket_match.match_stats.batting_team_stats.bat_1_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_strike == 0
assert self.cricket_match.match_stats.batting_team_stats.bat_2_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_runs == 8
assert self.cricket_match.match_stats.batting_team_stats.bat_2_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_strike == 1
assert (
self.cricket_match.match_stats.bowling_team_stats.team_name
== "Multan Sultans"
)
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_wickets is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_wickets is None
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].incident_type
== "STRIKE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[0].value == "1"
assert self.cricket_match.incident_list_wrapper.incident_list[0].overs == "12.5"
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].actual_time
== 1643299013861
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].incident_type
== "STRIKE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[1].value == "1"
assert self.cricket_match.incident_list_wrapper.incident_list[1].overs == "12.4"
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].actual_time
== 1643298990238
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].incident_type
== "WIDE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[2].value == "1"
assert self.cricket_match.incident_list_wrapper.incident_list[2].overs == "12.3"
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].actual_time
== 1643298961522
)
class TestCricketMatch1(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json("tests/resources/ccms/ccm1.json")
self.cricket_match = CricketMatch(**self.mock_response.json()["cc"][0])
def test_init(self):
assert self.cricket_match.event_id == "30610359"
assert self.cricket_match.market_id == "1.179676556"
assert self.cricket_match.fixture_info.expected_start_time == 1643293800000
assert self.cricket_match.fixture_info.fixture_status == "IN_RUNNING"
assert (
self.cricket_match.fixture_info.event_description
== "World Giants v India Maharajas, Legends Cricket League Match 6, from Al Amerat Cricket Ground Oman Cricket (Ministry Turf 1)"
)
assert self.cricket_match.fixture_info.max_overs == 20
assert self.cricket_match.fixture_info.event_status == "BALL_IN_PROGRESS"
assert self.cricket_match.home_team is None
assert self.cricket_match.away_team is None
assert self.cricket_match.match_stats.current_innings == 1
assert self.cricket_match.match_stats.innings_stats[0].innings_num == 1
assert (
self.cricket_match.match_stats.innings_stats[0].batting_team
== "World Giants"
)
assert (
self.cricket_match.match_stats.innings_stats[0].bowling_team
== "India Maharajas"
)
assert self.cricket_match.match_stats.innings_stats[0].innings_runs == 186
assert self.cricket_match.match_stats.innings_stats[0].innings_overs == "15.5"
assert self.cricket_match.match_stats.innings_stats[0].innings_wickets == 3
assert (
self.cricket_match.match_stats.batting_team_stats.team_name
== "World Giants"
)
assert self.cricket_match.match_stats.batting_team_stats.bat_1_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_runs == 83
assert self.cricket_match.match_stats.batting_team_stats.bat_1_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_strike == 0
assert self.cricket_match.match_stats.batting_team_stats.bat_2_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_runs == 0
assert self.cricket_match.match_stats.batting_team_stats.bat_2_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_strike == 1
assert (
self.cricket_match.match_stats.bowling_team_stats.team_name
== "India Maharajas"
)
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_wickets is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_wickets is None
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].incident_type
== "OUT"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].qualifier_type
is None
)
assert self.cricket_match.incident_list_wrapper.incident_list[0].value is None
assert self.cricket_match.incident_list_wrapper.incident_list[0].overs == "15.5"
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].actual_time
== 1643298137862
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].incident_type
== "STRIKE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[1].value == "0"
assert self.cricket_match.incident_list_wrapper.incident_list[1].overs == "15.4"
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].actual_time
== 1643298112170
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].incident_type
== "STRIKE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[2].value == "1"
assert self.cricket_match.incident_list_wrapper.incident_list[2].overs == "15.3"
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].actual_time
== 1643298085471
)
class TestCricketMatch2(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json("tests/resources/ccms/ccm2.json")
self.cricket_match = CricketMatch(**self.mock_response.json()["cc"][0])
def test_init(self):
assert self.cricket_match.event_id == "30610280"
assert self.cricket_match.market_id == "1.179668557"
assert self.cricket_match.fixture_info.expected_start_time == 1643359200000
assert self.cricket_match.fixture_info.fixture_status == "IN_RUNNING"
assert (
self.cricket_match.fixture_info.event_description
== "TRAINING: Perth Scorchers v Sydney Sixers, Big Bash League Final, from Docklands Stadium"
)
assert self.cricket_match.fixture_info.max_overs == 20
assert self.cricket_match.fixture_info.event_status == "MATCH_STABLE"
assert self.cricket_match.home_team is None
assert self.cricket_match.away_team is None
assert self.cricket_match.match_stats.current_innings == 2
assert self.cricket_match.match_stats.innings_stats[0].innings_num == 2
assert (
self.cricket_match.match_stats.innings_stats[0].batting_team
== "Sydney Sixers"
)
assert (
self.cricket_match.match_stats.innings_stats[0].bowling_team
== "Perth Scorchers"
)
assert self.cricket_match.match_stats.innings_stats[0].innings_runs == 6
assert self.cricket_match.match_stats.innings_stats[0].innings_overs == "1.4"
assert self.cricket_match.match_stats.innings_stats[0].innings_wickets == 1
assert self.cricket_match.match_stats.innings_stats[1].innings_num == 1
assert (
self.cricket_match.match_stats.innings_stats[1].batting_team
== "Perth Scorchers"
)
assert (
self.cricket_match.match_stats.innings_stats[1].bowling_team
== "Sydney Sixers"
)
assert self.cricket_match.match_stats.innings_stats[1].innings_runs == 171
assert self.cricket_match.match_stats.innings_stats[1].innings_overs == "20"
assert self.cricket_match.match_stats.innings_stats[1].innings_wickets == 6
assert (
self.cricket_match.match_stats.batting_team_stats.team_name
== "Sydney Sixers"
)
assert self.cricket_match.match_stats.batting_team_stats.bat_1_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_runs == 0
assert self.cricket_match.match_stats.batting_team_stats.bat_1_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_strike == 1
assert self.cricket_match.match_stats.batting_team_stats.bat_2_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_runs == 3
assert self.cricket_match.match_stats.batting_team_stats.bat_2_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_strike == 0
assert (
self.cricket_match.match_stats.bowling_team_stats.team_name
== "Perth Scorchers"
)
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_wickets is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_wickets is None
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].participant_ref
== "Daniel Hughes"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].incident_type
== "STRIKE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[0].value == "0"
assert self.cricket_match.incident_list_wrapper.incident_list[0].innings == 2
assert self.cricket_match.incident_list_wrapper.incident_list[0].overs == "1.4"
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].actual_time
== 1643366603847
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].incident_type
== "OUT"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].qualifier_type
is None
)
assert self.cricket_match.incident_list_wrapper.incident_list[1].value is None
assert self.cricket_match.incident_list_wrapper.incident_list[1].overs == "1.3"
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].actual_time
== 1643366467184
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].incident_type
== "STRIKE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[2].value == "1"
assert self.cricket_match.incident_list_wrapper.incident_list[2].overs == "1.2"
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].actual_time
== 1643366432553
)
class TestCricketMatch3(unittest.TestCase):
def setUp(self):
self.mock_response = create_mock_json("tests/resources/ccms/ccm3.json")
self.cricket_match = CricketMatch(**self.mock_response.json()["cc"][0])
def test_init(self):
assert self.cricket_match.event_id == "30610280"
assert self.cricket_match.market_id == "1.179668557"
assert self.cricket_match.fixture_info.expected_start_time == 1643359200000
assert self.cricket_match.fixture_info.fixture_status == "IN_RUNNING"
assert (
self.cricket_match.fixture_info.event_description
== "TRAINING: Perth Scorchers v Sydney Sixers, Big Bash League Final, from Docklands Stadium"
)
assert self.cricket_match.fixture_info.max_overs == 20
assert self.cricket_match.fixture_info.event_status == "BALL_IN_PROGRESS"
assert self.cricket_match.home_team is None
assert self.cricket_match.away_team is None
assert self.cricket_match.match_stats.current_innings == 1
assert self.cricket_match.match_stats.innings_stats[0].innings_num == 1
assert (
self.cricket_match.match_stats.innings_stats[0].batting_team
== "Perth Scorchers"
)
assert (
self.cricket_match.match_stats.innings_stats[0].bowling_team
== "Sydney Sixers"
)
assert self.cricket_match.match_stats.innings_stats[0].innings_runs == 169
assert self.cricket_match.match_stats.innings_stats[0].innings_overs == "19.5"
assert self.cricket_match.match_stats.innings_stats[0].innings_wickets == 6
assert (
self.cricket_match.match_stats.batting_team_stats.team_name
== "Perth Scorchers"
)
assert self.cricket_match.match_stats.batting_team_stats.bat_1_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_runs == 74
assert self.cricket_match.match_stats.batting_team_stats.bat_1_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_1_strike == 1
assert self.cricket_match.match_stats.batting_team_stats.bat_2_name is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_runs == 1
assert self.cricket_match.match_stats.batting_team_stats.bat_2_balls is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_fours is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_sixes is None
assert self.cricket_match.match_stats.batting_team_stats.bat_2_strike == 0
assert (
self.cricket_match.match_stats.bowling_team_stats.team_name
== "Sydney Sixers"
)
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_1_wickets is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_name is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_overs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_runs is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_maidens is None
assert self.cricket_match.match_stats.bowling_team_stats.bowl_2_wickets is None
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].incident_type
== "NO_BALL"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[0].value == "2"
assert self.cricket_match.incident_list_wrapper.incident_list[0].overs == "19.5"
assert (
self.cricket_match.incident_list_wrapper.incident_list[0].actual_time
== 1643365137339
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].incident_type
== "WIDE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[1].value == "1"
assert self.cricket_match.incident_list_wrapper.incident_list[1].overs == "19.4"
assert (
self.cricket_match.incident_list_wrapper.incident_list[1].actual_time
== 1643365102771
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].participant_ref
is None
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].incident_type
== "STRIKE"
)
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].qualifier_type
== "RUNS"
)
assert self.cricket_match.incident_list_wrapper.incident_list[2].value == "1"
assert self.cricket_match.incident_list_wrapper.incident_list[2].overs == "19.3"
assert (
self.cricket_match.incident_list_wrapper.incident_list[2].actual_time
== 1643365029126
)
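# Illustrative sketch (not part of the original module): the setUp methods
# above depend on a create_mock_json helper. A minimal version, assuming it
# wraps a JSON fixture in a Mock whose .json() returns the parsed payload:
import json
from unittest import mock

def create_mock_json(path):
    # Load the fixture once and expose it through a requests-style .json().
    with open(path) as f:
        payload = json.load(f)
    response = mock.Mock()
    response.json.return_value = payload
    return response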
| 48
| 141
| 0.69514
| 3,636
| 28,272
| 5.053905
| 0.064081
| 0.142577
| 0.21158
| 0.286134
| 0.8947
| 0.871191
| 0.864171
| 0.851001
| 0.843165
| 0.837669
| 0
| 0.034091
| 0.223932
| 28,272
| 588
| 142
| 48.081633
| 0.803427
| 0
| 0
| 0.605027
| 0
| 0.001795
| 0.050403
| 0.011708
| 0
| 0
| 0
| 0
| 0.482944
| 1
| 0.037702
| false
| 0
| 0.005386
| 0
| 0.059246
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9f7a240ba32ec05774b2b1d7ff19b73a2fc8799d
| 732
|
py
|
Python
|
hw2/q8_run_ablation.py
|
hsilva664/Reinforcement_Learning_Course
|
6a250bc017965bec76b6fe909068e40127e62fa7
|
[
"MIT"
] | null | null | null |
hw2/q8_run_ablation.py
|
hsilva664/Reinforcement_Learning_Course
|
6a250bc017965bec76b6fe909068e40127e62fa7
|
[
"MIT"
] | null | null | null |
hw2/q8_run_ablation.py
|
hsilva664/Reinforcement_Learning_Course
|
6a250bc017965bec76b6fe909068e40127e62fa7
|
[
"MIT"
] | null | null | null |
import os
# Ablation on HalfCheetah-v2 with a fixed learning rate and batch size,
# toggling reward-to-go (-rtg) and the learned baseline (--nn_baseline).
lr = 0.02
bs = 50000
# Vanilla policy gradient
os.system("cd code && python3 train_pg_f18.py HalfCheetah-v2 -ep 150 --discount 0.95 -n 100 -e 3 -l 2 -s 32 -b %d -lr %f --exp_name hc_b%d_r%f" % (bs, lr, bs, lr))
# Reward-to-go only
os.system("cd code && python3 train_pg_f18.py HalfCheetah-v2 -ep 150 --discount 0.95 -n 100 -e 3 -l 2 -s 32 -b %d -lr %f -rtg --exp_name hc_rtg_b%d_r%f" % (bs, lr, bs, lr))
# Neural-network baseline only
os.system("cd code && python3 train_pg_f18.py HalfCheetah-v2 -ep 150 --discount 0.95 -n 100 -e 3 -l 2 -s 32 -b %d -lr %f --nn_baseline --exp_name hc_baseline_b%d_r%f" % (bs, lr, bs, lr))
# Both reward-to-go and the baseline
os.system("cd code && python3 train_pg_f18.py HalfCheetah-v2 -ep 150 --discount 0.95 -n 100 -e 3 -l 2 -s 32 -b %d -lr %f -rtg --nn_baseline --exp_name hc_rtg_baseline_b%d_r%f" % (bs, lr, bs, lr))
| 81.333333
| 190
| 0.669399
| 173
| 732
| 2.682081
| 0.225434
| 0.034483
| 0.086207
| 0.12069
| 0.905172
| 0.823276
| 0.823276
| 0.823276
| 0.823276
| 0.762931
| 0
| 0.12199
| 0.148907
| 732
| 9
| 190
| 81.333333
| 0.622793
| 0
| 0
| 0
| 0
| 0.571429
| 0.802183
| 0.031378
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9f8edceb90c6f099d2af3601e5e60ea6328f155b
| 9,869
|
py
|
Python
|
appendix_figS1.py
|
mcolacino57/elevators
|
f4408496a53877d6b57e8e65338c537d0fb16b47
|
[
"MIT"
] | 1
|
2020-11-24T20:51:43.000Z
|
2020-11-24T20:51:43.000Z
|
appendix_figS1.py
|
mcolacino57/elevators
|
f4408496a53877d6b57e8e65338c537d0fb16b47
|
[
"MIT"
] | 1
|
2021-05-07T17:55:56.000Z
|
2021-05-14T14:59:10.000Z
|
appendix_figS1.py
|
mcolacino57/elevators
|
f4408496a53877d6b57e8e65338c537d0fb16b47
|
[
"MIT"
] | 1
|
2021-05-07T18:04:37.000Z
|
2021-05-07T18:04:37.000Z
|
"""
This python code generates figure S1 (appendix) in the paper.
"""
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
import random
import math
import os
import glob
from collections import Counter
import itertools
# NOTE: createOtherArrivalFiles and run_InterventionOnMultipleFiles are
# simulation helpers assumed to be defined/imported elsewhere in this project.
"Small Sized building configuration"
# building information: floor 1-7, pax destination 2-6
numFloor = 6 # total number of pax destinations
numPax = 400 # total number of pax during rush hour
timeInterval = 10 # we update the system every 10 seconds
# info for pax arrivals
# number of simulations
numFile = 100
# floor ranges of each queue if we are queue splitting, default []
queueDest = []
# WtW can be anything from 0 to 100; we usually use 0, 20, 40, 60, 80, 100
WtW = 0
# create arrival files, will be stored in the same folder we run the code
# if we want different random simulations, uncomment the lines below
"""
createOtherArrivalFiles(numPax,numFloor,WtW,numFile)
fileName = str(numPax)+"_"+str(numFloor)+"_"+str(WtW)+"_"
"""
# name of the files used for the simulations,
# these files will yield exactly the figures in the paper
# if we want new random files, comment the line below
# get current working directory
parent_dir = str(os.getcwd())
# get csv files in subdirectory for this setup
all_files = glob.glob(parent_dir+'/data/400pax_small/*')
# put together all pax configurations
paxInfo = [all_files,numFile,queueDest,WtW]
# elevator info
# total number of elevators
elevNumTotal = 2
# speed of elevator to traverse one floor
elevSpeed = 1.4
# multiplier to account for intermediate pax entering and leaving
elevSpeedMultiplier = 1.3
# time to board an elevator, depends on num of pax entering the elev
# if there is one pax, they take 15s to board + 2s for every additional pax
elevBoardTime = [15, 17, 19, 21]
# time to deboard an elevator, depends on num of pax exiting the elev
# if there is one pax, they take 15s to deboard + 2s for every additional pax
elevStopTime = [15, 17, 19, 21]
# the range of floors the elev can serve, right now all elevs can serve all floors
elevServiceRange = dict.fromkeys(range(elevNumTotal),list(range(2,numFloor+2,1)))
# capacity of the elevators
elevCap = 2
# put together all elev configurations
elevInfo = [numFloor, elevNumTotal,elevSpeed,elevSpeedMultiplier, elevBoardTime, elevStopTime, elevServiceRange, elevCap]
"Compute results of FCFS"
intervention = "FCFS"
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_FCFS_small = timequeue
"Compute results of Cohorting"
intervention = "CohortFCFS"
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_CohortFCFS_small = timequeue
"Compute results of 2 Queue Split"
intervention = "FCFSQueueSplit"
# set the floor ranges for each queue, here it is a 2 queue split
queueDest = [list(range(2,5,1)),list(range(5,8,1))]
# update paxInfo
paxInfo[2] = queueDest
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_FCFSQueueSpliteven_2Q_small = timequeue
"Compute results of 3 Queue Split"
intervention = "FCFSQueueSplit"
# set the floor ranges for each queue, here it is a 3 queue split
queueDest = [list(range(2,4,1)),list(range(4,6,1)),list(range(6,8,1))]
# update paxInfo
paxInfo[2] = queueDest
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_FCFSQueueSpliteven_3Q_small = timequeue
"""
"Medium Sized building configuration
"""
# building information: floor 1-16, pax destination 2-16
numFloor = 15 # total number of pax destinations
numPax = 1500 # total number of pax during rush hour
timeInterval = 10 # we update the system every 10 seconds
# info for pax arrivals
# number of simulations
numFile = 100
# floor ranges of each queue if we are queue splitting, default []
queueDest = []
# WtW can be anything from 0 to 100; we usually use 0, 20, 40, 60, 80, 100
WtW = 0
# create arrival files, will be stored in the same folder we run the code
# if we want different random simulations, uncomment the lines below
"""
createOtherArrivalFiles(numPax,numFloor,WtW,numFile)
fileName = str(numPax)+"_"+str(numFloor)+"_"+str(WtW)+"_"
"""
# name of the files used for the simulations,
# these files will yield exactly the figures in the paper
# if we want new random files, comment the line below
# get current working directory
parent_dir = str(os.getcwd())
# get csv files in subdirectory for this setup
all_files = glob.glob(parent_dir+'/data/1500pax_medium/*')
# put together all pax configurations
paxInfo = [all_files,numFile,queueDest,WtW]
# elevator info
# total number of elevators
elevNumTotal = 6
# speed of elevator to traverse one floor
elevSpeed = 1.4
# multiplier to account for intermediate pax entering and leaving
elevSpeedMultiplier = 1.3
# time to board an elevator, depends on num of pax entering the elev
# if there is one pax, they take 15s to board + 2s for every additional pax
elevBoardTime = [15, 17, 19, 21]
# time to deboard an elevator, depends on num of pax exiting the elev
# if there is one pax, they take 15s to deboard + 2s for every additional pax
elevStopTime = [15, 17, 19, 21]
# the range of floors the elev can serve, right now all elevs can serve all floors
elevServiceRange = dict.fromkeys(range(elevNumTotal),list(range(2,numFloor+2,1)))
# capacity of the elevators
elevCap = 4
# put together all elev configurations
elevInfo = [numFloor, elevNumTotal,elevSpeed,elevSpeedMultiplier, elevBoardTime, elevStopTime, elevServiceRange, elevCap]
"Compute results of FCFS"
intervention = "FCFS"
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_FCFS_medium = timequeue
"Compute results of Cohorting"
intervention = "CohortFCFS"
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_CohortFCFS_medium = timequeue
"Compute results of 2 Queue Split"
intervention = "FCFSQueueSplit"
# set the floor ranges for each queue, here it is a 2 queue split
queueDest = [list(range(2,10,1)),list(range(10,17,1))]
# update paxInfo
paxInfo[2] = queueDest
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_FCFSQueueSpliteven_2Q_medium = timequeue
"Compute results of 3 Queue Split"
intervention = "FCFSQueueSplit"
# set the floor ranges for each queue, here it is a 3 queue split
queueDest = [list(range(2,7,1)),list(range(7,12,1)),list(range(12,17,1))]
# update paxInfo
paxInfo[2] = queueDest
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_FCFSQueueSpliteven_3Q_medium = timequeue
"Compute results of 4 Queue Split"
intervention = "FCFSQueueSplit"
# set the floor ranges for each queue, here it is a 4 queue split
queueDest = [list(range(2,6,1)),list(range(6,10,1)),list(range(10,14,1)),list(range(14,17,1))]
# update paxInfo
paxInfo[2] = queueDest
# run the intervention
(_,_,timequeue,_,_,_,_,_) = run_InterventionOnMultipleFiles(paxInfo, elevInfo, intervention)
timequeue_FCFSQueueSpliteven_4Q_medium = timequeue
"Plots comparing interventions- creates a 1 row 2 column plot"
# queue length in the lobby vs time
fig = plt.figure(figsize=(15, 5))
ax = plt.subplot(1,2,1)
timeInterval=10 # we update the system every 10 seconds
d = list(timequeue_FCFS_small)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='black',label='Default FCFS')
d = list(timequeue_CohortFCFS_small)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='red',label ='Cohorting')
d = list(timequeue_FCFSQueueSpliteven_2Q_small)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='blue',label='2 Queue Split')
d = list(timequeue_FCFSQueueSpliteven_3Q_small)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='cyan',label='3 Queue Split')
plt.xticks([0,1800,3600,5400,7100],['8:00 AM', '8:30 AM','9:00 AM','9:30 AM','10:00 AM'])
plt.xlim(-50,7101)
plt.xlabel('Time')
plt.ylabel('Queue length in the lobby')
ax.text(-0.1, 1.1,'A', transform=ax.transAxes,
size=20, weight='bold')
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
plt.legend(loc='upper left', bbox_to_anchor=(0,1),fontsize='x-large',shadow=True, fancybox=True)
# queue length in the lobby vs time
ax = plt.subplot(1,2,2)
timeInterval=10 # we update the system every 10 seconds
d = list(timequeue_FCFS_medium)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='black',label='Default FCFS')
d = list(timequeue_CohortFCFS_medium)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='red',label ='Cohorting')
d = list(timequeue_FCFSQueueSpliteven_2Q_medium)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='blue',label='2 Queue Split')
d = list(timequeue_FCFSQueueSpliteven_3Q_medium)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='cyan',label='3 Queue Split')
d = list(timequeue_FCFSQueueSpliteven_4Q_medium)
plt.plot(list(range(0,timeInterval*len(d), timeInterval)), d,color='teal',label='4 Queue Split')
plt.xticks([0,1800,3600,5400,7100],['8:00 AM', '8:30 AM','9:00 AM','9:30 AM','10:00 AM'])
plt.xlim(-50,7101)
plt.xlabel('Time')
plt.ylabel('Queue length in the lobby')
ax.text(-0.1, 1.1,'B', transform=ax.transAxes,
size=20, weight='bold')
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
plt.legend(loc='upper left', bbox_to_anchor=(0,1),fontsize='x-large',shadow=True, fancybox=True)
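# Hypothetical convenience wrapper (not in the original script): every run
# above repeats the same 8-tuple unpack and optional queueDest update, which
# a small helper could fold together.
def run_and_get_timequeue(pax_info, elev_info, intervention, queue_dest=None):
    if queue_dest is not None:
        pax_info[2] = queue_dest  # paxInfo[2] holds the queue split ranges
    (_, _, timequeue, _, _, _, _, _) = run_InterventionOnMultipleFiles(
        pax_info, elev_info, intervention)
    return timequeue
# e.g. timequeue_FCFS_small = run_and_get_timequeue(paxInfo, elevInfo, "FCFS")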
| 39.318725
| 121
| 0.756814
| 1,464
| 9,869
| 5.002049
| 0.182377
| 0.030725
| 0.019664
| 0.056534
| 0.937048
| 0.917384
| 0.903728
| 0.882425
| 0.871091
| 0.871091
| 0
| 0.040804
| 0.128382
| 9,869
| 250
| 122
| 39.476
| 0.810509
| 0.343196
| 0
| 0.634328
| 1
| 0
| 0.135524
| 0.003597
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052239
| 0
| 0.052239
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ccb9c7d1e93356b26450a4bb1568ac1f4b6a56b
| 25,670
|
py
|
Python
|
tests/test_entity_extraction.py
|
NHSDigital/medicines-text-mining-tool
|
bea1efddd832b614fdb00ae8df5f8dc206ba81b0
|
[
"MIT"
] | 1
|
2022-03-31T13:04:05.000Z
|
2022-03-31T13:04:05.000Z
|
tests/test_entity_extraction.py
|
NHSDigital/medicines-text-mining-tool
|
bea1efddd832b614fdb00ae8df5f8dc206ba81b0
|
[
"MIT"
] | null | null | null |
tests/test_entity_extraction.py
|
NHSDigital/medicines-text-mining-tool
|
bea1efddd832b614fdb00ae8df5f8dc206ba81b0
|
[
"MIT"
] | null | null | null |
# Databricks notebook source
# MAGIC %run ../notebooks/_modules/epma_global/functions
# COMMAND ----------
# MAGIC %run ../notebooks/2_entity_extraction/functions/entity_extraction_functions
# COMMAND ----------
# MAGIC %run ../notebooks/_modules/epma_global/function_test_suite
# COMMAND ----------
import warnings
from pyspark.sql.types import StringType, IntegerType, BooleanType, StructType, StructField, ArrayType, Row
from pyspark.sql import functions as F
from pyspark.sql.functions import col
# COMMAND ----------
dose_form_list_bc = sc.broadcast(['cream'])
# COMMAND ----------
suite = FunctionTestSuite()
# COMMAND ----------
@suite.add_test
def test_check_for_dose_form():
test_cases = {
"powder": True,
"cReam": True,
"4mg hydrocortisone": False,
"50ml ampoule of ketamine": False
}
dose_form_desc = ["powder", "balm", "Cream"]
for test_case, expected in test_cases.items():
assert check_for_dose_form(test_case, dose_form_desc) == expected
# COMMAND ----------
@suite.add_test
def test_extract_strength_unit():
test_cases = {
'Furosemide 240 mg in 50ml sodium chloride': FormattedStrengthUnitValues(SVN='240', SVU='mg', SVD='50', SDU='ml'), # known limitation
'Furosemide 240 mg in 50ml cream': FormattedStrengthUnitValues(SVN='240', SVU='mg', SVN_2='50', SVU_2='ml'),
'null': FormattedStrengthUnitValues(),
'linezolid 600 mg in 300ml (2mg in 1ml) cream': FormattedStrengthUnitValues(SVN='600', SVU='mg', SVN_2='300', SVU_2='ml', SVN_3='2', SVU_3='mg'),
'dexamethasone/framycetin/gramicidin 12.5%-1%-0.005%': FormattedStrengthUnitValues(SVN='12.5', SVU='%', SVN_2='1', SVU_2='%', SVN_3='0.005', SVU_3='%')
}
dose_form_desc = ["powder", "powder", "balm", "Cream"]
for test_case, expected in test_cases.items():
assert extract_strength_unit(test_case, dose_form_desc) == expected
# COMMAND ----------
@suite.add_test
def test_find_all_strength_unit_pairs():
test_cases = {
'dexamethasone/framycetin/gramicidin 12.5%-1%-0.005%': FormattedStrengthUnitValues(SVN='12.5', SVU='%', SVN_2='1', SVU_2='%', SVN_3='0.005', SVU_3='%'),
'betamethasone-calcipotriol 0.05%-0.005% topical foam': FormattedStrengthUnitValues(SVN='0.05', SVU='%', SVN_2='0.005', SVU_2='%'),
'dexamethasone/framycetin 0.1% eye': FormattedStrengthUnitValues(SVN='0.1', SVU='%'),
'null': FormattedStrengthUnitValues(),
'50ml-1g': FormattedStrengthUnitValues(SVN='50', SVU='ml')
}
for test_case, expected in test_cases.items():
assert find_all_strength_unit_pairs(test_case) == expected
# COMMAND ----------
@suite.add_test
def test_find_all_strength_unit_pairs_bad_labels():
with warnings.catch_warnings(record=True) as w:
result = find_all_strength_unit_pairs(['folic acid 2.5mg', ' in ', '5ml (100micrograms', ' in ', '0.2ml) syrup'])
assert len(w) == 1
assert issubclass(w[-1].category, RuntimeWarning)
assert result == FormattedStrengthUnitValues()
# COMMAND ----------
@suite.add_test
def test_find_strength_unit_pairs_regex():
test_cases = {
"dexamethasone/framycetin/gramicidin 12.5%-1%-0.005%": [(' 12.5', '.5', '%'), ('-1', '-1', '%'), ('-0.005', '-0.005', '%')],
"paracetamol 5mg in cream 7 ml": [(' 5', ' 5', 'mg'), (' 7', ' 7', 'ml')],
"54 ml": [('54', '54', 'ml')],
"54": [('54', '54', '')],
}
for test_case, expected in test_cases.items():
assert find_strength_unit_pairs_regex(test_case) == expected
# COMMAND ----------
@suite.add_test
def test_split_denominators():
test_cases = {
"dexamethasone 1mg +framycetin 0.1 %": ['dexamethasone 1mg ', 'framycetin 0.1 %'], # limitation
"as/as 5mg/2mg": ['as', '/', 'as 5mg', '/', '2mg'],
"paracetamol 5mg in cream 7 ml": ['paracetamol 5mg in cream 7 ml'],
"paracetamol 5mg in radasvoil 7 mg": ['paracetamol 5mg', ' in ', 'radasvoil 7 mg'],
"null": ['null'],
"Paracetamol 5ml O/D-3rd": ['Paracetamol 5ml O', '/', 'D-']
}
dose_form_desc = ["powder", "powder", "balm", "cream"]
for test_case, expected in test_cases.items():
assert split_denominators(test_case, dose_form_desc) == expected
# COMMAND ----------
@suite.add_test
def test_remove_special_characters():
test_cases = {
"my( test) ": "my test",
"5mg --promethazine": "5mg promethazine",
"paracetamol": "paracetamol",
}
for test_case, expected in test_cases.items():
assert remove_special_characters(test_case) == expected
# COMMAND ----------
@suite.add_test
def test_three_moieties_separated_by_slashes():
assert three_moieties_separated_by_slashes("dexamethasone/framycetin/gramicidin 0.05%-0.5%-0.005%")
assert three_moieties_separated_by_slashes("dexamethasone/framycetin/gramicidin")
assert not three_moieties_separated_by_slashes("dexamethasone/framycetin /gramicidin 0.05%-0.5%-0.005%")
assert not three_moieties_separated_by_slashes("dexamethasone/framycetin 0.05%-0.5%")
assert not three_moieties_separated_by_slashes("")
# COMMAND ----------
@suite.add_test
def test_two_moieties_separated_by_slashes():
assert two_moieties_separated_by_slashes("dexamethasone/framycetin/gramicidin 0.05%-0.5%-0.005%")
assert two_moieties_separated_by_slashes("dexamethasone/framycetin 0.05%-0.5%")
assert two_moieties_separated_by_slashes("dexamethasone/framycetin")
assert not two_moieties_separated_by_slashes("dexamethasone")
assert not two_moieties_separated_by_slashes("")
# COMMAND ----------
@suite.add_test
def test_moiety_separated_by_dash():
assert moiety_separated_by_dash("dexamethasone-framycetin-gramicidin 0.05%-0.5%-0.005%")
assert moiety_separated_by_dash("dexamethasone-framycetin 0.05%-0.5%")
assert not moiety_separated_by_dash("dexamethasone framycetin 0.05%-0.5%")
assert not moiety_separated_by_dash("dexamethasone/framycetin 0.05%-0.5%")
assert not moiety_separated_by_dash("")
# COMMAND ----------
@suite.add_test
def test_join_on_columns_and_filter_max_moieties():
schema_df_input = StructType([StructField("id", StringType(), False),
StructField("epma_description", StringType(), False)])
df_input = spark.createDataFrame([
Row('1', 'Trelegy Ellipta 92 mcg-55 mcg-22 mcg/inh inhalation powder'),
Row('2', 'Vitamin svrthv78 oil')
], schema=schema_df_input)
schema_df_refdata = StructType([
StructField("MOIETY", StringType(), True),
StructField("MOIETY_2", StringType(), True),
StructField("MOIETY_3", StringType(), True),
StructField("MOIETY_4", StringType(), True),
StructField("MOIETY_5", StringType(), True),
])
df_refdata = spark.createDataFrame([
Row("Trelegy", None, None, None, None),
Row("trelegy", "ellipta", None, None, None),
Row("Paracetamol", None, None, None, None),
Row("Vitamin", None, None, None, None),
Row("vitamin", None, None, None, None),
Row("Trelegy", "Paracetamol", None, None, None),
], schema=schema_df_refdata)
schema_df_expected = StructType([
StructField("id", StringType(), False),
StructField("epma_description", StringType(), False),
StructField("MOIETY", StringType(), True),
StructField("MOIETY_2", StringType(), True),
StructField("MOIETY_3", StringType(), True),
StructField("MOIETY_4", StringType(), True),
StructField("MOIETY_5", StringType(), True)
])
df_expected = spark.createDataFrame([
('1', 'Trelegy Ellipta 92 mcg-55 mcg-22 mcg/inh inhalation powder', "trelegy", "ellipta", None, None, None),
('2', 'Vitamin svrthv78 oil', "Vitamin", None, None, None, None),
('2', 'Vitamin svrthv78 oil', "vitamin", None, None, None, None),
], schema=schema_df_expected)
df_actual = join_on_columns_and_filter_max_moieties(
df_input,
moiety_join_cols=['MOIETY', 'MOIETY_2', 'MOIETY_3', 'MOIETY_4', 'MOIETY_5'],
df_refdata=df_refdata,
id_col='id',
text_col='epma_description'
)
assert compare_results(df_actual, df_expected, join_columns=['epma_description', 'MOIETY'])
# COMMAND ----------
@suite.add_test
def test_partial_entity_match():
schema_df_input = StructType([
StructField("epma_id", StringType(), False),
StructField("original_epma_description", StringType(), False),
StructField("form_in_text", StringType(), False),
StructField("epma_description", StringType(), False)
])
df_input = spark.createDataFrame([
Row('123', 'Trelegy Ellipta 92 mcg inhalation powder', ' ', 'Trelegy Ellipta 92 mcg inhalation powder'),
Row('456', 'Vitamin 78 mg oil', ' ', 'Vitamin 78 mg oil'),
Row('467', 'not matching', ' ', 'not matching'),
Row('724', 'not matching again', ' ', 'not matching again')
], schema=schema_df_input)
schema_df_refdata_vpid = StructType([
StructField("text_col", StringType(), True),
StructField("MOIETY", StringType(), True),
StructField("MOIETY_2", StringType(), True),
StructField("MOIETY_3", StringType(), True),
StructField("MOIETY_4", StringType(), True),
StructField("MOIETY_5", StringType(), True),
StructField("SVN", StringType(), True),
StructField("SVU", StringType(), True),
StructField("_id", StringType(), False),
StructField("DOSEFORM", StringType(), False),
])
schema_df_refdata_apid = StructType([
StructField("text_col", StringType(), True),
StructField("MOIETY", StringType(), True),
StructField("MOIETY_2", StringType(), True),
StructField("MOIETY_3", StringType(), True),
StructField("MOIETY_4", StringType(), True),
StructField("MOIETY_5", StringType(), True),
StructField("SVN", StringType(), True),
StructField("SVU", StringType(), True),
StructField("SVN2", StringType(), True),
StructField("SVU2", StringType(), True),
StructField("SVD", StringType(), True),
StructField("SDU", StringType(), True),
StructField("SVN_2", StringType(), True),
StructField("SVU_2", StringType(), True),
StructField("SVD_2", StringType(), True),
StructField("SDU_2", StringType(), True),
StructField("SVN_3", StringType(), True),
StructField("SVU_3", StringType(), True),
StructField("SITE", StringType(), True),
StructField("_id", StringType(), False),
StructField("DOSEFORM", StringType(), False),
])
df_refdata_vpid = spark.createDataFrame([
Row("Trelegy cream", "Trelegy", None, None, None, None, None, None, "7536", "cream"),
Row("Trelegy ellipta oil", "trelegy", "ellipta", None, None, None, "20", "ml", "120", "oil"),
Row("Trelegy ellipta balm", "trelegy", "ellipta", None, None, None, "92", "mcg", "66", "balm"),
], schema=schema_df_refdata_vpid)
df_refdata_apid = spark.createDataFrame([
Row("Vitamin C tablet", "Vitamin", None, None, None, None, "78", "mg", None, None, None, None, None, None, None, None, None, None, None, "8264", "tablet"),
Row("Vitamin D tablet", "vitamin", None, None, None, None, "78", "mg", None, None, None, None, None, None, None, None, None, None, None, "8348", "cream"),
Row("Paracetamol solution", "Paracetamol", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "13412", "solution"),
Row("Trelegy/Paracetamol balm", "Trelegy", "Paracetamol", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "9574", "balm"),
], schema=schema_df_refdata_apid)
schema_df_expected = StructType([
StructField("epma_id", StringType(), True),
StructField("original_epma_description", StringType(), True),
StructField("form_in_text", StringType(), True),
StructField("epma_description", StringType(), True),
StructField("match_level", StringType(), True),
StructField("match_id", StringType(), True),
StructField("match_term", StringType(), True),
])
df_expected = spark.createDataFrame([
Row('123', 'Trelegy Ellipta 92 mcg inhalation powder', ' ', 'Trelegy Ellipta 92 mcg inhalation powder', 'VPID', '120', "Trelegy ellipta oil"),
Row('456', 'Vitamin 78 mg oil', ' ', 'Vitamin 78 mg oil', 'APID', '8264', "Vitamin C tablet"),
Row('123', 'Trelegy Ellipta 92 mcg inhalation powder', ' ', 'Trelegy Ellipta 92 mcg inhalation powder', 'VPID', '66', "Trelegy ellipta balm"),
Row('456', 'Vitamin 78 mg oil', ' ', 'Vitamin 78 mg oil', 'APID', '8348', "Vitamin D tablet")
], schema=schema_df_expected)
df_expected_na = spark.createDataFrame([
Row('467', 'not matching', ' ', 'not matching', None, None, None),
Row('724', 'not matching again', ' ', 'not matching again', None, None, None)
], schema=schema_df_expected)
df_result, df_unmappable = partial_entity_match(df_input, df_refdata_apid, df_refdata_vpid, id_col='epma_id', original_text_col='original_epma_description',
form_in_text_col='form_in_text', text_col='epma_description', match_id_col='match_id', match_level_col='match_level',
match_term_col='match_term', ref_id_col='_id', ref_text_col='text_col')
assert compare_results(df_result.where(col('match_id').isNotNull()), df_expected, join_columns=['epma_description', 'match_id'])
assert compare_results(df_result.where(col('match_id').isNull()), df_expected_na, join_columns=['epma_description'])
# COMMAND ----------
@suite.add_test
def test_partial_entity_match_exclude_multiples():
schema_df_input = StructType([
StructField("epma_id", StringType(), False),
StructField("original_epma_description", StringType(), False),
StructField("form_in_text", StringType(), False),
StructField("epma_description", StringType(), False)
])
df_input = spark.createDataFrame([
Row('123', 'Paracetamol 500mg in Sodium Chloride', ' ', 'paracetamol 500mg in sodium chloride'),
Row('456', 'Sodium Chloride 500mg', ' ', 'sodium chloride 500mg'),
Row('467', 'Glucose 500mg', ' ', 'glucose 500mg'),
Row('724', 'Adcal and Calcium', ' ', 'adcal and calcium'),
Row('739', 'Adcal-D3 500mg', ' ', 'adcal-d3 500mg'),
Row('770', 'Adcal and Paracetamol', ' ', 'adcal and paracetamol'),
Row('885', 'Calcium Carbonate 500mg', ' ', 'calcium carbonate 500mg'),
Row('999', 'Adcal (Calcium) 500mg', ' ', 'adcal (calcium) 500mg')
], schema=schema_df_input)
schema_df_refdata_vpid = StructType([
StructField("text_col", StringType(), True),
StructField("MOIETY", StringType(), True),
StructField("MOIETY_2", StringType(), True),
StructField("MOIETY_3", StringType(), True),
StructField("MOIETY_4", StringType(), True),
StructField("MOIETY_5", StringType(), True),
StructField("SVN", StringType(), True),
StructField("SVU", StringType(), True),
StructField("_id", StringType(), False),
StructField("DOSEFORM", StringType(), False),
])
schema_df_refdata_apid = StructType([
StructField("text_col", StringType(), True),
StructField("MOIETY", StringType(), True),
StructField("MOIETY_2", StringType(), True),
StructField("MOIETY_3", StringType(), True),
StructField("MOIETY_4", StringType(), True),
StructField("MOIETY_5", StringType(), True),
StructField("SVN", StringType(), True),
StructField("SVU", StringType(), True),
StructField("SVN2", StringType(), True),
StructField("SVU2", StringType(), True),
StructField("SVD", StringType(), True),
StructField("SDU", StringType(), True),
StructField("SVN_2", StringType(), True),
StructField("SVU_2", StringType(), True),
StructField("SVD_2", StringType(), True),
StructField("SDU_2", StringType(), True),
StructField("SVN_3", StringType(), True),
StructField("SVU_3", StringType(), True),
StructField("SITE", StringType(), True),
StructField("_id", StringType(), False),
StructField("DOSEFORM", StringType(), False),
])
df_refdata_vpid = spark.createDataFrame([
Row("Adcal and Paracetamol", "Adcal", "Paracetamol", None, None, None, None, None, "1", "tablet"),
Row("Adcal", "Adcal", None, None, None, None, None, None, "2", "tablet"),
Row("Adcal-D3", "Adcal-D3", None, None, None, None, None, None, "3", "tablet"),
Row("Calcium Carbonate", "Calcium Carbonate", None, None, None, None, None, None, "4", "tablet"),
Row("Sodium Chloride", "Sodium Chloride", None, None, None, None, None, None, "6", "tablet"),
Row("Glucose", "Glucose", None, None, None, None, None, None, "7", "tablet"),
Row("Calcium", "Calcium", None, None, None, None, None, None, "8", "tablet"),
Row("Paracetamol", "Paracetamol", None, None, None, None, None, None, "9", "tablet"),
], schema=schema_df_refdata_vpid)
df_refdata_apid = spark.createDataFrame([
Row("Adcal and Paracetamol", "Adcal", "Paracetamol", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "11", "tablet"),
Row("Adcal", "Adcal", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,"12", "tablet"),
Row("Adcal-D3", "Adcal-D3", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,"13", "tablet"),
Row("Calcium Carbonate", "Calcium Carbonate", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "14", "tablet"),
Row("Sodium Chloride", "Sodium Chloride", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "16", "tablet"),
Row("Glucose", "Glucose", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,"17", "tablet"),
Row("Calcium", "Calcium", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "18", "tablet"),
Row("Paracetamol", "Paracetamol", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "19", "tablet"),
], schema=schema_df_refdata_apid)
schema_df_expected = StructType([
StructField("epma_id", StringType(), True),
StructField("original_epma_description", StringType(), True),
StructField("form_in_text", StringType(), True),
StructField("epma_description", StringType(), True),
StructField("match_level", StringType(), True),
StructField("match_id", StringType(), True),
StructField("match_term", StringType(), True),
])
schema_df_expected_unmappable = StructType([
StructField("original_epma_description", StringType(), True),
StructField("form_in_text", StringType(), True)
])
df_expected = spark.createDataFrame([
Row('770', 'Adcal and Paracetamol', ' ', 'adcal and paracetamol', 'APID', '11', 'Adcal and Paracetamol'),
Row('467', 'Glucose 500mg', ' ', 'glucose 500mg', 'APID', '17', 'Glucose'),
Row('456', 'Sodium Chloride 500mg', ' ', 'sodium chloride 500mg', 'APID', '16', 'Sodium Chloride'),
Row('739', 'Adcal-D3 500mg', ' ', 'adcal-d3 500mg', 'APID', '13', 'Adcal-D3'),
Row('123', 'Paracetamol 500mg in Sodium Chloride', ' ', 'paracetamol 500mg in sodium chloride', 'APID', '19', 'Paracetamol'),
Row('885', 'Calcium Carbonate 500mg', ' ', 'calcium carbonate 500mg', 'APID', '14', 'Calcium Carbonate'),
Row('999', 'Adcal (Calcium) 500mg', ' ', 'adcal (calcium) 500mg', 'APID', '12', 'Adcal'),
Row('770', 'Adcal and Paracetamol', ' ', 'adcal and paracetamol', 'VPID', '1', 'Adcal and Paracetamol'),
Row('467', 'Glucose 500mg', ' ', 'glucose 500mg', 'VPID', '7', 'Glucose'),
Row('456', 'Sodium Chloride 500mg', ' ', 'sodium chloride 500mg', 'VPID', '6', 'Sodium Chloride'),
Row('739', 'Adcal-D3 500mg', ' ', 'adcal-d3 500mg', 'VPID', '3', 'Adcal-D3'),
Row('123', 'Paracetamol 500mg in Sodium Chloride', ' ', 'paracetamol 500mg in sodium chloride', 'VPID', '9', 'Paracetamol'),
Row('885', 'Calcium Carbonate 500mg', ' ', 'calcium carbonate 500mg', 'VPID', '4', 'Calcium Carbonate'),
Row('999', 'Adcal (Calcium) 500mg', ' ', 'adcal (calcium) 500mg', 'VPID', '2', 'Adcal')
], schema=schema_df_expected)
df_expected_unmappable = spark.createDataFrame([
Row('Adcal and Calcium', ' '),
], schema=schema_df_expected_unmappable)
df_result, df_unmappable = partial_entity_match(df_input, df_refdata_apid, df_refdata_vpid, id_col='epma_id', original_text_col='original_epma_description',
form_in_text_col='form_in_text', text_col='epma_description', match_id_col='match_id', match_level_col='match_level',
match_term_col='match_term', ref_id_col='_id', ref_text_col='text_col')
assert compare_results(df_result, df_expected, join_columns=['original_epma_description', 'match_id'])
assert compare_results(df_unmappable, df_expected_unmappable, join_columns=['original_epma_description'])
# COMMAND ----------
@suite.add_test
def test_entity_match():
schema_df_input = StructType([
StructField("epma_id", StringType(), False),
StructField("original_epma_description", StringType(), False),
StructField("form_in_text", StringType(), False),
StructField("epma_description", StringType(), False)
])
df_input = spark.createDataFrame([
Row('123', 'Trelegy Ellipta 92 mcg inhalation powder', ' ', 'Trelegy Ellipta 92 mcg inhalation powder'),
Row('456', 'Vitamin 78 mg oil', ' ', 'Vitamin 78 mg oil')
], schema=schema_df_input)
schema_df_refdata = StructType([
StructField("MOIETY", StringType(), True),
StructField("MOIETY_2", StringType(), True),
StructField("MOIETY_3", StringType(), True),
StructField("MOIETY_4", StringType(), True),
StructField("MOIETY_5", StringType(), True),
StructField("SITE", StringType(), True),
StructField("DOSEFORM", StringType(), True),
StructField("SVN", StringType(), True),
StructField("SVU", StringType(), True),
StructField("SVN2", StringType(), True),
StructField("SVU2", StringType(), True),
StructField("SVD", StringType(), True),
StructField("SDU", StringType(), True),
StructField("SVN_2", StringType(), True),
StructField("SVU_2", StringType(), True),
StructField("SVD_2", StringType(), True),
StructField("SDU_2", StringType(), True),
StructField("SVN_3", StringType(), True),
StructField("SVU_3", StringType(), True),
StructField("ref_id", StringType(), False),
])
df_refdata = spark.createDataFrame([
Row("Trelegy", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "7536"),
Row("trelegy", "ellipta", None, None, None, None, None, "20", "ml", None, None, None, None, None, None, None, None, None, None, "120"),
Row("trelegy", "ellipta", None, None, None, None, None, "92", "mcg", None, None, None, None, None, None, None, None, None, None, "66"),
Row("Vitamin", None, None, None, None, None, None, "78", "mg", None, None, None, None, None, None, None, None, None, None, "8264"),
Row("vitamin", None, None, None, None, None, None, "78", "mg", None, None, None, None, None, None, None, None, None, None, "8348"),
Row("Paracetamol", None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, "13412"),
Row("Trelegy", "Paracetamol", None, None, None, None,None, None, None, None, None, None, None, None, None, None, None, None, None, "9574"),
], schema=schema_df_refdata)
schema_df_expected = StructType([
StructField("epma_id", StringType(), False),
StructField("original_epma_description", StringType(), False),
StructField("form_in_text", StringType(), False),
StructField("epma_description", StringType(), False),
StructField("match_id", StringType(), False),
StructField("id_level", StringType(), False),
StructField("match_level", StringType(), False),
])
df_expected = spark.createDataFrame([
('123', 'Trelegy Ellipta 92 mcg inhalation powder', ' ', 'Trelegy Ellipta 92 mcg inhalation powder', '66', 'VPID', "entity"),
], schema=schema_df_expected)
df_actual = entity_match(df_input, df_refdata=df_refdata, ref_id_level="VPID", dose_form_list_bc=dose_form_list_bc,
id_col='epma_id', original_text_col='original_epma_description', form_in_text_col='form_in_text', text_col='epma_description',
match_id_col='match_id', id_level_col='id_level', match_level_col='match_level', match_datetime_col='match_datetime', ref_id_col='ref_id') \
.drop('match_datetime')
assert compare_results(df_actual, df_expected, join_columns=['match_id'])
# COMMAND ----------
@suite.add_test
def test_remove_substrings():
# note: the prefix "_dif" is used here in the column names, but don't use "_diff" as this prefix is used in compare_results.
schema_df_input = StructType([StructField('id', StringType(), False),
StructField('_dif_moieties', ArrayType(StringType()), False)])
df_input = spark.createDataFrame([
('1', ['adcal', 'adcal-d3'],),
('2', ['deep heat', 'deep'],),
('3', ['adcal', 'adcal-d3', 'calcium'],)
], schema_df_input)
schema_df_expected = StructType([StructField('id', StringType(), False),
StructField('_dif_moieties', ArrayType(StringType()), False),
StructField('_dif_moieties_except_substrings', ArrayType(StringType()), False)])
df_expected = spark.createDataFrame([
('1', ['adcal', 'adcal-d3'], ['adcal-d3']),
('2', ['deep heat', 'deep'], ['deep heat']),
('3', ['adcal', 'adcal-d3', 'calcium'], ['adcal-d3', 'calcium'])
], schema=schema_df_expected)
df_output = df_input.withColumn('_dif_moieties_except_substrings', remove_substrings_udf(F.col('_dif_moieties')))
assert compare_results(df_output, df_expected, join_columns=['id'])
# COMMAND ----------
suite.run()
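# COMMAND ----------
# Illustrative sketch only: the real FunctionTestSuite comes from the
# epma_global function_test_suite notebook pulled in via %run above. A
# registry-based runner matching the usage in this notebook (add_test as a
# decorator, run() to execute everything) could look like:
class FunctionTestSuiteSketch:
    def __init__(self):
        self._tests = []
    def add_test(self, func):
        # Register the function and return it unchanged so it stays callable.
        self._tests.append(func)
        return func
    def run(self):
        for test in self._tests:
            test()
            print("PASSED: " + test.__name__)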
| 48.34275
| 174
| 0.666693
| 3,138
| 25,670
| 5.254302
| 0.083174
| 0.176613
| 0.230713
| 0.262009
| 0.848981
| 0.809437
| 0.759643
| 0.739993
| 0.700995
| 0.639738
| 0
| 0.033343
| 0.15411
| 25,670
| 531
| 175
| 48.34275
| 0.725983
| 0.030347
| 0
| 0.520384
| 0
| 0
| 0.263926
| 0.032217
| 0
| 0
| 0
| 0
| 0.074341
| 1
| 0.035971
| false
| 0
| 0.004796
| 0
| 0.040767
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4cfc8b0bb4505bf91eeaf5e842a734dc2a8ec641
| 7,393
|
py
|
Python
|
invoicer/migrations/0003_auto__add_field_lineitem_item.py
|
gabrielhurley/django-invoicer
|
11674908995104a029b3073ceda2787460ff2d8e
|
[
"BSD-3-Clause"
] | 3
|
2016-07-26T13:56:59.000Z
|
2019-09-26T06:56:51.000Z
|
invoicer/migrations/0003_auto__add_field_lineitem_item.py
|
gabrielhurley/django-invoicer
|
11674908995104a029b3073ceda2787460ff2d8e
|
[
"BSD-3-Clause"
] | null | null | null |
invoicer/migrations/0003_auto__add_field_lineitem_item.py
|
gabrielhurley/django-invoicer
|
11674908995104a029b3073ceda2787460ff2d8e
|
[
"BSD-3-Clause"
] | null | null | null |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'LineItem.item'
db.add_column('invoicer_lineitem', 'item', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['invoicer.Item'], null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'LineItem.item'
db.delete_column('invoicer_lineitem', 'item_id')
models = {
'invoicer.client': {
'Meta': {'object_name': 'Client'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'contact_person': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '80', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone_number': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'blank': 'True'}),
'project': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'state': ('django.contrib.localflavor.us.models.USStateField', [], {'max_length': '2'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'invoicer.company': {
'Meta': {'object_name': 'Company'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_email': ('django.db.models.fields.EmailField', [], {'max_length': '80', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'contact_person': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '80', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'numbering_prefix': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'phone_number': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'blank': 'True'}),
'state': ('django.contrib.localflavor.us.models.USStateField', [], {'max_length': '2'}),
'tax_rate': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '2'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'invoicer.invoice': {
'Meta': {'object_name': 'Invoice'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invoices'", 'to': "orm['invoicer.Client']"}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invoices'", 'to': "orm['invoicer.Company']"}),
'due_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'invoice_number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'status_notes': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'terms': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['invoicer.Terms']"})
},
'invoicer.item': {
'Meta': {'object_name': 'Item'},
'cost': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '7', 'decimal_places': '2', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
'taxable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'invoicer.lineitem': {
'Meta': {'object_name': 'LineItem'},
'cost': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '7', 'decimal_places': '2', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_items'", 'to': "orm['invoicer.Invoice']"}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['invoicer.Item']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
'taxable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'invoicer.stylesheet': {
'Meta': {'object_name': 'Stylesheet'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stylesheets'", 'to': "orm['invoicer.Company']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'feedback_text': ('django.db.models.fields.TextField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'introduction_text': ('django.db.models.fields.TextField', [], {'max_length': '256', 'blank': 'True'}),
'misc_text': ('django.db.models.fields.TextField', [], {'max_length': '256', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'stylesheet': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'thank_you_text': ('django.db.models.fields.TextField', [], {'max_length': '256', 'blank': 'True'})
},
'invoicer.terms': {
'Meta': {'object_name': 'Terms'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['invoicer']
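# Usage sketch (South, the pre-Django-1.7 migration tool this file targets):
#   python manage.py migrate invoicer 0003   # apply this migration
#   python manage.py migrate invoicer 0002   # roll it back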
| 70.409524
| 173
| 0.561748
| 762
| 7,393
| 5.329396
| 0.15748
| 0.112288
| 0.193056
| 0.275794
| 0.779365
| 0.766806
| 0.766806
| 0.747353
| 0.715587
| 0.651564
| 0
| 0.017788
| 0.193967
| 7,393
| 104
| 174
| 71.086538
| 0.663702
| 0.010145
| 0
| 0.369565
| 0
| 0
| 0.567268
| 0.30886
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0
| 0.043478
| 0
| 0.097826
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e27cc31b75505ef8b7d85957eb503bb0de8552fb
| 180
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/margay/calculators/calc_utilities.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 69
|
2021-12-16T01:34:09.000Z
|
2022-03-31T08:27:39.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/margay/calculators/calc_utilities.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/margay/calculators/calc_utilities.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 21
|
2021-12-20T09:05:45.000Z
|
2022-03-28T02:52:28.000Z
|
from pyradioconfig.parts.ocelot.calculators.calc_utilities import CALC_Utilities_Ocelot
class CALC_Utilities_Margay(CALC_Utilities_Ocelot):
    # Inherit all from Ocelot
pass
| 25.714286
| 87
| 0.838889
| 23
| 180
| 6.26087
| 0.565217
| 0.361111
| 0.263889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 180
| 6
| 88
| 30
| 0.90566
| 0.127778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
2ca378addb464328f21686ccc172d00bdc8d49ac
| 97
|
py
|
Python
|
src/fortresstools/commander/__init__.py
|
gembcior/FortressTools
|
0230a9625034038ad6839c8dfab7f6b6a7f2108b
|
[
"MIT"
] | null | null | null |
src/fortresstools/commander/__init__.py
|
gembcior/FortressTools
|
0230a9625034038ad6839c8dfab7f6b6a7f2108b
|
[
"MIT"
] | null | null | null |
src/fortresstools/commander/__init__.py
|
gembcior/FortressTools
|
0230a9625034038ad6839c8dfab7f6b6a7f2108b
|
[
"MIT"
] | null | null | null |
from .base import Commander
from .base import NoScriptDefinition
from .base import WrongArgument
| 24.25
| 36
| 0.845361
| 12
| 97
| 6.833333
| 0.5
| 0.292683
| 0.512195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 97
| 3
| 37
| 32.333333
| 0.964706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e2e983db5d8798f1eb9a7365b82b8371b42a4ffb
| 2,122
|
py
|
Python
|
Topological.py
|
dokren/SSSP
|
68b777e59eb860a87baf72888d92f5cb0b90b47c
|
[
"MIT"
] | null | null | null |
Topological.py
|
dokren/SSSP
|
68b777e59eb860a87baf72888d92f5cb0b90b47c
|
[
"MIT"
] | null | null | null |
Topological.py
|
dokren/SSSP
|
68b777e59eb860a87baf72888d92f5cb0b90b47c
|
[
"MIT"
] | null | null | null |
import collections
GRAY, BLACK = 0, 1
# Single-source shortest paths on a weighted DAG: build a topological order
# via depth-first search, then relax edges in that order. topological1 orders
# the whole graph and raises ValueError on a cycle; topological2 only visits
# vertices reachable from the start node. Distances and parents use the
# string sentinels 'inf' and 'nil' for "unreached" and "no predecessor".
def topological1(graph, start):
order = collections.deque()
enter = set(graph)
state = {}
def dfs(root):
state[root] = GRAY
for k in graph[root]:
sk = state.get(k, None)
if sk == GRAY:
raise ValueError("cycle")
if sk == BLACK:
continue
enter.discard(k)
dfs(k)
order.appendleft(root)
state[root] = BLACK
while enter:
dfs(enter.pop())
top_order = list(order)
distance = {}
parent = {}
for node in graph:
distance[node] = 'inf'
parent[node] = 'nil'
distance[start] = 0
for vertex in top_order[top_order.index(start):]:
if distance[vertex] == 'inf':
continue
for neighbour in graph[vertex]:
dist = distance[vertex] + graph[vertex][neighbour]
if distance[neighbour] == 'inf' or dist < distance[neighbour]:
distance[neighbour] = dist
parent[neighbour] = vertex
return distance, parent
def topological2(graph, start):
order = collections.deque()
state = {}
def dfs(root):
state[root] = GRAY
for k in graph[root]:
sk = state.get(k, None)
if sk == GRAY:
raise ValueError("cycle")
if sk == BLACK:
continue
dfs(k)
order.appendleft(root)
state[root] = BLACK
dfs(start)
top_order = list(order)
distance = {}
parent = {}
for node in graph:
distance[node] = 'inf'
parent[node] = 'nil'
distance[start] = 0
for vertex in top_order[top_order.index(start):]:
if distance[vertex] == 'inf':
continue
for neighbour in graph[vertex]:
dist = distance[vertex] + graph[vertex][neighbour]
if distance[neighbour] == 'inf' or dist < distance[neighbour]:
distance[neighbour] = dist
parent[neighbour] = vertex
return distance, parent
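# Example usage on a small weighted DAG (graphs are dicts mapping each vertex
# to a {neighbour: edge_weight} dict, as the graph[vertex][neighbour] lookups
# above imply):
if __name__ == "__main__":
    dag = {
        's': {'a': 2, 'b': 6},
        'a': {'b': 3, 'c': 1},
        'b': {'c': 1},
        'c': {},
    }
    distance, parent = topological1(dag, 's')
    print(distance)  # {'s': 0, 'a': 2, 'b': 5, 'c': 3}
    print(parent)    # {'s': 'nil', 'a': 's', 'b': 'a', 'c': 'a'}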
| 23.577778
| 74
| 0.529218
| 227
| 2,122
| 4.920705
| 0.211454
| 0.037601
| 0.046553
| 0.046553
| 0.880931
| 0.825425
| 0.825425
| 0.825425
| 0.759176
| 0.759176
| 0
| 0.004422
| 0.360509
| 2,122
| 90
| 75
| 23.577778
| 0.818718
| 0.009896
| 0
| 0.865672
| 0
| 0
| 0.01619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0
| 0.014925
| 0
| 0.104478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
393938dda583b6717323ee645164007d12831045
| 114
|
py
|
Python
|
info/moduls/passport/__init__.py
|
qwer5785933/flask-web
|
5af25fa419ab12714d0421f285ef176c39fee544
|
[
"BSD-2-Clause"
] | 5
|
2019-06-09T03:56:14.000Z
|
2020-06-16T02:04:49.000Z
|
info/moduls/passport/__init__.py
|
qwer5785933/flask-web
|
5af25fa419ab12714d0421f285ef176c39fee544
|
[
"BSD-2-Clause"
] | 6
|
2019-06-11T02:47:13.000Z
|
2022-03-11T23:51:16.000Z
|
info/moduls/passport/__init__.py
|
qwer5785933/flask-web
|
5af25fa419ab12714d0421f285ef176c39fee544
|
[
"BSD-2-Clause"
] | 1
|
2019-06-11T02:23:17.000Z
|
2019-06-11T02:23:17.000Z
|
from flask import Blueprint
passport_ = Blueprint('passport', __name__, url_prefix='/passport')
from .views import *
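# Usage sketch: the blueprint is registered on the Flask app elsewhere in the
# project (typically in the application factory), along these lines:
#   from info.moduls.passport import passport_
#   app.register_blueprint(passport_)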
| 22.8
| 63
| 0.798246
| 14
| 114
| 6.071429
| 0.642857
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087719
| 114
| 5
| 64
| 22.8
| 0.817308
| 0
| 0
| 0
| 0
| 0
| 0.147826
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
|
0
| 7
|
393a74c82877d6c2fa3a9244cddbc79bb06a97bc
| 9,040
|
py
|
Python
|
pc/migrations/0001_initial.py
|
RainYang0925/testcase_web
|
6698190c426be56bfc54e92b6f99a3de335d5e82
|
[
"CC-BY-4.0"
] | 7
|
2017-08-03T08:02:11.000Z
|
2021-02-22T02:25:03.000Z
|
pc/migrations/0001_initial.py
|
kian11/testcase_web
|
6698190c426be56bfc54e92b6f99a3de335d5e82
|
[
"CC-BY-4.0"
] | null | null | null |
pc/migrations/0001_initial.py
|
kian11/testcase_web
|
6698190c426be56bfc54e92b6f99a3de335d5e82
|
[
"CC-BY-4.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-10-14 09:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='PCTestCase1',
fields=[
('id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('case_id', models.CharField(max_length=45)),
('case_name', models.CharField(max_length=45)),
('url', models.URLField(blank=True, null=True)),
('name', models.CharField(blank=True, max_length=45, null=True)),
('action', models.CharField(blank=True, max_length=45, null=True)),
('value', models.TextField(blank=True, null=True)),
('expected', models.TextField(blank=True, null=True)),
('actual', models.TextField(blank=True, editable=False, null=True)),
('result', models.CharField(blank=True, editable=False, max_length=5, null=True)),
                ('state', models.IntegerField(default=1)),
],
options={
'ordering': ['id'],
'db_table': 'p_illness_project_released',
'verbose_name': 'PC-\u5927\u75c5\u6551\u52a9\u9879\u76ee\u53d1\u5e03',
'verbose_name_plural': 'PC-\u5927\u75c5\u6551\u52a9\u9879\u76ee\u53d1\u5e03',
},
),
migrations.CreateModel(
name='PCTestCase2',
fields=[
('id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('case_id', models.CharField(max_length=45)),
('case_name', models.CharField(max_length=45)),
('url', models.URLField(blank=True, null=True)),
('name', models.CharField(blank=True, max_length=45, null=True)),
('action', models.CharField(blank=True, max_length=45, null=True)),
('value', models.TextField(blank=True, null=True)),
('expected', models.TextField(blank=True, null=True)),
('actual', models.TextField(blank=True, editable=False, null=True)),
('result', models.CharField(blank=True, editable=False, max_length=5, null=True)),
                ('state', models.IntegerField(default=1)),
],
options={
'ordering': ['id'],
'db_table': 'p_disaster_project_released',
'verbose_name': 'PC-\u707e\u96be\u6551\u52a9\u9879\u76ee\u53d1\u5e03',
'verbose_name_plural': 'PC-\u707e\u96be\u6551\u52a9\u9879\u76ee\u53d1\u5e03',
},
),
migrations.CreateModel(
name='PCTestCase3',
fields=[
('id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('case_id', models.CharField(max_length=45)),
('case_name', models.CharField(max_length=45)),
('url', models.URLField(blank=True, null=True)),
('name', models.CharField(blank=True, max_length=45, null=True)),
('action', models.CharField(blank=True, max_length=45, null=True)),
('value', models.TextField(blank=True, null=True)),
('expected', models.TextField(blank=True, null=True)),
('actual', models.TextField(blank=True, editable=False, null=True)),
('result', models.CharField(blank=True, editable=False, max_length=5, null=True)),
                ('state', models.IntegerField(default=1)),
],
options={
'ordering': ['id'],
'db_table': 'p_animal_project_released',
'verbose_name': 'PC-\u52a8\u7269\u4fdd\u62a4\u9879\u76ee\u53d1\u5e03',
'verbose_name_plural': 'PC-\u52a8\u7269\u4fdd\u62a4\u9879\u76ee\u53d1\u5e03',
},
),
migrations.CreateModel(
name='PCTestCase4',
fields=[
('id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('case_id', models.CharField(max_length=45)),
('case_name', models.CharField(max_length=45)),
('url', models.URLField(blank=True, null=True)),
('name', models.CharField(blank=True, max_length=45, null=True)),
('action', models.CharField(blank=True, max_length=45, null=True)),
('value', models.TextField(blank=True, null=True)),
('expected', models.TextField(blank=True, null=True)),
('actual', models.TextField(blank=True, editable=False, null=True)),
('result', models.CharField(blank=True, editable=False, max_length=5, null=True)),
                ('state', models.IntegerField(default=1)),
],
options={
'ordering': ['id'],
'db_table': 'p_poverty_project_released',
'verbose_name': 'PC-\u6276\u8d2b\u52a9\u5b66\u9879\u76ee\u53d1\u5e03',
'verbose_name_plural': 'PC-\u6276\u8d2b\u52a9\u5b66\u9879\u76ee\u53d1\u5e03',
},
),
migrations.CreateModel(
name='PCTestCase5',
fields=[
('id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('case_id', models.CharField(max_length=45)),
('case_name', models.CharField(max_length=45)),
('url', models.URLField(blank=True, null=True)),
('name', models.CharField(blank=True, max_length=45, null=True)),
('action', models.CharField(blank=True, max_length=45, null=True)),
('value', models.TextField(blank=True, null=True)),
('expected', models.TextField(blank=True, null=True)),
('actual', models.TextField(blank=True, editable=False, null=True)),
('result', models.CharField(blank=True, editable=False, max_length=5, null=True)),
                ('state', models.IntegerField(default=1)),
],
options={
'ordering': ['id'],
'db_table': 'p_other_project_released',
'verbose_name': 'PC-\u5176\u4ed6\u9879\u76ee\u53d1\u5e03',
                'verbose_name_plural': 'PC-\u5176\u4ed6\u9879\u76ee\u53d1\u5e03',
},
),
migrations.CreateModel(
name='PCTestCase6',
fields=[
('id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('case_id', models.CharField(max_length=45)),
('case_name', models.CharField(max_length=45)),
('url', models.URLField(blank=True, null=True)),
('name', models.CharField(blank=True, max_length=45, null=True)),
('action', models.CharField(blank=True, max_length=45, null=True)),
('value', models.TextField(blank=True, null=True)),
('expected', models.TextField(blank=True, null=True)),
('actual', models.TextField(blank=True, editable=False, null=True)),
('result', models.CharField(blank=True, editable=False, max_length=5, null=True)),
                ('state', models.IntegerField(default=1)),
],
options={
'ordering': ['id'],
'db_table': 'p_presale_project_released',
'verbose_name': 'PC-\u5c1d\u9c9c\u9884\u552e\u9879\u76ee\u53d1\u5e03',
'verbose_name_plural': 'PC-\u5c1d\u9c9c\u9884\u552e\u9879\u76ee\u53d1\u5e03',
},
),
migrations.CreateModel(
name='PCTestCase7',
fields=[
('id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('case_id', models.CharField(max_length=45)),
('case_name', models.CharField(max_length=45)),
('url', models.URLField(blank=True, null=True)),
('name', models.CharField(blank=True, max_length=45, null=True)),
('action', models.CharField(blank=True, max_length=45, null=True)),
('value', models.TextField(blank=True, null=True)),
('expected', models.TextField(blank=True, null=True)),
('actual', models.TextField(blank=True, editable=False, null=True)),
('result', models.CharField(blank=True, editable=False, max_length=5, null=True)),
                ('state', models.IntegerField(default=1)),
],
options={
'ordering': ['id'],
'db_table': 'p_dream_project_released',
'verbose_name': 'PC-\u68a6\u60f3\u6e05\u5355\u9879\u76ee\u53d1\u5e03',
'verbose_name_plural': 'PC-\u68a6\u60f3\u6e05\u5355\u9879\u76ee\u53d1\u5e03',
},
),
]
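# --- Usage note (illustrative) ---
# Apply this initial migration with: python manage.py migrate pc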
| 52.865497
| 98
| 0.551549
| 926
| 9,040
| 5.265659
| 0.117711
| 0.090443
| 0.063167
| 0.073216
| 0.929861
| 0.892125
| 0.892125
| 0.867514
| 0.84557
| 0.745078
| 0
| 0.065523
| 0.290929
| 9,040
| 170
| 99
| 53.176471
| 0.695164
| 0.007412
| 0
| 0.734568
| 1
| 0
| 0.191973
| 0.096767
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012346
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
39472fa7c0b581030c7eadbedb6c098dd60a4f4a
| 1,555
|
py
|
Python
|
tools/fileinfo/bugs/dot-net-detection-1/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2017-12-14T14:25:17.000Z
|
2019-03-09T03:29:12.000Z
|
tools/fileinfo/bugs/dot-net-detection-1/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 10
|
2019-06-14T09:12:55.000Z
|
2021-10-01T12:15:43.000Z
|
tools/fileinfo/bugs/dot-net-detection-1/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2019-05-10T14:59:48.000Z
|
2022-03-07T16:34:23.000Z
|
from regression_tests import *
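# Verifies that fileinfo's JSON output exposes the CLR runtime header data
# directory and reports the CIL/.NET language for plain .NET binaries.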
class DotNetTest(Test):
settings = TestSettings(
tool='fileinfo',
args='--json --verbose',
input=files_in_dir('inputs')
)
def test_fileinfo_json_output_is_correctly_parsed(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['fileFormat'], 'PE')
self.assertEqual(self.fileinfo.output['dataDirectories']['numberOfDataDirectories'], '16')
self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['index'], '14')
self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['type'], 'CLR runtime header')
self.assertTrue(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['address'] != '0')
self.assertTrue(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['size'] != '0')
self.assertEqual(self.fileinfo.output['languages'][0]['name'], 'CIL/.NET')
self.assertTrue(self.fileinfo.output['languages'][0]['bytecode'])
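# Same CIL/.NET detection checks for packed .NET inputs.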
class PackedDotNetTest(Test):
settings = TestSettings(
tool='fileinfo',
args='--json --verbose',
input=files_in_dir('packed_inputs')
)
def test_fileinfo_json_output_is_correctly_parsed(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['fileFormat'], 'PE')
self.assertEqual(self.fileinfo.output['languages'][0]['name'], 'CIL/.NET')
self.assertTrue(self.fileinfo.output['languages'][0]['bytecode'])
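# --- Note (illustrative) ---
# Both classes are discovered and executed by the retdec regression-tests
# framework (which supplies Test, TestSettings and files_in_dir); they are
# not standalone unittest suites.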
| 47.121212
| 123
| 0.682958
| 161
| 1,555
| 6.484472
| 0.31677
| 0.149425
| 0.189655
| 0.181034
| 0.873563
| 0.873563
| 0.859195
| 0.859195
| 0.592912
| 0.592912
| 0
| 0.013647
| 0.151768
| 1,555
| 32
| 124
| 48.59375
| 0.777862
| 0
| 0
| 0.571429
| 0
| 0
| 0.250161
| 0.014791
| 0
| 0
| 0
| 0
| 0.464286
| 1
| 0.071429
| false
| 0
| 0.035714
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a37182b0f64521ae63fd19ea4e658caaeb4cb91
| 158
|
py
|
Python
|
cw02/zad9.py
|
BartoszHolubowicz/projekt-psi
|
e1d753e543ed2676a21ba1d99191e36dbe484ae5
|
[
"bzip2-1.0.6"
] | null | null | null |
cw02/zad9.py
|
BartoszHolubowicz/projekt-psi
|
e1d753e543ed2676a21ba1d99191e36dbe484ae5
|
[
"bzip2-1.0.6"
] | null | null | null |
cw02/zad9.py
|
BartoszHolubowicz/projekt-psi
|
e1d753e543ed2676a21ba1d99191e36dbe484ae5
|
[
"bzip2-1.0.6"
] | null | null | null |
from cw02.file_manager import FileManager
print(FileManager.read_file("file_manager_test.txt"))
FileManager.update_file("file_manager_test.txt", "\nBardzo")
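# --- Interface sketch (hypothetical; the real FileManager lives in
# cw02/file_manager.py and is not shown here) ---
# class FileManager:
#     @staticmethod
#     def read_file(path): ...          # presumably returns the file's text
#     @staticmethod
#     def update_file(path, text): ...  # presumably writes `text` to the file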
| 31.6
| 60
| 0.829114
| 22
| 158
| 5.636364
| 0.545455
| 0.266129
| 0.241935
| 0.306452
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 0.050633
| 158
| 4
| 61
| 39.5
| 0.813333
| 0
| 0
| 0
| 0
| 0
| 0.316456
| 0.265823
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1a641dd3892c0f2dc93cdf2746ca0ae70d28d896
| 17,611
|
py
|
Python
|
zcrmsdk/src/com/zoho/crm/api/tags/tags_operations.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
zcrmsdk/src/com/zoho/crm/api/tags/tags_operations.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
zcrmsdk/src/com/zoho/crm/api/tags/tags_operations.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
try:
from zcrmsdk.src.com.zoho.crm.api.exception import SDKException
from zcrmsdk.src.com.zoho.crm.api.parameter_map import ParameterMap
from zcrmsdk.src.com.zoho.crm.api.util import APIResponse, CommonAPIHandler, Constants
from zcrmsdk.src.com.zoho.crm.api.param import Param
except Exception:
from ..exception import SDKException
from ..parameter_map import ParameterMap
from ..util import APIResponse, CommonAPIHandler, Constants
from ..param import Param
class TagsOperations(object):
def __init__(self):
"""Creates an instance of TagsOperations"""
pass
def get_tags(self, param_instance=None):
"""
The method to get tags
Parameters:
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/settings/tags'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.response_handler import ResponseHandler
except Exception:
from .response_handler import ResponseHandler
return handler_instance.api_call(ResponseHandler.__module__, 'application/json')
def create_tags(self, request, param_instance=None):
"""
The method to create tags
Parameters:
request (BodyWrapper) : An instance of BodyWrapper
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.tags.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/settings/tags'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_mandatory_checker(True)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def update_tags(self, request, param_instance=None):
"""
The method to update tags
Parameters:
request (BodyWrapper) : An instance of BodyWrapper
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.tags.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/settings/tags'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_mandatory_checker(True)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def update_tag(self, id, request, param_instance=None):
"""
The method to update tag
Parameters:
id (int) : An int representing the id
request (BodyWrapper) : An instance of BodyWrapper
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.tags.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/settings/tags/'
api_path = api_path + str(id)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def delete_tag(self, id):
"""
The method to delete tag
Parameters:
id (int) : An int representing the id
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/settings/tags/'
api_path = api_path + str(id)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_DELETE)
handler_instance.set_category_method(Constants.REQUEST_METHOD_DELETE)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def merge_tags(self, id, request):
"""
The method to merge tags
Parameters:
id (int) : An int representing the id
request (MergeWrapper) : An instance of MergeWrapper
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.tags.merge_wrapper import MergeWrapper
except Exception:
from .merge_wrapper import MergeWrapper
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if request is not None and not isinstance(request, MergeWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: MergeWrapper', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/settings/tags/'
api_path = api_path + str(id)
api_path = api_path + '/actions/merge'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_mandatory_checker(True)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def add_tags_to_record(self, record_id, module_api_name, param_instance=None):
"""
The method to add tags to record
Parameters:
record_id (int) : An int representing the record_id
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(record_id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: record_id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(record_id)
api_path = api_path + '/actions/add_tags'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_mandatory_checker(True)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.record_action_handler import RecordActionHandler
except Exception:
from .record_action_handler import RecordActionHandler
return handler_instance.api_call(RecordActionHandler.__module__, 'application/json')
def remove_tags_from_record(self, record_id, module_api_name, param_instance=None):
"""
The method to remove tags from record
Parameters:
record_id (int) : An int representing the record_id
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(record_id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: record_id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(record_id)
api_path = api_path + '/actions/remove_tags'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_mandatory_checker(True)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.record_action_handler import RecordActionHandler
except Exception:
from .record_action_handler import RecordActionHandler
return handler_instance.api_call(RecordActionHandler.__module__, 'application/json')
def add_tags_to_multiple_records(self, module_api_name, param_instance=None):
"""
The method to add tags to multiple records
Parameters:
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/actions/add_tags'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_mandatory_checker(True)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.record_action_handler import RecordActionHandler
except Exception:
from .record_action_handler import RecordActionHandler
return handler_instance.api_call(RecordActionHandler.__module__, 'application/json')
def remove_tags_from_multiple_records(self, module_api_name, param_instance=None):
"""
The method to remove tags from multiple records
Parameters:
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/actions/remove_tags'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_mandatory_checker(True)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.record_action_handler import RecordActionHandler
except Exception:
from .record_action_handler import RecordActionHandler
return handler_instance.api_call(RecordActionHandler.__module__, 'application/json')
def get_record_count_for_tag(self, id, param_instance=None):
"""
The method to get record count for tag
Parameters:
id (int) : An int representing the id
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2/settings/tags/'
api_path = api_path + str(id)
api_path = api_path + '/actions/records_count'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
handler_instance.set_param(param_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.tags.count_handler import CountHandler
except Exception:
from .count_handler import CountHandler
return handler_instance.api_call(CountHandler.__module__, 'application/json')
class GetTagsParam(object):
module = Param('module', 'com.zoho.crm.api.Tags.GetTagsParam')
my_tags = Param('my_tags', 'com.zoho.crm.api.Tags.GetTagsParam')
class CreateTagsParam(object):
module = Param('module', 'com.zoho.crm.api.Tags.CreateTagsParam')
class UpdateTagsParam(object):
module = Param('module', 'com.zoho.crm.api.Tags.UpdateTagsParam')
class UpdateTagParam(object):
module = Param('module', 'com.zoho.crm.api.Tags.UpdateTagParam')
class AddTagsToRecordParam(object):
tag_names = Param('tag_names', 'com.zoho.crm.api.Tags.AddTagsToRecordParam')
over_write = Param('over_write', 'com.zoho.crm.api.Tags.AddTagsToRecordParam')
class RemoveTagsFromRecordParam(object):
tag_names = Param('tag_names', 'com.zoho.crm.api.Tags.RemoveTagsFromRecordParam')
class AddTagsToMultipleRecordsParam(object):
tag_names = Param('tag_names', 'com.zoho.crm.api.Tags.AddTagsToMultipleRecordsParam')
ids = Param('ids', 'com.zoho.crm.api.Tags.AddTagsToMultipleRecordsParam')
over_write = Param('over_write', 'com.zoho.crm.api.Tags.AddTagsToMultipleRecordsParam')
class RemoveTagsFromMultipleRecordsParam(object):
tag_names = Param('tag_names', 'com.zoho.crm.api.Tags.RemoveTagsFromMultipleRecordsParam')
ids = Param('ids', 'com.zoho.crm.api.Tags.RemoveTagsFromMultipleRecordsParam')
class GetRecordCountForTagParam(object):
module = Param('module', 'com.zoho.crm.api.Tags.GetRecordCountForTagParam')
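# --- Usage sketch (illustrative; assumes the SDK has been initialised) ---
# param_instance = ParameterMap()
# param_instance.add(GetTagsParam.module, 'Leads')
# response = TagsOperations().get_tags(param_instance)
# if response is not None:
#     print(response.get_status_code())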
| 36.162218
| 111
| 0.782977
| 2,302
| 17,611
| 5.726759
| 0.049957
| 0.04832
| 0.077828
| 0.056285
| 0.920049
| 0.907001
| 0.881817
| 0.865964
| 0.855875
| 0.82174
| 0
| 0.000719
| 0.131395
| 17,611
| 486
| 112
| 36.236626
| 0.861084
| 0.168134
| 0
| 0.783088
| 0
| 0
| 0.147187
| 0.05408
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044118
| false
| 0.003676
| 0.139706
| 0
| 0.3125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ab9a6fc5bfbb90c4ab603ce0a273484a3d4b52b
| 22,913
|
py
|
Python
|
pysnmp/BLADESPPALT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/BLADESPPALT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/BLADESPPALT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module BLADESPPALT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BLADESPPALT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:22:04 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
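# NOTE: `mibBuilder` is not imported below; pysmi-generated modules rely on
# the pysnmp MIB loader injecting it into the module namespace when the file
# is loaded via MibBuilder.loadModules().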
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, iso, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, NotificationType, TimeTicks, Bits, Counter32, Integer32, Gauge32, NotificationType, ObjectIdentity, MibIdentifier, Unsigned32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "iso", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "NotificationType", "TimeTicks", "Bits", "Counter32", "Integer32", "Gauge32", "NotificationType", "ObjectIdentity", "MibIdentifier", "Unsigned32", "IpAddress")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
ibm = MibIdentifier((1, 3, 6, 1, 4, 1, 2))
ibmProd = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6))
supportProcessor = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 158))
mmRemoteSupTrapMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 158, 3))
remoteSupTrapMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1))
spTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1))
spTrapDateTime = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapDateTime.setStatus('mandatory')
spTrapAppId = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapAppId.setStatus('mandatory')
spTrapSpTxtId = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapSpTxtId.setStatus('mandatory')
spTrapSysUuid = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapSysUuid.setStatus('mandatory')
spTrapSysSern = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapSysSern.setStatus('mandatory')
spTrapAppType = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapAppType.setStatus('mandatory')
spTrapPriority = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapPriority.setStatus('mandatory')
spTrapMsgText = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapMsgText.setStatus('mandatory')
spTrapHostContact = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapHostContact.setStatus('mandatory')
spTrapHostLocation = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapHostLocation.setStatus('mandatory')
spTrapBladeName = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapBladeName.setStatus('mandatory')
spTrapBladeSern = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapBladeSern.setStatus('mandatory')
spTrapBladeUuid = MibScalar((1, 3, 6, 1, 4, 1, 2, 6, 158, 3, 1, 1, 13), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spTrapBladeUuid.setStatus('mandatory')
mmTrapTempC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,0)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapVoltC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,1)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapTampC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,2)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapMffC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,3)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapPsC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,4)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mTrapHdC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,5)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapVrmC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,6)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapSffC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,11)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapMsC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,31)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapIhcC = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,36)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapRdpsN = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,10)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapTempN = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,12)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapVoltN = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,13)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapRmN = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,32)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapSecDvS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,15)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapPostToS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,20)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapOsToS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,21)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapAppS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,22)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapPoffS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,23)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapPonS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,24)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapBootS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,25)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapLdrToS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,26)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapPFAS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,27)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapKVMSwitchS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,33)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapSysInvS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,34)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapSysLogS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,35)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapNwChangeS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,37)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapBlThrS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,39)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mmTrapPwrMgntS = NotificationType((1, 3, 6, 1, 4, 1, 2, 6, 158, 3) + (0,40)).setObjects(("BLADESPPALT-MIB", "spTrapDateTime"), ("BLADESPPALT-MIB", "spTrapAppId"), ("BLADESPPALT-MIB", "spTrapSpTxtId"), ("BLADESPPALT-MIB", "spTrapSysUuid"), ("BLADESPPALT-MIB", "spTrapSysSern"), ("BLADESPPALT-MIB", "spTrapAppType"), ("BLADESPPALT-MIB", "spTrapPriority"), ("BLADESPPALT-MIB", "spTrapMsgText"), ("BLADESPPALT-MIB", "spTrapHostContact"), ("BLADESPPALT-MIB", "spTrapHostLocation"), ("BLADESPPALT-MIB", "spTrapBladeName"), ("BLADESPPALT-MIB", "spTrapBladeSern"), ("BLADESPPALT-MIB", "spTrapBladeUuid"))
mibBuilder.exportSymbols("BLADESPPALT-MIB", mmTrapSysInvS=mmTrapSysInvS, mmTrapKVMSwitchS=mmTrapKVMSwitchS, spTrapBladeSern=spTrapBladeSern, ibm=ibm, spTrapSysSern=spTrapSysSern, mmTrapOsToS=mmTrapOsToS, spTrapAppId=spTrapAppId, mmTrapTampC=mmTrapTampC, mmTrapNwChangeS=mmTrapNwChangeS, mmTrapTempN=mmTrapTempN, spTrapAppType=spTrapAppType, spTrapSysUuid=spTrapSysUuid, spTrapBladeName=spTrapBladeName, mmTrapVoltC=mmTrapVoltC, mmTrapPsC=mmTrapPsC, mmTrapPonS=mmTrapPonS, mmTrapLdrToS=mmTrapLdrToS, mmTrapPwrMgntS=mmTrapPwrMgntS, spTrapHostLocation=spTrapHostLocation, mmTrapVoltN=mmTrapVoltN, remoteSupTrapMibObjects=remoteSupTrapMibObjects, mmTrapBlThrS=mmTrapBlThrS, mmTrapPostToS=mmTrapPostToS, mmTrapVrmC=mmTrapVrmC, mmTrapTempC=mmTrapTempC, mmTrapSffC=mmTrapSffC, mmRemoteSupTrapMIB=mmRemoteSupTrapMIB, spTrapInfo=spTrapInfo, mmTrapMffC=mmTrapMffC, mmTrapAppS=mmTrapAppS, mmTrapSysLogS=mmTrapSysLogS, spTrapPriority=spTrapPriority, spTrapMsgText=spTrapMsgText, mTrapHdC=mTrapHdC, spTrapSpTxtId=spTrapSpTxtId, ibmProd=ibmProd, spTrapHostContact=spTrapHostContact, mmTrapSecDvS=mmTrapSecDvS, mmTrapMsC=mmTrapMsC, mmTrapPoffS=mmTrapPoffS, mmTrapIhcC=mmTrapIhcC, mmTrapPFAS=mmTrapPFAS, spTrapBladeUuid=spTrapBladeUuid, mmTrapRmN=mmTrapRmN, mmTrapRdpsN=mmTrapRdpsN, mmTrapBootS=mmTrapBootS, spTrapDateTime=spTrapDateTime, supportProcessor=supportProcessor)
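# --- Loading sketch (illustrative; assumes this file sits on a MIB source
# path known to pysnmp) ---
# from pysnmp.smi import builder
# mib_builder = builder.MibBuilder()
# mib_builder.loadModules('BLADESPPALT-MIB')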
| 301.486842
| 1,357
| 0.720115
| 2,207
| 22,913
| 7.476212
| 0.077028
| 0.322424
| 0.008727
| 0.011636
| 0.821636
| 0.809636
| 0.773636
| 0.773636
| 0.772424
| 0.771152
| 0
| 0.036487
| 0.070615
| 22,913
| 75
| 1,358
| 305.506667
| 0.738342
| 0.01414
| 0
| 0
| 0
| 0
| 0.517892
| 0.001949
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.088235
| 0
| 0.088235
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2011317c2d42a405fd95c8c1f635618c881d50d1
| 239,385
|
py
|
Python
|
tests/adapters/switches/juniper_test.py
|
idjaw/netman
|
58ba898de6e450a24b4f1721ce274ad3e12f9d33
|
[
"Apache-2.0"
] | 1
|
2016-01-28T17:56:51.000Z
|
2016-01-28T17:56:51.000Z
|
tests/adapters/switches/juniper_test.py
|
idjaw/netman
|
58ba898de6e450a24b4f1721ce274ad3e12f9d33
|
[
"Apache-2.0"
] | 2
|
2021-12-13T20:55:50.000Z
|
2022-03-29T22:07:13.000Z
|
tests/adapters/switches/juniper_test.py
|
idjaw/netman
|
58ba898de6e450a24b4f1721ce274ad3e12f9d33
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
import textwrap
import unittest
import mock
from flexmock import flexmock, flexmock_teardown
from hamcrest import assert_that, has_length, equal_to, contains_string, has_key, \
is_, instance_of
from ncclient.devices.junos import JunosDeviceHandler
from ncclient.operations import RPCError, TimeoutExpiredError
from ncclient.xml_ import NCElement, to_ele, to_xml
from netman.adapters.switches import juniper
from netman.adapters.switches.juniper import Juniper
from netman.adapters.switches.juniper.standard import JuniperCustomStrategies
from netman.core.objects.access_groups import OUT, IN
from netman.core.objects.exceptions import LockedSwitch, VlanAlreadyExist, BadVlanNumber, BadVlanName, UnknownVlan, \
InterfaceInWrongPortMode, UnknownInterface, AccessVlanNotSet, NativeVlanNotSet, TrunkVlanNotSet, VlanAlreadyInTrunk, \
BadBondNumber, UnknownBond, InterfaceNotInBond, BondAlreadyExist, OperationNotCompleted, InvalidMtuSize
from netman.core.objects.interface_states import OFF, ON
from netman.core.objects.port_modes import ACCESS, TRUNK, BOND_MEMBER
from netman.core.objects.switch_descriptor import SwitchDescriptor
from netman.core.objects.switch_transactional import FlowControlSwitch
from tests import ignore_deprecation_warnings
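# The standard factory should wrap the Juniper adapter in a FlowControlSwitch
# and carry every SwitchDescriptor field through unchanged.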
@ignore_deprecation_warnings
def test_factory():
lock = mock.Mock()
switch = juniper.standard_factory(SwitchDescriptor(hostname='hostname', model='juniper', username='username', password='password', port=22), lock)
assert_that(switch, instance_of(FlowControlSwitch))
assert_that(switch.wrapped_switch, instance_of(Juniper))
assert_that(switch.lock, is_(lock))
assert_that(switch.switch_descriptor.hostname, equal_to("hostname"))
assert_that(switch.switch_descriptor.model, equal_to("juniper"))
assert_that(switch.switch_descriptor.username, equal_to("username"))
assert_that(switch.switch_descriptor.password, equal_to("password"))
assert_that(switch.switch_descriptor.port, equal_to(22))
class JuniperTest(unittest.TestCase):
def setUp(self):
self.switch = juniper.standard.netconf(SwitchDescriptor(model='juniper', hostname="toto"))
self.netconf_mock = flexmock()
self.switch.netconf = self.netconf_mock
self.switch.in_transaction = True
def tearDown(self):
flexmock_teardown()
def test_switch_has_a_logger_configured_with_the_switch_name(self):
assert_that(self.switch.logger.name, is_(Juniper.__module__ + ".toto"))
def test_get_vlans(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<description>my-description</description>
</vlan>
<vlan>
<name>NO-VLAN-ID</name>
<description>shizzle</description>
</vlan>
<vlan>
<name>WITH-IF</name>
<vlan-id>20</vlan-id>
<l3-interface>vlan.20</l3-interface>
</vlan>
<vlan>
<name>WITH-IF-MULTI-IP</name>
<vlan-id>40</vlan-id>
<l3-interface>vlan.70</l3-interface>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
<filter>
<input>
<filter-name>AC-IN</filter-name>
</input>
<output>
<filter-name>AC-OUT</filter-name>
</output>
</filter>
</inet>
</family>
</unit>
<unit>
<name>40</name>
</unit>
<unit>
<name>70</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
<address>
<name>4.1.1.1/24</name>
</address>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan10, vlan20, vlan40 = self.switch.get_vlans()
assert_that(vlan10.number, equal_to(10))
assert_that(vlan10.name, equal_to("my-description"))
assert_that(vlan10.access_groups[IN], equal_to(None))
assert_that(vlan10.access_groups[OUT], equal_to(None))
assert_that(vlan10.ips, has_length(0))
assert_that(vlan20.number, equal_to(20))
assert_that(vlan20.name, equal_to(None))
assert_that(vlan20.access_groups[IN], equal_to("AC-IN"))
assert_that(vlan20.access_groups[OUT], equal_to("AC-OUT"))
assert_that(vlan20.ips, has_length(1))
vlan20ip1 = vlan20.ips[0]
assert_that(str(vlan20ip1.ip), equal_to("1.1.1.1"))
assert_that(vlan20ip1.prefixlen, equal_to(24))
assert_that(vlan40.number, equal_to(40))
assert_that(vlan40.name, equal_to(None))
assert_that(vlan40.access_groups[IN], equal_to(None))
assert_that(vlan40.access_groups[OUT], equal_to(None))
vlan40ip1, vlan40ip2, vlan40ip3 = vlan40.ips
assert_that(str(vlan40ip1.ip), equal_to("2.1.1.1"))
assert_that(vlan40ip1.prefixlen, equal_to(24))
assert_that(str(vlan40ip2.ip), equal_to("3.1.1.1"))
assert_that(vlan40ip2.prefixlen, equal_to(24))
assert_that(str(vlan40ip3.ip), equal_to("4.1.1.1"))
assert_that(vlan40ip3.prefixlen, equal_to(24))
def test_get_vlans_where_vlan_interfaces_can_also_be_called_irb(self):
self.switch.in_transaction = True
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>ON_VLAN</name>
<vlan-id>10</vlan-id>
<l3-interface>vlan.10</l3-interface>
</vlan>
<vlan>
<name>ON_IRB</name>
<vlan-id>20</vlan-id>
<l3-interface>irb.20</l3-interface>
</vlan>
<vlan>
<name>ON_WHATEVER</name>
<vlan-id>30</vlan-id>
<l3-interface>whatever.30</l3-interface>
</vlan>
<vlan>
<name>ON_NOTFOUND</name>
<vlan-id>40</vlan-id>
<l3-interface>notfound.20</l3-interface>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>10</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>whatever</name>
<unit>
<name>30</name>
<family>
<inet>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan10, vlan20, vlan30, vlan40 = self.switch.get_vlans()
assert_that(str(vlan10.ips[0].ip), equal_to("1.1.1.1"))
assert_that(str(vlan20.ips[0].ip), equal_to("2.1.1.1"))
assert_that(str(vlan30.ips[0].ip), equal_to("3.1.1.1"))
assert_that(vlan40.ips, has_length(0))
def test_get_vlan_interfaces(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>705</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN705</name>
<vlan-id>705</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>xe-0/0/6</name>
<unit>
<family>
<ethernet-switching>
<vlan>
<members>687</members>
<members>705</members>
<members>708</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/7</name>
<unit>
<family>
<ethernet-switching>
<vlan>
<members>705</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/8</name>
<unit>
<family>
<ethernet-switching>
<vlan>
<members>456</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>xe-0/0/9</name>
<unit>
<family>
<ethernet-switching>
<vlan>
<members>700-800</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan_interfaces = self.switch.get_vlan_interfaces(705)
assert_that(vlan_interfaces, equal_to(["xe-0/0/6", "xe-0/0/7", "xe-0/0/9"]))
def test_get_vlan_interfaces_with_name_as_member(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>705</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>bleu</name>
<vlan-id>705</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>xe-0/0/9</name>
<unit>
<family>
<ethernet-switching>
<vlan>
<members>bleu</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan_interfaces = self.switch.get_vlan_interfaces(705)
assert_that(vlan_interfaces, equal_to(["xe-0/0/9"]))
def test_get_vlan_interfaces_nonexisting_vlan(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>9999999</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans />
<interfaces>
<interface>
<name>xe-0/0/9</name>
<unit>
<family>
<ethernet-switching>
<vlan>
<members>705</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
with self.assertRaises(UnknownVlan):
self.switch.get_vlan_interfaces("9999999")
def test_get_vlan_with_no_interface(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>10</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<description>my-description</description>
</vlan>
</vlans>
"""))
vlan = self.switch.get_vlan(10)
assert_that(vlan.number, equal_to(10))
assert_that(vlan.name, equal_to("my-description"))
assert_that(vlan.access_groups[IN], equal_to(None))
assert_that(vlan.access_groups[OUT], equal_to(None))
assert_that(vlan.ips, has_length(0))
def test_get_vlan_with_unknown_vlan(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>10</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
"""))
with self.assertRaises(UnknownVlan) as expect:
self.switch.get_vlan(10)
assert_that(str(expect.exception), equal_to("Vlan 10 not found"))
def test_get_vlan_with_interface(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>20</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>WITH-IF</name>
<vlan-id>20</vlan-id>
<l3-interface>vlan.20</l3-interface>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/1</name>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
<filter>
<input>
<filter-name>AC-IN</filter-name>
</input>
<output>
<filter-name>AC-OUT</filter-name>
</output>
</filter>
</inet>
</family>
</unit>
<unit>
<name>40</name>
</unit>
<unit>
<name>70</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
<address>
<name>4.1.1.1/24</name>
</address>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(20)
assert_that(vlan.number, equal_to(20))
assert_that(vlan.name, equal_to(None))
assert_that(vlan.access_groups[IN], equal_to("AC-IN"))
assert_that(vlan.access_groups[OUT], equal_to("AC-OUT"))
assert_that(vlan.ips, has_length(1))
vlan20ip1 = vlan.ips[0]
assert_that(str(vlan20ip1.ip), equal_to("1.1.1.1"))
assert_that(vlan20ip1.prefixlen, equal_to(24))
def test_get_vlan_with_interface_multi_ip(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>40</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>WITH-IF-MULTI-IP</name>
<vlan-id>40</vlan-id>
<l3-interface>vlan.70</l3-interface>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
<filter>
<input>
<filter-name>AC-IN</filter-name>
</input>
<output>
<filter-name>AC-OUT</filter-name>
</output>
</filter>
</inet>
</family>
</unit>
<unit>
<name>40</name>
</unit>
<unit>
<name>70</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
<address>
<name>4.1.1.1/24</name>
</address>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(40)
assert_that(vlan.number, equal_to(40))
assert_that(vlan.name, equal_to(None))
assert_that(vlan.access_groups[IN], equal_to(None))
assert_that(vlan.access_groups[OUT], equal_to(None))
vlanip1, vlanip2, vlanip3 = vlan.ips
assert_that(str(vlanip1.ip), equal_to("2.1.1.1"))
assert_that(vlanip1.prefixlen, equal_to(24))
assert_that(str(vlanip2.ip), equal_to("3.1.1.1"))
assert_that(vlanip2.prefixlen, equal_to(24))
assert_that(str(vlanip3.ip), equal_to("4.1.1.1"))
assert_that(vlanip3.prefixlen, equal_to(24))
def test_get_vlan_where_vlan_interfaces_can_also_be_called_irb(self):
self.switch.in_transaction = True
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>20</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>ON_IRB</name>
<vlan-id>20</vlan-id>
<l3-interface>irb.20</l3-interface>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>10</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>whatever</name>
<unit>
<name>30</name>
<family>
<inet>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(20)
assert_that(str(vlan.ips[0].ip), equal_to("2.1.1.1"))
def test_get_vlan_where_vlan_interfaces_not_found(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<vlans>
<vlan>
<vlan-id>40</vlan-id>
</vlan>
</vlans>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>NOT_FOUND</name>
<vlan-id>40</vlan-id>
<l3-interface>notfound.20</l3-interface>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>10</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>irb</name>
<unit>
<name>20</name>
<family>
<inet>
<address>
<name>2.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>whatever</name>
<unit>
<name>30</name>
<family>
<inet>
<address>
<name>3.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
</interfaces>
"""))
vlan = self.switch.get_vlan(40)
assert_that(vlan.ips, has_length(0))
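# Interface retrieval: the adapter first reads the configuration; when an
# interface has no configuration at all, it falls back to the terse
# <get-interface-information/> RPC to confirm the port physically exists and
# to read its admin status, raising UnknownInterface when nothing matches.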
def test_get_interface(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
interface = self.switch.get_interface('ge-0/0/1')
assert_that(interface.name, equal_to("ge-0/0/1"))
assert_that(interface.shutdown, equal_to(False))
assert_that(interface.port_mode, equal_to(ACCESS))
assert_that(interface.access_vlan, equal_to(None))
assert_that(interface.trunk_native_vlan, equal_to(None))
assert_that(interface.trunk_vlans, equal_to([]))
assert_that(interface.auto_negotiation, equal_to(None))
assert_that(interface.mtu, equal_to(None))
def test_get_unconfigured_but_existing_interface_returns_an_empty_interface(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/27</name>
</interface>
</interfaces>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<vlans/>
"""))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/27
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
interface = self.switch.get_interface('ge-0/0/27')
assert_that(interface.name, equal_to("ge-0/0/27"))
assert_that(interface.shutdown, equal_to(False))
assert_that(interface.port_mode, equal_to(ACCESS))
assert_that(interface.access_vlan, equal_to(None))
assert_that(interface.trunk_native_vlan, equal_to(None))
assert_that(interface.trunk_vlans, equal_to([]))
def test_get_unconfigured_interface_could_be_disabled(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/27</name>
</interface>
</interfaces>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<vlans/>
"""))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/27
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
assert_that(self.switch.get_interface('ge-0/0/27').shutdown, equal_to(True))
def test_get_nonexistent_interface_raises(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/INEXISTENT</name>
</interface>
</interfaces>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<vlans/>
"""))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
with self.assertRaises(UnknownInterface) as expect:
self.switch.get_interface('ge-0/0/INEXISTENT')
assert_that(str(expect.exception), equal_to("Unknown interface ge-0/0/INEXISTENT"))
def test_get_interfaces(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/1.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/2
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/2.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/3
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/3.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/4
</name>
<admin-status>up</admin-status>
<oper-status>down</oper-status>
<logical-interface>
<name>
ge-0/0/4.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/5
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces />
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/2</name>
<disable />
<description>Howdy</description>
<mtu>5000</mtu>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/3</name>
<ether-options>
<no-auto-negotiation/>
</ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>999-1001</members>
<members>1000</members>
</vlan>
<native-vlan-id>2000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/4</name>
<ether-options>
<auto-negotiation/>
</ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/5</name>
<ether-options>
<speed>
<ethernet-100m/>
</speed>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>40</name>
</unit>
</interface>
<interface>
<name>ae10</name>
<aggregated-ether-options>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching />
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
if1, if2, if3, if4, if5 = self.switch.get_interfaces()
assert_that(if1.name, equal_to("ge-0/0/1"))
assert_that(if1.shutdown, equal_to(False))
assert_that(if1.port_mode, equal_to(ACCESS))
assert_that(if1.access_vlan, equal_to(None))
assert_that(if1.trunk_native_vlan, equal_to(None))
assert_that(if1.trunk_vlans, equal_to([]))
assert_that(if1.auto_negotiation, equal_to(None))
assert_that(if1.mtu, equal_to(None))
assert_that(if2.name, equal_to("ge-0/0/2"))
assert_that(if2.shutdown, equal_to(True))
assert_that(if2.port_mode, equal_to(ACCESS))
assert_that(if2.access_vlan, equal_to(1000))
assert_that(if2.trunk_native_vlan, equal_to(None))
assert_that(if2.trunk_vlans, equal_to([]))
assert_that(if2.mtu, equal_to(5000))
assert_that(if3.name, equal_to("ge-0/0/3"))
assert_that(if3.port_mode, equal_to(TRUNK))
assert_that(if3.access_vlan, equal_to(None))
assert_that(if3.trunk_native_vlan, equal_to(2000))
assert_that(if3.trunk_vlans, equal_to([999, 1000, 1001]))
assert_that(if3.auto_negotiation, equal_to(False))
assert_that(if4.name, equal_to("ge-0/0/4"))
assert_that(if4.trunk_native_vlan, equal_to(None))
assert_that(if4.trunk_vlans, equal_to([]))
assert_that(if4.auto_negotiation, equal_to(True))
assert_that(if5.name, equal_to("ge-0/0/5"))
assert_that(if5.port_mode, equal_to(BOND_MEMBER))
assert_that(if5.bond_master, equal_to(10))
def test_get_interfaces_lists_configuration_less_interfaces(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
<physical-interface>
<name>
ge-0/0/2
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces />
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces />
<vlans/>
"""))
if1, if2 = self.switch.get_interfaces()
assert_that(if1.name, equal_to("ge-0/0/1"))
assert_that(if1.shutdown, equal_to(False))
assert_that(if1.port_mode, equal_to(ACCESS))
assert_that(if1.access_vlan, equal_to(None))
assert_that(if1.trunk_native_vlan, equal_to(None))
assert_that(if1.trunk_vlans, equal_to([]))
assert_that(if2.name, equal_to("ge-0/0/2"))
assert_that(if2.shutdown, equal_to(True))
def test_get_interfaces_supports_named_vlans(self):
self.switch.in_transaction = True
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/1.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
</interface-information>
""")))
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces />
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>MON_VLAN_PREFERE</name>
<vlan-id>1234</vlan-id>
<description>Oh yeah</description>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>MON_VLAN_PREFERE</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
if1, = self.switch.get_interfaces()
assert_that(if1.name, equal_to("ge-0/0/1"))
assert_that(if1.access_vlan, equal_to(1234))
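# Mutation tests: from here on, each test pins the exact <config> document the
# adapter is expected to push via edit_config to the candidate datastore, and
# asserts edit_config is never called when the change would be a no-op.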
def test_add_vlan(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>900</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan>
<name>VLAN1000</name>
<vlan-id>1000</vlan-id>
<description>Shizzle</description>
</vlan>
</vlans>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.add_vlan(1000, name="Shizzle")
def test_add_vlan_already_in_use_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(VlanAlreadyExist) as expect:
self.switch.add_vlan(1000)
assert_that(str(expect.exception), contains_string("Vlan 1000 already exist"))
def test_add_existing_vlan_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN1000</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(VlanAlreadyExist) as expect:
self.switch.add_vlan(1000)
assert_that(str(expect.exception), contains_string("Vlan 1000 already exist"))
def test_add_vlan_bad_vlan_id(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan>
<name>VLAN9000</name>
<vlan-id>9000</vlan-id>
</vlan>
</vlans>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-info>
<bad-element>9000</bad-element>
</error-info>
<error-message>Value 9000 is not within range (1..4094)</error-message>
</rpc-error>
"""))))
with self.assertRaises(BadVlanNumber) as expect:
self.switch.add_vlan(9000)
assert_that(str(expect.exception), equal_to("Vlan number is invalid"))
def test_add_vlan_bad_vlan_name(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan>
<name>VLAN1000</name>
<vlan-id>1000</vlan-id>
<description>a</description>
</vlan>
</vlans>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-info>
<bad-element>a</bad-element>
</error-info>
<error-message>Length 1 is not within range (2..255)</error-message>
</rpc-error>
"""))))
with self.assertRaises(BadVlanName) as expect:
self.switch.add_vlan(1000, "a")
assert_that(str(expect.exception), equal_to("Vlan name is invalid"))
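# remove_vlan must clean up everything tied to the vlan: the vlan definition
# itself, its L3 unit (vlan.X or irb.X, whatever the l3-interface points at),
# and any access/trunk memberships, splitting member ranges such as 9-15
# around the removed id.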
def test_remove_vlan_also_removes_associated_vlan_interface(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>MEH</name>
<vlan-id>5</vlan-id>
</vlan>
<vlan>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<l3-interface>vlan.25</l3-interface>
</vlan>
<vlan>
<name>MEH2</name>
<vlan-id>15</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan operation="delete">
<name>STANDARD</name>
</vlan>
</vlans>
<interfaces>
<interface>
<name>vlan</name>
<unit operation="delete">
<name>25</name>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_remove_vlan_also_removes_associated_vlan_interface_even_if_non_standard_name(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>MEH</name>
<vlan-id>5</vlan-id>
</vlan>
<vlan>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<l3-interface>irb.25</l3-interface>
</vlan>
<vlan>
<name>MEH2</name>
<vlan-id>15</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan operation="delete">
<name>STANDARD</name>
</vlan>
</vlans>
<interfaces>
<interface>
<name>irb</name>
<unit operation="delete">
<name>25</name>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_remove_vlan_ignores_removing_interface_not_created(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan operation="delete">
<name>STANDARD</name>
</vlan>
</vlans>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_remove_vlan_invalid_vlan_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>ANOTHER</name>
<vlan-id>10</vlan-id>
</vlan>
</vlans>
"""))
with self.assertRaises(UnknownVlan) as expect:
self.switch.remove_vlan(20)
assert_that(str(expect.exception), equal_to("Vlan 20 not found"))
def test_remove_vlan_in_use_deletes_all_usages(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>9</members>
<members>10</members>
<members>11</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/2</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>9-15</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/3</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>12</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/4</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>STANDARD</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/5</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>ANOTHER_NAME</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan operation="delete">
<name>STANDARD</name>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members operation="delete">10</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/2</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members operation="delete">9-15</members>
<members>9</members>
<members>11-15</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/4</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members operation="delete">STANDARD</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
def test_remove_vlan_delete_usage_and_interface_at_same_time(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<vlans />
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>STANDARD</name>
<vlan-id>10</vlan-id>
<l3-interface>vlan.10</l3-interface>
</vlan>
</vlans>
<interfaces>
<interface>
<name>name</name>
<unit>
<name>10</name>
<family>
<inet>
<address>
<name>1.1.1.1/24</name>
</address>
</inet>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>10</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<vlans>
<vlan operation="delete">
<name>STANDARD</name>
</vlan>
</vlans>
<interfaces>
<interface>
<name>vlan</name>
<unit operation="delete">
<name>10</name>
</unit>
</interface>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members operation="delete">10</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_vlan(10)
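# set_access_mode / set_trunk_mode: switching modes deletes vlan membership
# (and the native vlan) only when it would be invalid in the new mode, and is
# a no-op when the port is already in the requested mode.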
def test_port_mode_access_with_no_port_mode_or_vlan_set_just_sets_the_port_mode(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_mode("ge-0/0/6")
def test_port_mode_access_with_no_mode_and_1_vlan_does_not_remove_it(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>2998</members>
<members>2998</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_mode("ge-0/0/6")
def test_port_mode_access_with_trunk_mode_and_1_vlan_does_remove_it(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>2998</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan operation="delete" />
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_mode("ge-0/0/6")
def test_port_mode_access_with_trunk_mode_and_no_attributes_just_sets_mode(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_mode("ge-0/0/6")
def test_port_mode_access_already_in_access_mode_does_nothing(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.set_access_mode("ge-0/0/6")
def test_port_mode_access_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..63 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_access_mode("ge-0/0/99")
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_port_mode_access_on_default_interface_works(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_mode("ge-0/0/6")
def test_port_mode_access_with_trunk_mode_wipes_all_trunk_stuff(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>123</members>
<members>456</members>
</vlan>
<native-vlan-id>999</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan operation="delete" />
<native-vlan-id operation="delete" />
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_mode("ge-0/0/6")
def test_port_mode_trunk_with_no_port_mode_or_vlan_set_just_sets_the_port_mode(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_trunk_mode("ge-0/0/6")
def test_port_mode_trunk_with_no_port_mode_and_1_vlan_removes_it(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan operation="delete" />
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_trunk_mode("ge-0/0/6")
def test_port_mode_trunk_with_access_port_mode_and_1_vlan_removes_it(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan operation="delete" />
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_trunk_mode("ge-0/0/6")
def test_port_mode_trunk_already_in_trunk_mode_does_nothing(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>1000</members>
<members>1001</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.set_trunk_mode("ge-0/0/6")
def test_port_mode_trunk_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration())
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(a_port_value_outside_range_rpc_error())
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_trunk_mode("ge-0/0/99")
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_port_mode_trunk_on_default_interface_works(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration())
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_trunk_mode("ge-0/0/6")
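# set_access_vlan: the adapter validates that the vlan exists and that the
# port is not in trunk mode before emitting a members update, deleting any
# stale memberships (including degenerate ranges like 2000-2000) in the same
# edit_config call.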
def test_set_access_vlan_on_interface_with_access_mode_and_no_vlan_succeeds_easily(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_vlan("ge-0/0/6", 1000)
def test_set_access_vlan_on_interface_that_already_has_it_does_nothing(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.set_access_vlan("ge-0/0/6", 1000)
def test_set_access_vlan_on_interface_that_has_no_port_mode_sets_it(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_vlan("ge-0/0/6", 1000)
def test_set_access_vlan_on_interface_replaces_the_actual_ones(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
<vlan>
<name>PATATE2</name>
<vlan-id>2000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>2000</members>
<members>2000-2000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members operation="delete">2000</members>
<members operation="delete">2000-2000</members>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_vlan("ge-0/0/6", 1000)
def test_set_access_vlan_on_interface_in_trunk_mode_should_raise(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(InterfaceInWrongPortMode) as expect:
self.switch.set_access_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Operation cannot be performed on a trunk mode interface"))
def test_set_access_vlan_on_unknown_vlan_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>3333</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(UnknownVlan) as expect:
self.switch.set_access_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Vlan 1000 not found"))
def test_set_access_vlan_on_default_interface_works(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_vlan("ge-0/0/6", 1000)
def test_set_access_vlan_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(a_port_value_outside_range_rpc_error())
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_access_vlan("ge-0/0/99", 1000)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
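# reset_interface deletes the whole <interface> node without reading first;
# RPC errors reporting an out-of-range port or an invalid interface type are
# mapped to UnknownInterface, while any other RPCError is re-raised untouched.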
def test_reset_interface_works(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="delete">
<name>ge-0/0/6</name>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.reset_interface('ge-0/0/6')
def test_reset_port_value_outside_range_interface_raises_unknown_interface(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="delete">
<name>ge-0/0/99</name>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..63 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.reset_interface("ge-0/0/99")
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_reset_interface_with_invalid_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="delete">
<name>ne-0/0/9</name>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
invalid interface type in 'ne-0/0/9'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface):
self.switch.reset_interface("ne-0/0/9")
def test_reset_interface_with_unknown_rpcerror_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="delete">
<name>ne-0/0/9</name>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
Unknown error
</error-message>
</rpc-error>"""))))
with self.assertRaises(RPCError) as expect:
self.switch.reset_interface("ne-0/0/9")
assert_that(str(expect.exception), contains_string("Unknown error"))
def test_unset_interface_access_vlan_removes_the_vlan_members(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
<members>1000-1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan operation="delete" />
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.unset_interface_access_vlan("ge-0/0/6")
def test_unset_interface_access_vlan_with_no_vlan_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(AccessVlanNotSet) as expect:
self.switch.unset_interface_access_vlan("ge-0/0/6")
assert_that(str(expect.exception), contains_string("Access Vlan is not set on interface ge-0/0/6"))
def test_unset_interface_access_vlan_on_trunk_mode_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>123</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(InterfaceInWrongPortMode) as expect:
self.switch.unset_interface_access_vlan("ge-0/0/6")
assert_that(str(expect.exception), contains_string("Operation cannot be performed on a trunk mode interface"))
def test_unset_interface_access_vlan_on_default_interface_works(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(AccessVlanNotSet):
self.switch.unset_interface_access_vlan("ge-0/0/6")
def test_set_interface_native_vlan_on_interface_with_trunk_mode_and_no_native_vlan_succeeds_easily(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<native-vlan-id>1000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
def test_set_interface_native_vlan_on_interface_that_already_has_it_does_nothing(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<native-vlan-id>1000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
def test_set_interface_native_vlan_on_interface_that_has_no_port_mode_sets_it(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<native-vlan-id>1000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
def test_set_interface_native_vlan_on_interface_replaces_the_actual_ones(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
<vlan>
<name>PATATE2</name>
<vlan-id>2000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<native-vlan-id>2000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<native-vlan-id>1000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
def test_set_interface_native_vlan_on_interface_in_access_mode_should_raise(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(InterfaceInWrongPortMode) as expect:
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_set_interface_native_vlan_on_interface_that_is_already_a_member_of_the_trunk_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE0</name>
<vlan-id>999</vlan-id>
</vlan>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
<vlan>
<name>PATATE2</name>
<vlan-id>1001</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>999-1001</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(VlanAlreadyInTrunk) as expect:
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Vlan 1000 cannot be set as native vlan because it is already a member of the trunk"))
def test_set_interface_native_vlan_on_unknown_vlan_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>3333</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(UnknownVlan) as expect:
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Vlan 1000 not found"))
def test_set_interface_native_vlan_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<native-vlan-id>1000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(a_port_value_outside_range_rpc_error())
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_interface_native_vlan("ge-0/0/99", 1000)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_set_interface_native_vlan_on_default_interface_works(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<native-vlan-id>1000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_native_vlan("ge-0/0/6", 1000)
def test_unset_interface_native_vlan_succeeds(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<native-vlan-id>1000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<native-vlan-id operation="delete" />
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.unset_interface_native_vlan("ge-0/0/6")
def test_unset_interface_native_vlan_when_none_is_set_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(NativeVlanNotSet) as expect:
self.switch.unset_interface_native_vlan("ge-0/0/6")
assert_that(str(expect.exception), contains_string("Trunk native Vlan is not set on interface ge-0/0/6"))
def test_unset_interface_native_vlan_on_default_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(NativeVlanNotSet):
self.switch.unset_interface_native_vlan("ge-0/0/6")
def test_set_interface_auto_negotiation_state_ON_works(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<ether-options>
<auto-negotiation/>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_auto_negotiation_state("ge-0/0/6", ON)
def test_set_interface_auto_negotiation_state_OFF_works(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<ether-options>
<no-auto-negotiation/>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_auto_negotiation_state("ge-0/0/6", OFF)
def test_set_interface_auto_negotiation_raises_on_unknown_interface(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/128</name>
<ether-options>
<no-auto-negotiation/>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(a_port_value_outside_range_rpc_error())
with self.assertRaises(UnknownInterface):
self.switch.set_interface_auto_negotiation_state("ge-0/0/128", OFF)
def test_unset_interface_auto_negotiation_state_works_when_enabled(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<ether-options>
<auto-negotiation/>
</ether-options>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<ether-options>
<auto-negotiation operation=\"delete\"/>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.unset_interface_auto_negotiation_state("ge-0/0/6")
def test_unset_interface_auto_negotiation_state_does_nothing_on_default_interface(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/6
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
self.switch.unset_interface_auto_negotiation_state("ge-0/0/6")
def test_unset_interface_auto_negotiation_state_works_when_disabled(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<ether-options>
<no-auto-negotiation/>
</ether-options>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<ether-options>
<no-auto-negotiation operation=\"delete\"/>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.unset_interface_auto_negotiation_state("ge-0/0/6")
def test_unset_interface_auto_negotiation_state_raises_on_unknown_interface(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/6
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
with self.assertRaises(UnknownInterface):
self.switch.unset_interface_auto_negotiation_state("ge-0/0/99")
def test_add_trunk_vlan_on_interface_with_trunk_mode_and_no_vlan_succeeds_easily(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.add_trunk_vlan("ge-0/0/6", 1000)
def test_add_trunk_vlan_on_interface_that_already_has_it_does_nothing(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>900-1100</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.add_trunk_vlan("ge-0/0/6", 1000)
def test_add_trunk_vlan_on_interface_that_has_no_port_mode_and_no_vlan_sets_it(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.add_trunk_vlan("ge-0/0/6", 1000)
def test_add_trunk_vlan_on_interface_adds_to_the_list(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>2000</members>
<members>2100-2200</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.add_trunk_vlan("ge-0/0/6", 1000)
def test_add_trunk_vlan_on_interface_that_has_no_port_mode_with_a_vlan_assumes_access_mode_and_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>500</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(InterfaceInWrongPortMode) as expect:
self.switch.add_trunk_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_add_trunk_vlan_on_interface_in_access_mode_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>500</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(InterfaceInWrongPortMode) as expect:
self.switch.add_trunk_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_add_trunk_vlan_on_unknown_vlan_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(UnknownVlan) as expect:
self.switch.add_trunk_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Vlan 1000 not found"))
def test_add_trunk_vlan_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>PATATE</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(a_port_value_outside_range_rpc_error())
with self.assertRaises(UnknownInterface) as expect:
self.switch.add_trunk_vlan("ge-0/0/99", 1000)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_remove_trunk_vlan_removes_the_vlan_members_in_every_possible_way(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>1000</members>
<members>1000-1001</members>
<members>999-1000</members>
<members>999-1001</members>
<members>998-1002</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members operation="delete">1000</members>
<members operation="delete">1000-1001</members>
<members>1001</members>
<members operation="delete">999-1000</members>
<members>999</members>
<members operation="delete">999-1001</members>
<members>999</members>
<members>1001</members>
<members operation="delete">998-1002</members>
<members>998-999</members>
<members>1001-1002</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_trunk_vlan("ge-0/0/6", 1000)
def test_remove_trunk_vlan_removes_the_vlan_even_if_referenced_by_name(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>1000</members>
<members>VLAN_NAME</members>
<members>SOMETHING</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members operation="delete">1000</members>
<members operation="delete">VLAN_NAME</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_trunk_vlan("ge-0/0/6", 1000)
def test_remove_trunk_vlan_not_in_members_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>500-999</members>
<members>1001-4000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(TrunkVlanNotSet) as expect:
self.switch.remove_trunk_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Trunk Vlan is not set on interface ge-0/0/6"))
def test_remove_trunk_vlan_on_access_with_the_correct_vlan_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>access</port-mode>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(InterfaceInWrongPortMode) as expect:
self.switch.remove_trunk_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_remove_trunk_vlan_on_no_port_mode_interface_with_the_correct_vlan_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(InterfaceInWrongPortMode) as expect:
self.switch.remove_trunk_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Operation cannot be performed on a access mode interface"))
def test_remove_trunk_vlan_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<vlans>
<vlan>
<name>VLAN_NAME</name>
<vlan-id>1000</vlan-id>
</vlan>
</vlans>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(UnknownInterface) as expect:
self.switch.remove_trunk_vlan("ge-0/0/6", 1000)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/6"))
def test_set_interface_description_succeeds(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<description>Resistance is futile</description>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_description("ge-0/0/6", "Resistance is futile")
def test_set_interface_description_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<description>Resistance is futile</description>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..47 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_interface_description("ge-0/0/99", "Resistance is futile")
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_unset_interface_description_succeeds(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<description operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.unset_interface_description("ge-0/0/6")
def test_unset_interface_description_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<description operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..47 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.unset_interface_description("ge-0/0/99")
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_unset_interface_description_on_interface_with_no_description_just_ignores_it(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<description operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>warning</error-severity>
<error-path>[edit interfaces ge-0/0/6]</error-path>
<error-message>statement not found: description</error-message>
</rpc-error>"""))))
self.switch.unset_interface_description("ge-0/0/99")
def test_edit_interface_spanning_tree_enable_edge_from_nothing(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge />
<no-root-port />
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=True)
def test_edit_interface_spanning_tree_enable_edge_when_all_is_already_set(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge/>
<no-root-port/>
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=True)
def test_edit_interface_spanning_tree_enable_edge_when_only_edge_is_already_set(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge/>
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<no-root-port />
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=True)
def test_edit_interface_spanning_tree_enable_edge_when_only_no_root_port_is_already_set(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<no-root-port />
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge />
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=True)
def test_edit_interface_spanning_tree_disable_edge_when_all_is_set(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge/>
<no-root-port/>
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge operation="delete" />
<no-root-port operation="delete" />
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=False)
def test_edit_interface_spanning_tree_disable_edge_when_only_edge_is_set(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge/>
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<edge operation="delete" />
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=False)
def test_edit_interface_spanning_tree_disable_edge_when_only_no_root_port_is_set(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<no-root-port />
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
<no-root-port operation="delete" />
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=False)
def test_edit_interface_spanning_tree_disable_edge_when_nothing_is_set(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/6</name>
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.edit_interface_spanning_tree('ge-0/0/6', edge=False)
def test_edit_interface_spanning_tree_unknown_interface(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ge-0/0/99</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<rstp>
<interface>
<name>ge-0/0/99</name>
<edge />
<no-root-port />
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..47 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.edit_interface_spanning_tree('ge-0/0/99', edge=True)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_set_interface_state_to_on_succeeds(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<disable operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_state("ge-0/0/6", ON)
def test_set_interface_state_to_off_succeeds(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<disable />
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_state("ge-0/0/6", OFF)
def test_unset_interface_state_succeeds(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<disable operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.unset_interface_state("ge-0/0/6")
def test_unset_interface_state_raises_on_unknown_interface(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<disable operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..47 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.unset_interface_state("ge-0/0/99")
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_unset_interface_state_without_disabled(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<disable operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>warning</error-severity>
<error-path>[edit interfaces ge-0/0/6]</error-path>
<error-message>statement not found: </error-message>
</rpc-error>"""))))
self.switch.unset_interface_state("ge-0/0/6")
def test_set_interface_state_to_on_unknown_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<disable operation="delete"/>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..47 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_interface_state("ge-0/0/99", ON)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_set_interface_state_to_off_unknown_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<disable />
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..47 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_interface_state("ge-0/0/99", OFF)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_add_bond(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ae6</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ae6</name>
<aggregated-ether-options>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.add_bond(6)
def test_add_bond_already_created_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ae6</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae6</name>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").never()
with self.assertRaises(BondAlreadyExist) as expect:
self.switch.add_bond(6)
assert_that(str(expect.exception), equal_to("Bond 6 already exists"))
def test_add_bond_bad_bond_number(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ae9000</name>
</interface>
</interfaces>
</configuration>
</filter>
""")).and_return(a_configuration(""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ae9000</name>
<aggregated-ether-options>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
device value outside range 0..31 for '9000' in 'ae9000'
</error-message>
</rpc-error>
"""))))
with self.assertRaises(BadBondNumber) as expect:
self.switch.add_bond(9000)
assert_that(str(expect.exception), equal_to("Bond number is invalid"))
def test_remove_bond(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<protocols>
<rstp>
<interface>
<name>ae10</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-4/3/3</name>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="delete">
<name>ae10</name>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_bond(10)
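# When the bond also appears under protocols/rstp, its rstp interface entry
# must be deleted in the same edit.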
def test_remove_bond_also_removes_rstp_protocol(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<protocols>
<rstp>
<interface>
<name>ae10</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-4/3/3</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface>
<name>ae10</name>
<edge/>
<no-root-port/>
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="delete">
<name>ae10</name>
</interface>
</interfaces>
<protocols>
<rstp>
<interface operation="delete">
<name>ae10</name>
</interface>
</rstp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.remove_bond(10)
def test_remove_bond_invalid_number_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<protocols>
<rstp>
<interface>
<name>ae7</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration())
with self.assertRaises(UnknownBond) as expect:
self.switch.remove_bond(7)
assert_that(str(expect.exception), equal_to("Bond 7 not found"))
def test_remove_bond_delete_slaves_and_interface_at_same_time(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces />
<protocols>
<rstp>
<interface>
<name>ae10</name>
</interface>
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
<interface>
<name>ge-0/0/2</name>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="delete">
<name>ae10</name>
</interface>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad operation="delete" />
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.remove_bond(10)
def test_add_interface_to_bond(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
<protocols>
<rstp>
<interface />
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="replace">
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.add_interface_to_bond('ge-0/0/1', 10)
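# When the bond carries a link-speed, the new slave is pinned to the matching
# ethernet speed and its rstp interface entry is removed, all in one edit.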
def test_add_interface_to_bond_gets_up_to_speed_and_removes_existing_rstp_protocol(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
<protocols>
<rstp>
<interface />
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
<aggregated-ether-options>
<link-speed>1g</link-speed>
</aggregated-ether-options>
</interface>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans/>
<protocols>
<rstp>
<interface>
<name>ge-0/0/1</name>
<edge />
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="replace">
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
<speed>
<ethernet-1g/>
</speed>
</ether-options>
</interface>
</interfaces>
<protocols>
<rstp>
<interface operation="delete">
<name>ge-0/0/1</name>
</interface>
</rstp>
</protocols>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.add_interface_to_bond('ge-0/0/1', 10)
def test_add_interface_to_bond_without_bond(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
<protocols>
<rstp>
<interface />
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans/>
"""))
with self.assertRaises(UnknownBond):
self.switch.add_interface_to_bond('ge-0/0/1', 10)
def test_add_interface_to_bond_without_interface(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
<protocols>
<rstp>
<interface />
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="replace">
<name>ge-0/0/99</name>
<ether-options>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_raise(
RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..47 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface):
self.switch.add_interface_to_bond('ge-0/0/99', 10)
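# The mocked "port value outside range" rpc-error above is how Junos reports a
# nonexistent port; the driver is expected to translate it into the
# UnknownInterface domain exception instead of letting the raw RPCError escape.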
def test_remove_interface_from_bond(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad operation="delete" />
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.remove_interface_from_bond('ge-0/0/1')
def test_remove_interface_from_bond_not_in_bond(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad operation="delete" />
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_raise(
RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
statement not found: 802.3ad
</error-message>
</rpc-error>"""))))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
with self.assertRaises(InterfaceNotInBond):
self.switch.remove_interface_from_bond('ge-0/0/1')
def test_remove_interface_from_bond_unknown_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/27</name>
<ether-options>
<ieee-802.3ad operation="delete" />
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_raise(
RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
statement not found: 802.3ad
</error-message>
</rpc-error>"""))))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
with self.assertRaises(UnknownInterface):
self.switch.remove_interface_from_bond('ge-0/0/27')
def test_change_bond_speed_update_slaves_and_interface_at_same_time(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
<interface>
<name>ge-0/0/2</name>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ae10</name>
<aggregated-ether-options>
<link-speed>1g</link-speed>
</aggregated-ether-options>
</interface>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<speed>
<ethernet-1g/>
</speed>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.set_bond_link_speed(10, '1g')
def test_change_bond_speed_on_unknown_bond(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
<interface>
<name>ge-0/0/2</name>
</interface>
</interfaces>
"""))
with self.assertRaises(UnknownBond):
self.switch.set_bond_link_speed(20, '1g')
def test_get_bond(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae3</name>
<aggregated-ether-options>
<link-speed>1g</link-speed>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>999-1001</members>
<members>1000</members>
</vlan>
<native-vlan-id>2000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-1/0/1</name>
<ether-options>
<speed>
<ethernet-100m/>
</speed>
<ieee-802.3ad>
<bundle>ae3</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
</interfaces>
"""))
if3 = self.switch.get_bond(3)
assert_that(if3.number, equal_to(3))
assert_that(if3.link_speed, equal_to('1g'))
assert_that(if3.port_mode, equal_to(TRUNK))
assert_that(if3.access_vlan, equal_to(None))
assert_that(if3.trunk_native_vlan, equal_to(2000))
assert_that(if3.trunk_vlans, equal_to([999, 1000, 1001]))
assert_that(if3.members, equal_to(['ge-1/0/1']))
def test_get_unknown_bond(self):
self.switch.in_transaction = True
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration(""))
with self.assertRaises(UnknownBond):
self.switch.get_bond(3)
def test_get_bonds(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae1</name>
<aggregated-ether-options>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ae2</name>
<disable />
<description>Howdy</description>
<mtu>5000</mtu>
<aggregated-ether-options>
<link-speed>10g</link-speed>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ae3</name>
<aggregated-ether-options>
<link-speed>1g</link-speed>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
<vlan>
<members>999-1001</members>
<members>1000</members>
</vlan>
<native-vlan-id>2000</native-vlan-id>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/4</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<port-mode>trunk</port-mode>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-1/0/1</name>
<ether-options>
<speed>
<ethernet-100m/>
</speed>
<ieee-802.3ad>
<bundle>ae3</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>40</name>
</unit>
</interface>
</interfaces>
"""))
if1, if2, if3 = self.switch.get_bonds()
assert_that(if1.number, equal_to(1))
assert_that(if1.link_speed, equal_to(None))
assert_that(if1.shutdown, equal_to(False))
assert_that(if1.port_mode, equal_to(ACCESS))
assert_that(if1.access_vlan, equal_to(None))
assert_that(if1.trunk_native_vlan, equal_to(None))
assert_that(if1.trunk_vlans, equal_to([]))
assert_that(if1.mtu, equal_to(None))
assert_that(if1.members, equal_to([]))
assert_that(if2.number, equal_to(2))
assert_that(if2.link_speed, equal_to('10g'))
assert_that(if2.shutdown, equal_to(True))
assert_that(if2.port_mode, equal_to(ACCESS))
assert_that(if2.access_vlan, equal_to(1000))
assert_that(if2.trunk_native_vlan, equal_to(None))
assert_that(if2.trunk_vlans, equal_to([]))
assert_that(if2.mtu, equal_to(5000))
assert_that(if2.members, equal_to([]))
assert_that(if3.number, equal_to(3))
assert_that(if3.link_speed, equal_to('1g'))
assert_that(if3.port_mode, equal_to(TRUNK))
assert_that(if3.access_vlan, equal_to(None))
assert_that(if3.trunk_native_vlan, equal_to(2000))
assert_that(if3.trunk_vlans, equal_to([999, 1000, 1001]))
assert_that(if3.members, equal_to(['ge-1/0/1']))
def test_set_interface_lldp_state_from_nothing(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
</interface>
</lldp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
</interface>
</lldp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_lldp_state('ge-0/0/6', True)
def test_set_interface_lldp_state_from_default_interface(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
</interface>
</lldp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
</interface>
</lldp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_lldp_state('ge-0/0/6', True)
def test_set_interface_lldp_state_from_unknown_interface_raises(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/99</name>
</interface>
</lldp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<lldp>
<interface>
<name>ge-0/0/99</name>
</interface>
</lldp>
</protocols>
</configuration>
</config>
""")).and_raise(a_port_value_outside_range_rpc_error())
with self.assertRaises(UnknownInterface) as expect:
self.switch.set_interface_lldp_state('ge-0/0/99', True)
assert_that(str(expect.exception), contains_string("Unknown interface ge-0/0/99"))
def test_set_interface_lldp_state_when_disabled(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
</interface>
</lldp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
<disable/>
</interface>
</lldp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
<disable operation="delete"/>
</interface>
</lldp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_lldp_state('ge-0/0/6', True)
def test_disable_lldp_when_disabled(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
</interface>
</lldp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
<disable/>
</interface>
</lldp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").never()
self.switch.set_interface_lldp_state('ge-0/0/6', False)
def test_disable_lldp_when_enabled(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
</interface>
</lldp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<protocols>
<lldp>
<interface>
<name>ge-0/0/6</name>
<disable />
</interface>
</lldp>
</protocols>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_lldp_state('ge-0/0/6', False)
def test_set_interface_mtu_success(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<mtu>5000</mtu>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_interface_mtu('ge-0/0/6', 5000)
def test_set_interface_mtu_wrong_value_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<mtu>100</mtu>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
Value 100 is not within range (256..9216)
</error-message>
</rpc-error>"""))))
with self.assertRaises(InvalidMtuSize) as expect:
self.switch.set_interface_mtu('ge-0/0/6', 100)
assert_that(str(expect.exception), contains_string("Value 100 is not within range (256..9216)"))
def test_set_interface_mtu_unknown_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<mtu>5000</mtu>
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..63 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface):
self.switch.set_interface_mtu('ge-0/0/99', 5000)
def test_unset_interface_mtu_success(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<mtu operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.unset_interface_mtu('ge-0/0/6')
def test_unset_interface_mtu_unknown_interface_raises(self):
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/99</name>
<mtu operation="delete" />
</interface>
</interfaces>
</configuration>
</config>
""")).and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..63 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>"""))))
with self.assertRaises(UnknownInterface):
self.switch.unset_interface_mtu('ge-0/0/99')
def test_bond_port_mode_access(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.set_access_mode = mock.Mock()
switch.set_bond_access_mode(6)
switch.set_access_mode.assert_called_with('ae6')
def test_bond_port_mode_trunk(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.set_trunk_mode = mock.Mock()
switch.set_bond_trunk_mode(6)
switch.set_trunk_mode.assert_called_with('ae6')
def test_set_bond_description_succeeds(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.set_interface_description = mock.Mock()
switch.set_bond_description(6, "Resistance is futile")
switch.set_interface_description.assert_called_with('ae6', "Resistance is futile")
def test_unset_bond_description_succeeds(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.unset_interface_description = mock.Mock()
switch.unset_bond_description(6)
switch.unset_interface_description.assert_called_with('ae6')
def test_set_bond_mtu_succeeds(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.set_interface_mtu = mock.Mock()
switch.set_bond_mtu(6, 5000)
switch.set_interface_mtu.assert_called_with('ae6', 5000)
def test_unset_bond_mtu_succeeds(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.unset_interface_mtu = mock.Mock()
switch.unset_bond_mtu(6)
switch.unset_interface_mtu.assert_called_with('ae6')
def test_add_bond_trunk_vlan(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.add_trunk_vlan = mock.Mock()
switch.add_bond_trunk_vlan(6, 1000)
switch.add_trunk_vlan.assert_called_with('ae6', 1000)
def test_remove_bond_trunk_vlan(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.remove_trunk_vlan = mock.Mock()
switch.remove_bond_trunk_vlan(6, 1000)
switch.remove_trunk_vlan.assert_called_with('ae6', 1000)
def test_set_bond_native_vlan(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.set_interface_native_vlan = mock.Mock()
switch.set_bond_native_vlan(6, 1000)
switch.set_interface_native_vlan.assert_called_with('ae6', 1000)
def test_unset_bond_native_vlan(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.unset_interface_native_vlan = mock.Mock()
switch.unset_bond_native_vlan(6)
switch.unset_interface_native_vlan.assert_called_with('ae6')
def test_edit_bond_spanning_tree(self):
switch = juniper.standard.netconf(SwitchDescriptor(model='', hostname=''))
switch.edit_interface_spanning_tree = mock.Mock()
switch.edit_bond_spanning_tree(6, edge=False)
switch.edit_interface_spanning_tree.assert_called_with('ae6', edge=False)
@mock.patch("ncclient.manager.connect")
def test_connect(self, connect_mock):
connect_mock.return_value = self.netconf_mock
self.netconf_mock._session = mock.Mock()
self.switch = Juniper(
SwitchDescriptor(model='juniper', hostname="toto", username="tutu", password="titi", port=8000),
custom_strategies=JuniperCustomStrategies(), timeout=120)
self.switch.connect()
connect_mock.assert_called_with(
host="toto",
username="tutu",
password="titi",
hostkey_verify=False,
device_params={'name':'junos'},
port=8000,
timeout=120
)
@mock.patch("ncclient.manager.connect")
def test_connect_without_port_uses_default(self, connect_mock):
connect_mock.return_value = self.netconf_mock
self.netconf_mock._session = mock.Mock()
self.switch = Juniper(
SwitchDescriptor(model='juniper', hostname="toto", username="tutu", password="titi"),
custom_strategies=JuniperCustomStrategies(), timeout=120)
self.switch.connect()
connect_mock.assert_called_with(
host="toto",
username="tutu",
password="titi",
hostkey_verify=False,
device_params={'name':'junos'},
timeout=120
)
def test_disconnect(self):
self.netconf_mock.should_receive("close_session").once().ordered()
self.switch.disconnect()
def test_disconnect_doesnt_fail_if_close_session_does(self):
self.netconf_mock.should_receive("close_session").once().ordered().and_raise(TimeoutExpiredError)
self.switch.disconnect()
def test_start_transaction_locks_the_candidate(self):
self.netconf_mock.should_receive("lock").with_args(target="candidate").once().ordered()
self.switch.start_transaction()
def test_start_transaction_fails_discard_changes_and_retries(self):
self.netconf_mock.should_receive("lock").with_args(target="candidate").twice()\
.and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
configuration database modified
</error-message>
<database-status-information>
<database-status>
<user>admin</user>
<terminal>p0</terminal>
<pid>9511</pid>
<start-time junos:seconds="1416432176">2014-11-19 16:22:56 EST</start-time>
<idle-time junos:seconds="197">00:03:17</idle-time>
<edit-path>[edit]</edit-path>
</database-status>
</database-status-information>
</rpc-error>"""))))\
.and_return()
self.netconf_mock.should_receive("discard_changes").with_args().once().and_return(an_ok_response())
self.switch.start_transaction()
def test_start_transaction_locking_fails_already_in_use_raises(self):
self.netconf_mock.should_receive("lock").with_args(target="candidate").once().ordered().and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
Configuration database is already open
</error-message>
</rpc-error>
"""))))
with self.assertRaises(LockedSwitch) as expect:
self.switch.start_transaction()
assert_that(str(expect.exception), equal_to("Switch is locked and can't be modified"))
def test_start_transaction_locking_fails_of_unknown_reason_raises(self):
self.netconf_mock.should_receive("lock").with_args(target="candidate").once().ordered().and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
Whatever right?
</error-message>
</rpc-error>
"""))))
with self.assertRaises(RPCError) as expect:
self.switch.start_transaction()
assert_that(str(expect.exception), contains_string("Whatever right?"))
def test_end_transaction(self):
self.netconf_mock.should_receive("unlock").with_args(target="candidate").once().ordered()
self.switch.end_transaction()
def test_commit_succeeds(self):
self.netconf_mock.should_receive("commit").with_args().once().ordered()
self.switch.commit_transaction()
def test_commit_transaction_failing_to_commit_discard_changes_and_raises(self):
self.netconf_mock.should_receive("commit").with_args().once().ordered().and_raise(RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<source-daemon>
eswd
</source-daemon>
<error-message>
tag value 1000 is being used by more than one vlan &lt;VLAN1000&gt; and &lt;SOMETHINGELSE&gt;
</error-message>
</rpc-error>
"""))))
with self.assertRaises(OperationNotCompleted) as expect:
self.switch.commit_transaction()
assert_that(str(expect.exception), equal_to("An error occured while completing operation, no modifications have been applied : tag value 1000 is being used by more than one vlan <VLAN1000> and <SOMETHINGELSE>"))
def test_rollback_succeeds(self):
self.netconf_mock.should_receive("discard_changes").with_args().once().ordered()
self.switch.rollback_transaction()
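# Taken together, the transaction tests above walk the full candidate-config
# lifecycle: lock(target="candidate") -> edit_config(target="candidate") ->
# commit -> unlock(target="candidate"), with discard_changes used both to
# recover a dirty candidate before retrying the lock and to roll back an
# uncommitted transaction.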
def a_configuration(inner_data=""):
return an_rpc_response("""
<data>
<configuration>{}</configuration>
</data>
""".format(inner_data))
def an_ok_response():
return an_rpc_response(textwrap.dedent("""
<ok/>
"""))
def an_rpc_response(data):
return NCElement(textwrap.dedent("""
<rpc-reply message-id="urn:uuid:34c41736-bed3-11e4-8c40-7c05070fe456">
{}
</rpc-reply>""".format(data)), JunosDeviceHandler(None).transform_reply())
def a_port_value_outside_range_rpc_error():
return RPCError(to_ele(textwrap.dedent("""
<rpc-error xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/11.4R1/junos" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<error-severity>error</error-severity>
<error-message>
port value outside range 0..63 for '99' in 'ge-0/0/99'
</error-message>
</rpc-error>""")))
def is_xml(string):
return IsXmlFlexmockArgMatcher(string)
class IsXmlFlexmockArgMatcher(object):
def __init__(self, expected):
self.expected = to_ele(expected)
def __eq__(self, other):
otherxml = other if not isinstance(other, basestring) else to_ele(other)
try:
self.compare(self.expected, otherxml)
return True
except AssertionError as e:
logging.warning("Given XML : \n" + to_xml(otherxml, pretty_print=True) +
"\n\ndiffers from expected : \n" + to_xml(self.expected, pretty_print=True) +
"Because : " + str(e))
return False
def compare(self, expected, actual):
for i, node in enumerate(expected):
assert_that(node.tag, equal_to(unqualify(actual[i].tag)))
assert_that(node, has_length(len(actual[i])))
assert_that(actual[i].attrib, has_length(len(node.attrib)))
if node.text is not None:
if node.text.strip() == "":
assert_that(actual[i].text is None or actual[i].text.strip() == "")
else:
assert_that(actual[i].text is not None, "Node is " + node.tag)
assert_that(node.text.strip(), equal_to(actual[i].text.strip()))
for name, value in node.attrib.items():
assert_that(actual[i].attrib, has_key(name))
assert_that(actual[i].attrib[name], equal_to(value))
self.compare(node, actual[i])
def unqualify(tag):
return re.sub(r"\{[^\}]*\}", "", tag)
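# A minimal usage sketch of the matcher above (illustrative only, not part of
# the original suite): is_xml() compares XML trees structurally, so the same
# document with different whitespace still matches, while a different tree
# does not.
def _demo_is_xml_matcher():
    matcher = is_xml("<config><interfaces/></config>")
    assert matcher == "<config>\n  <interfaces/>\n</config>"  # whitespace-insensitive match
    assert not (matcher == "<config><vlans/></config>")  # different child element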
avg_line_length: 36.12268 | max_line_length: 219 | alphanum_fraction: 0.452104
qsc_* quality signals (in schema order): 20,469 | 239,385 | 5.093752 | 0.028433 | 0.007788 | 0.015 | 0.050957 | 0.931683 | 0.916376 | 0.902219 | 0.884946 | 0.864929 | 0.85412 | 0 | 0.026153 | 0.425131 | 239,385 | 6,626 | 220 | 36.128132 | 0.731497 | 0.002281 | 0 | 0.906476 | 0 | 0.006903 | 0.702058 | 0.09262 | 0 | 0 | 0 | 0 | 0.044214 | 0 | null | null | 0.000986 | 0.003287 | null | null | 0.000329 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9

hexsha: 202b6473f3efb39edf7d57515bb51bc743ba4960 | size: 3,374 | ext: py | lang: Python
max_stars_repo_path: src/CRKSPH/gradientCRKSPHInst.cc.py | max_stars_repo_name: markguozhiming/spheral | max_stars_repo_head_hexsha: bbb982102e61edb8a1d00cf780bfa571835e1b61 | max_stars_repo_licenses: ["BSD-Source-Code", "BSD-3-Clause-LBNL", "FSFAP"] | max_stars_count: 1 | stars events: 2020-10-21T01:56:55.000Z to 2020-10-21T01:56:55.000Z
max_issues_repo_path: src/CRKSPH/gradientCRKSPHInst.cc.py | max_issues_repo_name: markguozhiming/spheral | max_issues_repo_head_hexsha: bbb982102e61edb8a1d00cf780bfa571835e1b61 | max_issues_repo_licenses: ["BSD-Source-Code", "BSD-3-Clause-LBNL", "FSFAP"] | max_issues_count: null | issues events: null to null
max_forks_repo_path: src/CRKSPH/gradientCRKSPHInst.cc.py | max_forks_repo_name: markguozhiming/spheral | max_forks_repo_head_hexsha: bbb982102e61edb8a1d00cf780bfa571835e1b61 | max_forks_repo_licenses: ["BSD-Source-Code", "BSD-3-Clause-LBNL", "FSFAP"] | max_forks_count: null | forks events: null to null
content:
text = """
//------------------------------------------------------------------------------
// Explicit instantiation.
//------------------------------------------------------------------------------
#include "gradientCRKSPH.cc"
#include "Geometry/Dimension.hh"
#include "SPH/NodeCoupling.hh"
namespace Spheral {
template
FieldList<Dim< %(ndim)s >, MathTraits<Dim< %(ndim)s >, Dim< %(ndim)s >::Scalar>::GradientType>
gradientCRKSPH<Dim< %(ndim)s >, Dim< %(ndim)s >::Scalar>(const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Scalar>& fieldList,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>& position,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Scalar>& weight,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::SymTensor>& Hfield,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Scalar>& A,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>& B,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Tensor>& C,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>& gradA,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Tensor>& gradB,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::ThirdRankTensor>& gradC,
const ConnectivityMap<Dim< %(ndim)s > >& connectivityMap,
const CRKOrder correctionOrder,
const TableKernel< Dim< %(ndim)s > >& kernel,
const NodeCoupling& nodeCoupling);
template
FieldList<Dim< %(ndim)s >, MathTraits<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>::GradientType>
gradientCRKSPH<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>(const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>& fieldList,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>& position,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Scalar>& weight,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::SymTensor>& Hfield,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Scalar>& A,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>& B,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Tensor>& C,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Vector>& gradA,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::Tensor>& gradB,
const FieldList<Dim< %(ndim)s >, Dim< %(ndim)s >::ThirdRankTensor>& gradC,
const ConnectivityMap<Dim< %(ndim)s > >& connectivityMap,
const CRKOrder correctionOrder,
const TableKernel< Dim< %(ndim)s > >& kernel,
const NodeCoupling& nodeCoupling);
}
"""
avg_line_length: 73.347826 | max_line_length: 128 | alphanum_fraction: 0.421458
qsc_* quality signals (in schema order): 296 | 3,374 | 4.804054 | 0.135135 | 0.265823 | 0.303797 | 0.185654 | 0.919831 | 0.919831 | 0.919831 | 0.902954 | 0.843882 | 0.793249 | 0 | 0 | 0.385299 | 3,374 | 45 | 129 | 74.977778 | 0.685632 | 0 | 0 | 0.714286 | 0 | 0.52381 | 0.995851 | 0.075282 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12

hexsha: 2037c0cb7e6589d57c63c36d0b4e21c82fa2620b | size: 250,834 | ext: py | lang: Python
max_stars_repo_path: iengage_client/apis/complaint_api.py | max_stars_repo_name: iEngage/python-sdk | max_stars_repo_head_hexsha: 76cc6ed697d7599ce9af74124c12d33ad5aff419 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | stars events: null to null
max_issues_repo_path: iengage_client/apis/complaint_api.py | max_issues_repo_name: iEngage/python-sdk | max_issues_repo_head_hexsha: 76cc6ed697d7599ce9af74124c12d33ad5aff419 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | issues events: null to null
max_forks_repo_path: iengage_client/apis/complaint_api.py | max_forks_repo_name: iEngage/python-sdk | max_forks_repo_head_hexsha: 76cc6ed697d7599ce9af74124c12d33ad5aff419 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | forks events: null to null
content:
# coding: utf-8
"""
Stakeholder engagement API
This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers.
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ComplaintApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def add_complaint(self, category_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share complaint without attachment
Allows the user to share complaint. Returns complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_complaint(category_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str complaint_title: Complaint Title (required)
:param str complaint_description: Describe complaint (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_complaint_with_http_info(category_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.add_complaint_with_http_info(category_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs)
return data
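# A minimal usage sketch (illustrative only; argument values are hypothetical):
#
#   api = ComplaintApi()
#   # synchronous: returns the deserialized VerveResponseComplaint
#   result = api.add_complaint(12, "No hot water", "Boiler down since Monday",
#                              "user-42", "<access-token>", "<client-token>")
#   # asynchronous: returns the worker thread; the response object is passed
#   # to the callback when the request completes
#   thread = api.add_complaint(12, "No hot water", "Boiler down since Monday",
#                              "user-42", "<access-token>", "<client-token>",
#                              callback=pprint)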
def add_complaint_with_http_info(self, category_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share complaint without attachment
Allows the user to share complaint. Returns complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_complaint_with_http_info(category_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str complaint_title: Complaint Title (required)
:param str complaint_description: Describe complaint (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'complaint_title', 'complaint_description', 'logged_in_user_id', 'access_token', 'client_token']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_complaint" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `add_complaint`")
# verify the required parameter 'complaint_title' is set
if ('complaint_title' not in params) or (params['complaint_title'] is None):
raise ValueError("Missing the required parameter `complaint_title` when calling `add_complaint`")
# verify the required parameter 'complaint_description' is set
if ('complaint_description' not in params) or (params['complaint_description'] is None):
raise ValueError("Missing the required parameter `complaint_description` when calling `add_complaint`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `add_complaint`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `add_complaint`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `add_complaint`")
collection_formats = {}
resource_path = '/complaints'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'category_id' in params:
form_params.append(('categoryId', params['category_id']))
if 'complaint_title' in params:
form_params.append(('complaintTitle', params['complaint_title']))
if 'complaint_description' in params:
form_params.append(('complaintDescription', params['complaint_description']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_complaint_0(self, body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share complaint with attachment
Allows the user to share complaints. Returns the complaint object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_complaint_0(body, body2, body3, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int body: categoryId (required)
:param str body2: complaintTitle (required)
:param str body3: complaintDescription (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param list[Attachment] body4:
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_complaint_0_with_http_info(body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.add_complaint_0_with_http_info(body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs)
return data
def add_complaint_0_with_http_info(self, body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share complaint with attachment
Allows the user to share complaints. Returns the complaint object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_complaint_0_with_http_info(body, body2, body3, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int body: categoryId (required)
:param str body2: complaintTitle (required)
:param str body3: complaintDescription (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param list[Attachment] body4:
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'body2', 'body3', 'logged_in_user_id', 'access_token', 'client_token', 'body4']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_complaint_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_complaint_0`")
# verify the required parameter 'body2' is set
if ('body2' not in params) or (params['body2'] is None):
raise ValueError("Missing the required parameter `body2` when calling `add_complaint_0`")
# verify the required parameter 'body3' is set
if ('body3' not in params) or (params['body3'] is None):
raise ValueError("Missing the required parameter `body3` when calling `add_complaint_0`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `add_complaint_0`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `add_complaint_0`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `add_complaint_0`")
collection_formats = {}
resource_path = '/complaints/attachment'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
if 'body4' in params:
body_params = params['body4']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['multipart/form-data'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_solution(self, complaint_id, solution, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share solution on complaint
Allows the user to share a solution on complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_solution(complaint_id, solution, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str solution: solution (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>A) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_solution_with_http_info(complaint_id, solution, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.add_solution_with_http_info(complaint_id, solution, logged_in_user_id, access_token, client_token, **kwargs)
return data
def add_solution_with_http_info(self, complaint_id, solution, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share solution on complaint
Allows the user to share a solution on complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_solution_with_http_info(complaint_id, solution, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str solution: solution (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>A) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'solution', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_solution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `add_solution`")
# verify the required parameter 'solution' is set
if ('solution' not in params) or (params['solution'] is None):
raise ValueError("Missing the required parameter `solution` when calling `add_solution`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `add_solution`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `add_solution`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `add_solution`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/solutions'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'solution' in params:
form_params.append(('solution', params['solution']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolution',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_complaint_category(self, name, description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Create complaint category
Allows the user to create complaint category. Returns the created complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_complaint_category(name, description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: Name (required)
:param str description: description (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int organization_id: OrganizationId
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2) categoryName<br/><b>A) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_complaint_category_with_http_info(name, description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.create_complaint_category_with_http_info(name, description, logged_in_user_id, access_token, client_token, **kwargs)
return data
def create_complaint_category_with_http_info(self, name, description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Create complaint category
Allows the user to create complaint category. Returns the created complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_complaint_category_with_http_info(name, description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: Name (required)
:param str description: description (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int organization_id: OrganizationId
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2) categoryName<br/><b>A) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'description', 'logged_in_user_id', 'access_token', 'client_token', 'organization_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_complaint_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `create_complaint_category`")
# verify the required parameter 'description' is set
if ('description' not in params) or (params['description'] is None):
raise ValueError("Missing the required parameter `description` when calling `create_complaint_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `create_complaint_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `create_complaint_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `create_complaint_category`")
collection_formats = {}
resource_path = '/complaints/categories'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'organization_id' in params:
form_params.append(('OrganizationId', params['organization_id']))
if 'name' in params:
form_params.append(('name', params['name']))
if 'description' in params:
form_params.append(('description', params['description']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
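# Usage sketch (hypothetical values; assumes `api` is a configured instance of
# this API class and the tokens were obtained beforehand via authentication):
#
#     category = api.create_complaint_category(
#         name='Billing',
#         description='Complaints about invoices and charges',
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>')
#
# The call is synchronous and returns a VerveResponseComplaintCategory.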
def delete_complaint(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete complaint
Allows the user to delete a complaint. Returns the deleted complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_complaint(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.delete_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def delete_complaint_with_http_info(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete complaint
Allows the user to delete a complaint. Returns the deleted complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_complaint" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `delete_complaint`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `delete_complaint`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `delete_complaint`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `delete_complaint`")
collection_formats = {}
resource_path = '/complaints/{complaintId}'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
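# Usage sketch (hypothetical values): deleting a complaint asynchronously.
# Passing `callback` makes the wrapper return the request thread immediately;
# the response is delivered to the callback when the request completes.
#
#     def on_deleted(response):
#         print(response)  # VerveResponseComplaint for the deleted complaint
#
#     thread = api.delete_complaint(
#         complaint_id=42,
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>',
#         callback=on_deleted)
#     thread.join()  # assumes the returned object is a standard threading.Thread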
def delete_complaint_category(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete complaint category
Returns the deleted complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_complaint_category(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.delete_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def delete_complaint_category_with_http_info(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete complaint category
Returns the deleted complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_complaint_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `delete_complaint_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `delete_complaint_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `delete_complaint_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `delete_complaint_category`")
collection_formats = {}
resource_path = '/complaints/categories/{categoryId}'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
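# Usage sketch: the guards above fail fast, before any HTTP call, when a
# required argument is missing. For example (hypothetical values), passing
# client_token=None raises the corresponding ValueError:
#
#     try:
#         api.delete_complaint_category(
#             category_id=7,
#             logged_in_user_id='user-101',
#             access_token='<ACCESS_TOKEN>',
#             client_token=None)
#     except ValueError as exc:
#         print(exc)  # Missing the required parameter `client_token` ...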
def delete_solution(self, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete solution
Allows the user to delete a solution. Returns the deleted solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_solution(solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_solution_with_http_info(solution_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.delete_solution_with_http_info(solution_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def delete_solution_with_http_info(self, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete solution
Allows the user to delete a solution. Returns the deleted solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_solution_with_http_info(solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['solution_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_solution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'solution_id' is set
if ('solution_id' not in params) or (params['solution_id'] is None):
raise ValueError("Missing the required parameter `solution_id` when calling `delete_solution`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `delete_solution`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `delete_solution`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `delete_solution`")
collection_formats = {}
resource_path = '/complaints/solutions/{solutionId}'.replace('{format}', 'json')
path_params = {}
if 'solution_id' in params:
path_params['solutionId'] = params['solution_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolution',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
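# Usage sketch (hypothetical values): narrowing the response with the optional
# `fields` filter. The separator the server expects is not documented here; a
# comma-separated list is assumed.
#
#     solution = api.delete_solution(
#         solution_id=99,
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>',
#         fields='solutionId,solutionDescription,createdDate')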
def dislike_solution(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Dislike Solution
Allows the user to dislike the solution. Returns the disliked solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.dislike_solution(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.dislike_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.dislike_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def dislike_solution_with_http_info(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Dislike Solution
Allows the user to dislike the solution. Returns the disliked solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.dislike_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'solution_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method dislike_solution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `dislike_solution`")
# verify the required parameter 'solution_id' is set
if ('solution_id' not in params) or (params['solution_id'] is None):
raise ValueError("Missing the required parameter `solution_id` when calling `dislike_solution`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `dislike_solution`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `dislike_solution`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `dislike_solution`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/solutions/{solutionId}/dislike'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
if 'solution_id' in params:
path_params['solutionId'] = params['solution_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolution',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
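# Usage sketch (hypothetical values): both ids are path parameters here, since
# the endpoint is POST /complaints/{complaintId}/solutions/{solutionId}/dislike.
#
#     solution = api.dislike_solution(
#         complaint_id=42,
#         solution_id=99,
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>')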
def get_complaint(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get complaint by id
Returns the complaint by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_complaint(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_complaint_with_http_info(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get complaint by id
Returns the complaint by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_complaint" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `get_complaint`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_complaint`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_complaint`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_complaint`")
collection_formats = {}
resource_path = '/complaints/{complaintId}'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
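# Usage sketch (hypothetical values): unlike the form-encoded endpoints above,
# this GET sends the optional `fields` filter as a query parameter.
#
#     complaint = api.get_complaint(
#         complaint_id=42,
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>',
#         fields='complaintId,complaintTitle,createdDate')  # separator assumed comma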
def get_complaint_categories(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get the list of complaint categories
Returns the list of complaint categories
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_complaint_categories(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start: Start index for pagination; the initial value starts from 0 (required)
:param int end: End index for pagination (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategoryList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_complaint_categories_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_complaint_categories_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_complaint_categories_with_http_info(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get the list of complaint categories
Returns the list of complaint categories
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_complaint_categories_with_http_info(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start: Start index for pagination; the initial value starts from 0 (required)
:param int end: End index for pagination (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategoryList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_complaint_categories" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_complaint_categories`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_complaint_categories`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_complaint_categories`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_complaint_categories`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_complaint_categories`")
collection_formats = {}
resource_path = '/complaints/categories'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintCategoryList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
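# Usage sketch (hypothetical values): `start` and `end` paginate the category
# list, with `start` beginning at 0.
#
#     page = api.get_complaint_categories(
#         start=0,
#         end=20,
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>')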
def get_complaints_for_user(self, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get the list of all complaints visible to the user
Returns the list of all complaints visible to the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_complaints_for_user(complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index for pagination; the initial value starts from 0 (required)
:param int end: End index for pagination (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_complaints_for_user_with_http_info(complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_complaints_for_user_with_http_info(complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_complaints_for_user_with_http_info(self, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get the list of all complaints visible to the user
Returns the list of all complaints visible to the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_complaints_for_user_with_http_info(complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index for pagination; the initial value starts from 0 (required)
:param int end: End index for pagination (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_complaints_for_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_status' is set
if ('complaint_status' not in params) or (params['complaint_status'] is None):
raise ValueError("Missing the required parameter `complaint_status` when calling `get_complaints_for_user`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_complaints_for_user`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_complaints_for_user`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_complaints_for_user`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_complaints_for_user`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_complaints_for_user`")
collection_formats = {}
resource_path = '/complaints'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'complaint_status' in params:
query_params['complaintStatus'] = params['complaint_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
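# Usage sketch (hypothetical values): filtering by status and, optionally, by
# category. `complaint_status` accepts ALL, UNREPLIED, REPLIED or CLOSED.
#
#     complaints = api.get_complaints_for_user(
#         complaint_status='UNREPLIED',
#         start=0,
#         end=20,
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>',
#         category_id=7)  # optional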
def get_friends_complaints(self, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints shared by your friends
Returns the list of complaints shared by friends
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_friends_complaints(complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index for pagination; the initial value starts from 0 (required)
:param int end: End index for pagination (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_friends_complaints_with_http_info(complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_friends_complaints_with_http_info(complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_friends_complaints_with_http_info(self, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints shared by your friends
Returns the list of complaints shared by friends
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_friends_complaints_with_http_info(complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index for pagination; the initial value starts from 0 (required)
:param int end: End index for pagination (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_friends_complaints" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_status' is set
if ('complaint_status' not in params) or (params['complaint_status'] is None):
raise ValueError("Missing the required parameter `complaint_status` when calling `get_friends_complaints`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_friends_complaints`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_friends_complaints`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_friends_complaints`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_friends_complaints`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_friends_complaints`")
collection_formats = {}
resource_path = '/complaints/friends'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'complaint_status' in params:
query_params['complaintStatus'] = params['complaint_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
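# Usage sketch (hypothetical values): calling the `_with_http_info` variant
# directly. Without `_return_http_data_only` it returns, by the generated
# client's usual convention (assumed here), a (data, status, headers) tuple.
#
#     result = api.get_friends_complaints_with_http_info(
#         complaint_status='ALL',
#         start=0,
#         end=10,
#         logged_in_user_id='user-101',
#         access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>')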
def get_recommend_complaint(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended complaints
Returns the list of recommended complaints
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommend_complaint(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start: Start index for pagination; the initial value starts from 0 (required)
:param int end: End index for pagination (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_recommend_complaint_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_recommend_complaint_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
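# A minimal synchronous usage sketch (hypothetical values; assumes `api`
# is a configured instance of this API class; the comma-separated
# `fields` value is an assumption, not confirmed by this client):
#
#     recommended = api.get_recommend_complaint(
#         start=0, end=10, logged_in_user_id='1001',
#         access_token='<ACCESS_TOKEN>', client_token='<CLIENT_TOKEN>',
#         fields='complaintId,complaintTitle,createdDate')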
def get_recommend_complaint_with_http_info(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended complaints
Returns the list of recommended complaints
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommend_complaint_with_http_info(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recommend_complaint" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_recommend_complaint`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_recommend_complaint`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_recommend_complaint`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_recommend_complaint`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_recommend_complaint`")
collection_formats = {}
resource_path = '/complaints/recommend'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_recommended_complaints_from_db(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended complaints from DB
Returns the list of recommended complaints from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_complaints_from_db(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_recommended_complaints_from_db_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_recommended_complaints_from_db_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
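# A minimal usage sketch (hypothetical values; `user_id` is the user
# whose recommendations are fetched, per the path parameter below):
#
#     recommended = api.get_recommended_complaints_from_db(
#         user_id=42, start=0, end=10, logged_in_user_id='1001',
#         access_token='<ACCESS_TOKEN>', client_token='<CLIENT_TOKEN>')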
def get_recommended_complaints_from_db_with_http_info(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended complaints from DB
Returns the list of recommended complaints from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_complaints_from_db_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recommended_complaints_from_db" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_recommended_complaints_from_db`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_recommended_complaints_from_db`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_recommended_complaints_from_db`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_recommended_complaints_from_db`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_recommended_complaints_from_db`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_recommended_complaints_from_db`")
collection_formats = {}
resource_path = '/complaints/{userId}/recommendedComplaints'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_recommended_users_from_db(self, complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended users from DB
Returns the list of recommended users from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_users_from_db(complaint_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)profileImage<br/><b>B) Available values -</b><br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)emailId<br/>5)profileImage<br/>6)birthDate<br/>7)currentUserFollowing<br/>8)currentUserFriend<br/>9)equityScore
:return: VerveResponseUserList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_recommended_users_from_db_with_http_info(complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_recommended_users_from_db_with_http_info(complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
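# A minimal usage sketch (hypothetical values; `complaint_id` selects
# the complaint whose recommended users are returned):
#
#     users = api.get_recommended_users_from_db(
#         complaint_id=7, start=0, end=10, logged_in_user_id='1001',
#         access_token='<ACCESS_TOKEN>', client_token='<CLIENT_TOKEN>')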
def get_recommended_users_from_db_with_http_info(self, complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended users from DB
Returns the list of recommended users from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_users_from_db_with_http_info(complaint_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)profileImage<br/><b>B) Available values -</b><br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)emailId<br/>5)profileImage<br/>6)birthDate<br/>7)currentUserFollowing<br/>8)currentUserFriend<br/>9)equityScore
:return: VerveResponseUserList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recommended_users_from_db" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `get_recommended_users_from_db`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_recommended_users_from_db`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_recommended_users_from_db`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_recommended_users_from_db`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_recommended_users_from_db`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_recommended_users_from_db`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/recommendedUsers'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseUserList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_solutions(self, complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of solutions by ComplaintId
Returns the list of solutions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_solutions(complaint_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolutionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_solutions_with_http_info(complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_solutions_with_http_info(complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
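# A minimal usage sketch (hypothetical values):
#
#     solutions = api.get_solutions(
#         complaint_id=7, start=0, end=10, logged_in_user_id='1001',
#         access_token='<ACCESS_TOKEN>', client_token='<CLIENT_TOKEN>')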
def get_solutions_with_http_info(self, complaint_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of solutions by ComplaintId
Returns the list of solutions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_solutions_with_http_info(complaint_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolutionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_solutions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `get_solutions`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_solutions`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_solutions`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_solutions`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_solutions`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_solutions`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/solutions'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolutionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_complaints(self, user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints shared by user
Returns the list of complaints shared by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_complaints(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_user_complaints_with_http_info(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_user_complaints_with_http_info(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
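# A minimal usage sketch (hypothetical values; `complaint_status` must
# be one of ALL, UNREPLIED, REPLIED, or CLOSED per the docstring, and
# the optional `category_id` narrows the result to one category):
#
#     shared = api.get_user_complaints(
#         user_id=42, complaint_status='UNREPLIED', start=0, end=10,
#         logged_in_user_id='1001', access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>', category_id=3)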
def get_user_complaints_with_http_info(self, user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints shared by user
Returns the list of complaints shared by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_complaints_with_http_info(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'complaint_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_complaints" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_user_complaints`")
# verify the required parameter 'complaint_status' is set
if ('complaint_status' not in params) or (params['complaint_status'] is None):
raise ValueError("Missing the required parameter `complaint_status` when calling `get_user_complaints`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_user_complaints`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_user_complaints`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_user_complaints`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_user_complaints`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_user_complaints`")
collection_formats = {}
resource_path = '/complaints/{userId}/shared'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'complaint_status' in params:
query_params['complaintStatus'] = params['complaint_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_subscribed_complaint_categories(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaint categories subscribed by user
Returns the list of complaint categories subscribed by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_complaint_categories(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategoryList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_user_subscribed_complaint_categories_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_user_subscribed_complaint_categories_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
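# A minimal usage sketch (hypothetical values):
#
#     categories = api.get_user_subscribed_complaint_categories(
#         user_id=42, start=0, end=10, logged_in_user_id='1001',
#         access_token='<ACCESS_TOKEN>', client_token='<CLIENT_TOKEN>')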
def get_user_subscribed_complaint_categories_with_http_info(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaint categories subscribed by user
Returns the list of complaint categories subscribed by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_complaint_categories_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategoryList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_subscribed_complaint_categories" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_user_subscribed_complaint_categories`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_user_subscribed_complaint_categories`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_user_subscribed_complaint_categories`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_user_subscribed_complaint_categories`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_user_subscribed_complaint_categories`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_user_subscribed_complaint_categories`")
collection_formats = {}
resource_path = '/complaints/categories/{userId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintCategoryList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_subscribed_complaints(self, user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints subscribed by user
Returns the list of complaints subscribed by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_complaints(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_user_subscribed_complaints_with_http_info(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_user_subscribed_complaints_with_http_info(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
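# A minimal usage sketch (hypothetical values):
#
#     subscribed = api.get_user_subscribed_complaints(
#         user_id=42, complaint_status='ALL', start=0, end=10,
#         logged_in_user_id='1001', access_token='<ACCESS_TOKEN>',
#         client_token='<CLIENT_TOKEN>')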
def get_user_subscribed_complaints_with_http_info(self, user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints subscribed by user
Returns the list of complaints subscribed by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_complaints_with_http_info(user_id, complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index; pagination starts from 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaintList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'complaint_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_subscribed_complaints" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_user_subscribed_complaints`")
# verify the required parameter 'complaint_status' is set
if ('complaint_status' not in params) or (params['complaint_status'] is None):
raise ValueError("Missing the required parameter `complaint_status` when calling `get_user_subscribed_complaints`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_user_subscribed_complaints`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_user_subscribed_complaints`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_user_subscribed_complaints`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_user_subscribed_complaints`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_user_subscribed_complaints`")
collection_formats = {}
resource_path = '/complaints/{userId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'complaint_status' in params:
query_params['complaintStatus'] = params['complaint_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def like_solution(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Like solution
Allows the user to like the solution. Returns the liked solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.like_solution(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.like_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.like_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
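# An asynchronous usage sketch (hypothetical values): passing a
# `callback` makes the wrapper return the request thread instead of
# the response, as described in the docstring above.
#
#     def on_liked(response):
#         pprint(response)
#
#     thread = api.like_solution(
#         complaint_id=7, solution_id=3, logged_in_user_id='1001',
#         access_token='<ACCESS_TOKEN>', client_token='<CLIENT_TOKEN>',
#         callback=on_liked)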
def like_solution_with_http_info(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Like solution
Allows the user to like the solution. Returns the liked solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.like_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'solution_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method like_solution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `like_solution`")
# verify the required parameter 'solution_id' is set
if ('solution_id' not in params) or (params['solution_id'] is None):
raise ValueError("Missing the required parameter `solution_id` when calling `like_solution`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `like_solution`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `like_solution`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `like_solution`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/solutions/{solutionId}/like'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
if 'solution_id' in params:
path_params['solutionId'] = params['solution_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolution',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def mark_as_an_solution(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Mark a solution as the accepted solution
Allows the user to mark a solution as accepted. This means the user is satisfied with the solution, and the complaint will be closed
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.mark_as_an_solution(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.mark_as_an_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.mark_as_an_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
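# Usage sketch for mark_as_an_solution (illustrative only; the ids and tokens
# are placeholder assumptions):
#
#   marked = api.mark_as_an_solution(complaint_id=101, solution_id=17,
#                                    logged_in_user_id='user-42',
#                                    access_token='ACCESS_TOKEN',
#                                    client_token='CLIENT_TOKEN')
#   # On success the solution is marked as accepted and the complaint closes.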
def mark_as_an_solution_with_http_info(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Mark a solution as the accepted solution
Allows the user to mark a solution as accepted. This means the user is satisfied with the solution and the complaint will be closed
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.mark_as_an_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'solution_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method mark_as_an_solution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `mark_as_an_solution`")
# verify the required parameter 'solution_id' is set
if ('solution_id' not in params) or (params['solution_id'] is None):
raise ValueError("Missing the required parameter `solution_id` when calling `mark_as_an_solution`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `mark_as_an_solution`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `mark_as_an_solution`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `mark_as_an_solution`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/solutions/{solutionId}/mark'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
if 'solution_id' in params:
path_params['solutionId'] = params['solution_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolution',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_complaints(self, search_text, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints by search
Returns the list of matching complaints
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.search_complaints(search_text, complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str search_text: Enter text to be searched (required)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index; counting begins at 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.search_complaints_with_http_info(search_text, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.search_complaints_with_http_info(search_text, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
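# Usage sketch for search_complaints (illustrative only; placeholder values,
# and the comma-separated `fields` format shown here is an assumption):
#
#   page = api.search_complaints(search_text='billing',
#                                complaint_status='ALL', start=0, end=10,
#                                logged_in_user_id='user-42',
#                                access_token='ACCESS_TOKEN',
#                                client_token='CLIENT_TOKEN',
#                                fields='complaintId,complaintTitle')
#   # Returns a VerveResponseComplaint holding the first page of matches.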
def search_complaints_with_http_info(self, search_text, complaint_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of complaints by search
Returns the list of matching complaints
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.search_complaints_with_http_info(search_text, complaint_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str search_text: Enter text to be searched (required)
:param str complaint_status: Complaint status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: Start index; counting begins at 0 (required)
:param int end: End index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['search_text', 'complaint_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_complaints" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'search_text' is set
if ('search_text' not in params) or (params['search_text'] is None):
raise ValueError("Missing the required parameter `search_text` when calling `search_complaints`")
# verify the required parameter 'complaint_status' is set
if ('complaint_status' not in params) or (params['complaint_status'] is None):
raise ValueError("Missing the required parameter `complaint_status` when calling `search_complaints`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `search_complaints`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `search_complaints`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `search_complaints`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `search_complaints`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `search_complaints`")
collection_formats = {}
resource_path = '/complaints/search'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'search_text' in params:
query_params['searchText'] = params['search_text']
if 'complaint_status' in params:
query_params['complaintStatus'] = params['complaint_status']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def subscribe_complaint(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe complaint
Allows the user to subscribe to a complaint. Returns the subscribed complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_complaint(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.subscribe_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.subscribe_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
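# Usage sketch for subscribe_complaint (illustrative only; placeholder values):
#
#   sub = api.subscribe_complaint(complaint_id=101,
#                                 logged_in_user_id='user-42',
#                                 access_token='ACCESS_TOKEN',
#                                 client_token='CLIENT_TOKEN')
#   # `sub` is a VerveResponseComplaint for the now-subscribed complaint.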
def subscribe_complaint_with_http_info(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe complaint
Allows the user to subscribe to a complaint. Returns the subscribed complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method subscribe_complaint" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `subscribe_complaint`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `subscribe_complaint`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `subscribe_complaint`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `subscribe_complaint`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def subscribe_complaint_category(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe complaint category
Allows the user to subscribe to a complaint category. Returns the subscribed complaint category.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_complaint_category(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.subscribe_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.subscribe_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
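# Usage sketch for subscribe_complaint_category (illustrative only;
# placeholder values):
#
#   cat = api.subscribe_complaint_category(category_id=7,
#                                          logged_in_user_id='user-42',
#                                          access_token='ACCESS_TOKEN',
#                                          client_token='CLIENT_TOKEN')
#   # `cat` is a VerveResponseComplaintCategory for the subscribed category.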
def subscribe_complaint_category_with_http_info(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe complaint category
Allows the user to subscribe to a complaint category. Returns the subscribed complaint category.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method subscribe_complaint_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `subscribe_complaint_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `subscribe_complaint_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `subscribe_complaint_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `subscribe_complaint_category`")
collection_formats = {}
resource_path = '/complaints/categories/{categoryId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def unmark_as_an_solution(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unmark a solution as the accepted solution
Allows the user to unmark a previously accepted solution. This removes the accepted-solution mark.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unmark_as_an_solution(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.unmark_as_an_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.unmark_as_an_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
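# Usage sketch for unmark_as_an_solution (illustrative only; placeholder
# values):
#
#   unmarked = api.unmark_as_an_solution(complaint_id=101, solution_id=17,
#                                        logged_in_user_id='user-42',
#                                        access_token='ACCESS_TOKEN',
#                                        client_token='CLIENT_TOKEN')
#   # Removes the accepted-solution mark set by mark_as_an_solution.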
def unmark_as_an_solution_with_http_info(self, complaint_id, solution_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unmark a solution as the accepted solution
Allows the user to unmark a previously accepted solution. This removes the accepted-solution mark.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unmark_as_an_solution_with_http_info(complaint_id, solution_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param int solution_id: solutionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/><b>B) Available values -</b> <br/>1)solutionId<br/>2)solutionDescription<br/>3)createdDate<br/>4)complaintId<br/>5)solvingUser<br/>6)isMarkedSolution<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'solution_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method unmark_as_an_solution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `unmark_as_an_solution`")
# verify the required parameter 'solution_id' is set
if ('solution_id' not in params) or (params['solution_id'] is None):
raise ValueError("Missing the required parameter `solution_id` when calling `unmark_as_an_solution`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `unmark_as_an_solution`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `unmark_as_an_solution`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `unmark_as_an_solution`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/solutions/{solutionId}/unmark'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
if 'solution_id' in params:
path_params['solutionId'] = params['solution_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolution',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def unsubscribe_complaint(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe complaint
Allows the user to unsubscribe from a complaint. Returns the unsubscribed complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_complaint(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.unsubscribe_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.unsubscribe_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
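# Async usage sketch (illustrative only): passing `callback` makes the call
# return the request thread instead of the deserialized response. The
# callback name and the joinability of the returned thread are assumptions.
#
#   def on_done(response):
#       print(response)  # VerveResponseComplaint
#
#   thread = api.unsubscribe_complaint(101, 'user-42', 'ACCESS_TOKEN',
#                                      'CLIENT_TOKEN', callback=on_done)
#   thread.join()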
def unsubscribe_complaint_with_http_info(self, complaint_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe complaint
Allows the user to unsubscribe from a complaint. Returns the unsubscribed complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_complaint_with_http_info(complaint_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method unsubscribe_complaint" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `unsubscribe_complaint`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `unsubscribe_complaint`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `unsubscribe_complaint`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `unsubscribe_complaint`")
collection_formats = {}
resource_path = '/complaints/{complaintId}/unsubscribe'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def unsubscribe_complaint_category(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe complaint category
Allows the user to unsubscribe from a complaint category. Returns the unsubscribed complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_complaint_category(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.unsubscribe_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.unsubscribe_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
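# Usage sketch for unsubscribe_complaint_category (illustrative only;
# placeholder values):
#
#   cat = api.unsubscribe_complaint_category(category_id=7,
#                                            logged_in_user_id='user-42',
#                                            access_token='ACCESS_TOKEN',
#                                            client_token='CLIENT_TOKEN')
#   # `cat` is a VerveResponseComplaintCategory for the unsubscribed category.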
def unsubscribe_complaint_category_with_http_info(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe complaint category
Allows the user to unsubscribe from a complaint category. Returns the unsubscribed complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_complaint_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method unsubscribe_complaint_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `unsubscribe_complaint_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `unsubscribe_complaint_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `unsubscribe_complaint_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `unsubscribe_complaint_category`")
collection_formats = {}
resource_path = '/complaints/categories/{categoryId}/unsubscribe'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_complaint(self, complaint_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update complaint
Allows the user to update a complaint. Returns the updated complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_complaint(complaint_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str complaint_title: Complaint Title (required)
:param str complaint_description: Describe Complaint (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_complaint_with_http_info(complaint_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.update_complaint_with_http_info(complaint_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs)
return data
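# Usage sketch for update_complaint (illustrative only; placeholder values):
#
#   updated = api.update_complaint(complaint_id=101,
#                                  complaint_title='Billing error',
#                                  complaint_description='Charged twice in May',
#                                  logged_in_user_id='user-42',
#                                  access_token='ACCESS_TOKEN',
#                                  client_token='CLIENT_TOKEN')
#   # Issues a PUT to /complaints/{complaintId}; returns the updated complaint.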
def update_complaint_with_http_info(self, complaint_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update complaint
Allows the user to update a complaint. Returns the updated complaint
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_complaint_with_http_info(complaint_id, complaint_title, complaint_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int complaint_id: complaintId (required)
:param str complaint_title: Complaint Title (required)
:param str complaint_description: Describe Complaint (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)createdDate<br/><b>B) Available values -</b> <br/>1)complaintId<br/>2)complaintTitle<br/>3)complaintDescription<br/>4)issuer<br/>5)noOfSolutions<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseComplaint
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['complaint_id', 'complaint_title', 'complaint_description', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_complaint" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'complaint_id' is set
if ('complaint_id' not in params) or (params['complaint_id'] is None):
raise ValueError("Missing the required parameter `complaint_id` when calling `update_complaint`")
# verify the required parameter 'complaint_title' is set
if ('complaint_title' not in params) or (params['complaint_title'] is None):
raise ValueError("Missing the required parameter `complaint_title` when calling `update_complaint`")
# verify the required parameter 'complaint_description' is set
if ('complaint_description' not in params) or (params['complaint_description'] is None):
raise ValueError("Missing the required parameter `complaint_description` when calling `update_complaint`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `update_complaint`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_complaint`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `update_complaint`")
collection_formats = {}
resource_path = '/complaints/{complaintId}'.replace('{format}', 'json')
path_params = {}
if 'complaint_id' in params:
path_params['complaintId'] = params['complaint_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'complaint_title' in params:
form_params.append(('complaintTitle', params['complaint_title']))
if 'complaint_description' in params:
form_params.append(('complaintDescription', params['complaint_description']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaint',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_complaint_category(self, category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update complaint category
Returns the updated complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_complaint_category(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str category_name: Category Name (required)
:param str category_description: Describe category (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_complaint_category_with_http_info(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.update_complaint_category_with_http_info(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs)
return data
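# Usage sketch for update_complaint_category (illustrative only; placeholder
# values):
#
#   cat = api.update_complaint_category(category_id=7,
#                                       category_name='Billing',
#                                       category_description='Payment issues',
#                                       logged_in_user_id='user-42',
#                                       access_token='ACCESS_TOKEN',
#                                       client_token='CLIENT_TOKEN')
#   # Issues a PUT to /complaints/categories/{categoryId}.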
def update_complaint_category_with_http_info(self, category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update complaint category
Returns the updated complaint category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_complaint_category_with_http_info(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str category_name: Category Name (required)
:param str category_description: Describe category (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in the result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseComplaintCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'category_name', 'category_description', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_complaint_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `update_complaint_category`")
# verify the required parameter 'category_name' is set
if ('category_name' not in params) or (params['category_name'] is None):
raise ValueError("Missing the required parameter `category_name` when calling `update_complaint_category`")
# verify the required parameter 'category_description' is set
if ('category_description' not in params) or (params['category_description'] is None):
raise ValueError("Missing the required parameter `category_description` when calling `update_complaint_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `update_complaint_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_complaint_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `update_complaint_category`")
collection_formats = {}
resource_path = '/complaints/categories/{categoryId}'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'category_name' in params:
form_params.append(('categoryName', params['category_name']))
if 'category_description' in params:
form_params.append(('categoryDescription', params['category_description']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseComplaintCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
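# Editorial note (illustrative sketch, not part of the generated client):
# the `update_complaint_category` wrapper above forces
# `_return_http_data_only`, so it returns only the deserialized
# `VerveResponseComplaintCategory`; calling
# `update_complaint_category_with_http_info` directly should also yield the
# HTTP status code and headers. A hedged example, assuming an `api`
# instance of this class and placeholder IDs/tokens:
#
#     data = api.update_complaint_category(
#         category_id=7,
#         category_name="Billing",
#         category_description="Billing-related complaints",
#         logged_in_user_id="user-123",
#         access_token="<access-token>",
#         client_token="<client-token>",
#         fields="categoryId,categoryName",  # format assumed comma-separated
#     )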
def update_solution(self, solution_id, solution, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update solution
Allows the user to update a solution. Returns the updated solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_solution(solution_id, solution, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int solution_id: The ID of the solution to update (required)
:param str solution: The updated solution text (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter the fields in the result. Default fields: solutionId, solutionDescription, createdDate. Available fields: solutionId, solutionDescription, createdDate, complaintId, solvingUser, isMarkedSolution, noOfLikes, noOfDislikes, replyCount, isLiked, isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_solution_with_http_info(solution_id, solution, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.update_solution_with_http_info(solution_id, solution, logged_in_user_id, access_token, client_token, **kwargs)
return data
def update_solution_with_http_info(self, solution_id, solution, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update solution
Allows the user to update a solution. Returns the updated solution
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_solution_with_http_info(solution_id, solution, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int solution_id: The ID of the solution to update (required)
:param str solution: The updated solution text (required)
:param str logged_in_user_id: User ID of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: The client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter the fields in the result. Default fields: solutionId, solutionDescription, createdDate. Available fields: solutionId, solutionDescription, createdDate, complaintId, solvingUser, isMarkedSolution, noOfLikes, noOfDislikes, replyCount, isLiked, isDisliked
:return: VerveResponseSolution
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['solution_id', 'solution', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_solution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'solution_id' is set
if ('solution_id' not in params) or (params['solution_id'] is None):
raise ValueError("Missing the required parameter `solution_id` when calling `update_solution`")
# verify the required parameter 'solution' is set
if ('solution' not in params) or (params['solution'] is None):
raise ValueError("Missing the required parameter `solution` when calling `update_solution`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `update_solution`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_solution`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `update_solution`")
collection_formats = {}
resource_path = '/complaints/solutions/{solutionId}'.replace('{format}', 'json')
path_params = {}
if 'solution_id' in params:
path_params['solutionId'] = params['solution_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'solution' in params:
form_params.append(('solution', params['solution']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseSolution',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
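# ---------------------------------------------------------------------------
# Usage sketch (editorial addition, illustrative only). The instantiation
# below assumes this class is exposed as `ComplaintApi` with a default
# `api_client`; the class name, IDs, and tokens are all placeholders --
# adjust them to your environment.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from pprint import pprint

    api = ComplaintApi()  # hypothetical constructor; real setup may differ

    # Synchronous call: blocks and returns a VerveResponseSolution.
    result = api.update_solution(
        solution_id=42,                       # placeholder ID
        solution="Reboot the device first.",  # placeholder solution text
        logged_in_user_id="user-123",         # placeholder user ID
        access_token="<access-token>",        # obtained after authentication
        client_token="<client-token>",        # from the Applications section
    )
    pprint(result)

    # Asynchronous call: passing `callback` makes the method return the
    # request thread immediately; the callback receives the deserialized
    # response once the request completes.
    def on_response(response):
        pprint(response)

    thread = api.update_solution(
        42, "Reboot the device first.", "user-123",
        "<access-token>", "<client-token>",
        callback=on_response,
    )
    thread.join()  # assumes a joinable thread object is returned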