hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
070f70450f78ed2ac85e3d438750237fa1375b01 | 21,233 | py | Python | good_params/usps/test_config_extra.py | Lupin1998/inv-ML | 9f3db461911748292dff18024587538eb66d44bf | [
"MIT"
] | 1 | 2021-12-14T09:16:17.000Z | 2021-12-14T09:16:17.000Z | good_params/usps/test_config_extra.py | Lupin1998/inv-ML | 9f3db461911748292dff18024587538eb66d44bf | [
"MIT"
] | null | null | null | good_params/usps/test_config_extra.py | Lupin1998/inv-ML | 9f3db461911748292dff18024587538eb66d44bf | [
"MIT"
] | 2 | 2021-12-14T09:10:00.000Z | 2022-01-21T16:57:44.000Z | # usps: ML-Enc baseline + Extra-Head
def import_test_config(test_num, mode='encoder'):
    """Dispatch to the builder for the requested USPS test configuration.

    Parameters
    ----------
    test_num : int
        Test-case id; 1, 2 and 3 are implemented (4-8 are disabled below).
    mode : str
        'encoder' or 'decoder'; forwarded unchanged to the test_* builder.

    Returns
    -------
    dict
        The merged simulation parameter dictionary.

    Raises
    ------
    ValueError
        For an unimplemented ``test_num`` (the original silently fell
        through and returned ``None``, which crashed callers later).
    """
    if test_num == 1:
        return test_1(mode)
    elif test_num == 2:
        return test_2(mode)
    elif test_num == 3:
        return test_3(mode)
    # elif test_num == 4:
    #     return test_4(mode)
    # elif test_num == 5:
    #     return test_5(mode)
    # elif test_num == 6:
    #     return test_6(mode)
    # elif test_num == 7:
    #     return test_7(mode)
    # elif test_num == 8:
    #     return test_8(mode)
    raise ValueError("Unknown test_num: {!r}".format(test_num))
def base_params(mode):
    """Return the shared default parameter dict for a simulation run.

    mode: 'encoder' or 'decoder'; any other value yields an empty dict.
    The test_* builders call this first and then shallow-merge their
    per-test overrides on top via ``dict.update``.
    NOTE(review): the key 'LISWeght' (sic) is presumably the spelling the
    consumers expect — do not "fix" it without checking callers.
    """
    param = {}
    if mode == 'encoder':
        param = dict(
            # regular
            EPOCHS=8000,
            ratio = dict(AE=0.005, dist=1, angle=0, push=0.8, orth=0, pad=0),
            add_jump = True,
            # structure
            NetworkStructure = dict(
                layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
                Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
                inv_Enc=0, inv_Dec=1,
            ),
            # Extra Head (DR project)
            ExtraHead = dict(
                layer = [],
                weight = [],
            ),
            # AE
            AEWeight = dict(
                each = [],
                AE_gradual = [0, 0, 1],
            ),
            # LIS
            LISWeght = dict(
                cross = [1,], # ok
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
                dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push],
                cross_w = [1, 1, 1],
                enc_forward_w = [1, 1, 1],
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [0, 0, 0],
                extra_w = [1, 1, 1],
                # gradual
                LIS_gradual = [0, 0, 1], # [start, end, mode]
                push_gradual = dict( # add 0716: [1 -> 0]
                    cross_w = [500, 1000, 0],
                    enc_w = [500, 1000, 0],
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [0, 0, 0],
                ),
            ),
            # Orth
            OrthWeight = dict(
                Orth_gradual = [0, 0, 1],
                each = [],
            ),
            ### inverse mode
            InverseMode = dict(
                mode = "pinverse", #"CSinverse",
                loss_type = "L2",
                padding = [ 0, 0, 0, 0, 0, 0, 0, 0],
                pad_w = [ 0, 0, 0, 0, 0, 0, 0, 0],
                pad_gradual = [0, 0, 1],
            ),
        )
    # NOTE(review): the decoder defaults omit OrthWeight/InverseMode; any
    # decoder-mode consumer of those keys depends on the test_* overrides.
    elif mode == 'decoder':
        param = dict(
            # regular
            EPOCHS= 1,
            ratio = dict(AE=0.005, dist=1, angle=0, push=0.8, orth=0, pad=0),
            add_jump = True,
            # structure
            NetworkStructure = dict(
                layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
                Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
                inv_Enc=0, inv_Dec=1,
            ),
            # Extra Head (DR project)
            ExtraHead = dict(
                layer = [],
                weight = [],
            ),
            # AE
            AEWeight = dict(
                each = [],
                AE_gradual = [0, 0, 1],
            ),
            # LIS
            LISWeght = dict(
                cross = [0,], # ok
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push],
                cross_w = [0, 0, 0],
                enc_forward_w = [0, 0, 0],
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [0, 0, 0],
                extra_w = [0, 0, 0],
                # gradual
                LIS_gradual = [0, 0, 1], # [start, end, mode]
                push_gradual = dict( # [1 -> 0]
                    cross_w = [0, 0, 0],
                    enc_w = [0, 0, 0],
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [0, 0, 0],
                ),
            ),
        )
    # result
    return param
# 8layers ML-Enc + Extra-Head for USPS
def test_1(mode):
    """Test case 1: 8-layer ML-Enc + Extra-Head on USPS (full-batch, 9298).

    Starts from ``base_params(mode)`` and shallow-merges the per-test
    overrides, so every nested dict listed here fully REPLACES its
    counterpart from the base config.
    NOTE(review): if ``mode`` is neither 'encoder' nor 'decoder', ``param``
    is never bound and ``params.update(param)`` raises NameError.
    """
    params = base_params(mode)
    if mode == 'encoder':
        param = dict(
            # regular
            DATASET = "usps",
            numberClass = 10,
            BATCHSIZE = 9298,
            N_dataset = 9298,
            EPOCHS=2000,
            regularB = 100,
            MAEK = 30,
            PlotForloop = 1000,
            ratio = dict(AE=0, dist=1, angle=0, push=0.8,
                orth=0,
                pad=0),
            # structure
            NetworkStructure = dict(
                layer = [256, 256, 256, 256, 256, 256, 256, 256, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
                Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
                inv_Enc=0, inv_Dec=1,
            ),
            # Extra Head (DR project)
            ExtraHead = dict(
                layer = [0, 40, 10, 10, 10, 8, 4, 2, ],
                weight = [ 2, 4, 8, 16, 32, 64, 128, ],
            ),
            # LIS
            LISWeght = dict(
                cross = [1,],
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
                dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push],
                cross_w = [1, 1, 10],
                enc_forward_w = [1, 1, 10],
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [1, 1, 10],
                extra_w = [1, 1, 1],
                # gradual
                LIS_gradual = [0, 0, 1], # [start, end, mode]
                push_gradual = dict(
                    cross_w = [6000, 7000, 0], # OK, 0731
                    enc_w = [6000, 7000, 0], # OK
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [6000, 7000, 0], # OK
                ),
            ),
            # Orth
        )
    elif mode == 'decoder':
        param = dict(
            # regular
            DATASET = "usps",
            numberClass = 10,
            BATCHSIZE = 9298,
            N_dataset = 9298,
            EPOCHS= 0,
            PlotForloop = 1,
            ratio = dict(AE=0, dist=1, angle=0, push=0.8, orth=0, pad=0),
            # structure
            NetworkStructure = dict(
                layer = [256, 256, 256, 256, 256, 256, 256, 256, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
                Dec_require_gard = [ 1, 0, 0, 0, 0, 0, 0, 0],
                inv_Enc=0, inv_Dec=1,
            ),
            # AE layer
            AEWeight = dict(
                each = [1, 0, 0, 0, 0, 0, 0, 0, 0],
                AE_gradual = [0, 0, 1], # [start, end, mode]
            ),
            # LIS
            LISWeght = dict(
                cross = [0,], # ok
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_forward = [1, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push], add 0716
                cross_w = [0, 0, 0],
                enc_forward_w = [0, 0, 0], # [dist, angle, push]
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [0, 0, 0],
                extra_w = [1, 1, 1],
                # gradual
                LIS_gradual = [0, 0, 1], # [start, end, mode]
                push_gradual = dict(
                    cross_w = [0, 0, 0],
                    enc_w = [0, 0, 0],
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [0, 0, 0],
                ),
            ),
        )
    # result
    params.update(param)
    return params
# usps, 8 layers, ML-Enc + Extra-Head
def test_2(mode):
    # test_8_8layers_0806_good_usps
    """Test case 2: 8-layer ML-Enc + Extra-Head on USPS with orth=0.1 and a
    full ML-AE decoder phase (both Enc and Dec trainable in decoder mode).

    Builds on ``base_params(mode)`` and shallow-merges the overrides, so a
    nested dict here fully replaces the base one of the same key.
    NOTE(review): for any other ``mode`` value, ``param`` is unbound and
    ``params.update(param)`` raises NameError.
    """
    params = base_params(mode)
    if mode == 'encoder':
        param = dict(
            # regular
            DATASET = "usps",
            numberClass = 10,
            BATCHSIZE = 9298,
            N_dataset = 9298,
            EPOCHS=4000,
            regularB = 100,
            MAEK = 30,
            PlotForloop = 1000,
            ratio = dict(AE=0, dist=1, angle=0, push=0.8,
                orth=0.1,
                pad=0),
            # structure
            NetworkStructure = dict(
                layer = [256, 256, 256, 256, 256, 256, 256, 256, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
                Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
                inv_Enc=0, inv_Dec=1,
            ),
            # Extra Head (DR project)
            ExtraHead = dict(
                layer = [0, 40, 10, 10, 10, 10, 8, 4, ],
                weight = [ 2, 4, 8, 16, 32, 64, 128, ],
            ),
            # LIS
            LISWeght = dict(
                cross = [1,], # ok
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
                dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push],
                cross_w = [1, 1, 10],
                enc_forward_w = [1, 1, 10],
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [1, 1, 10],
                extra_w = [1, 1, 1],
                # gradual
                LIS_gradual = [0, 0, 1], # [start, end, mode]
                push_gradual = dict( # add 0716: [1 -> 0]
                    cross_w = [3000, 10000, 0], # 0801-1
                    enc_w = [4000, 9000, 0], # 0801-1
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [5000, 8000, 0], # 0801-1
                ),
            ),
            # Orth
        )
    elif mode == 'decoder': # + ML-AE
        param = dict(
            # regular
            DATASET = "usps",
            numberClass = 10,
            BATCHSIZE = 9298,
            N_dataset = 9298,
            EPOCHS= 5000,
            PlotForloop = 1000,
            MAEK = 30,
            ratio = dict(AE=1, dist=1, angle=0, push=0.8, orth=0, pad=0),
            # structure
            NetworkStructure = dict(
                layer = [256, 256, 256, 256, 256, 256, 256, 256, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
                Dec_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
                inv_Enc=0, inv_Dec=1,
            ),
            # AE layer
            AEWeight = dict(
                each = [1, 0, 0, 0, 0, 0, 0, 0, 0],
                AE_gradual = [0, 0, 1],
            ),
            # LIS
            LISWeght = dict(
                cross = [1,], # ok
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
                dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push], add 0716
                cross_w = [0, 0, 0],
                enc_forward_w = [0, 0, 0],
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [0, 0, 0],
                extra_w = [1, 1, 1],
                # gradual
                LIS_gradual = [0, 0, 1], # [start, end, mode]
                push_gradual = dict(
                    cross_w = [0, 0, 0],
                    enc_w = [0, 0, 0],
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [0, 0, 0],
                ),
            ),
        )
    # result
    params.update(param)
    return params
# usps, 8 layers, ML-Enc + Extra-Head (class=10)
def test_3(mode):
    """Test case 3: 8-layer ML-Enc + Extra-Head on USPS with a stronger push
    term (push=4), an extra per-layer ``push_w`` in ExtraHead, and explicit
    OrthWeight scheduling in encoder mode.

    Builds on ``base_params(mode)`` and shallow-merges the overrides, so a
    nested dict here fully replaces the base one of the same key.
    NOTE(review): for any other ``mode`` value, ``param`` is unbound and
    ``params.update(param)`` raises NameError.
    """
    params = base_params(mode)
    if mode == 'encoder':
        param = dict(
            # regular
            DATASET = "usps",
            numberClass = 10,
            BATCHSIZE = 9298,
            N_dataset = 9298,
            EPOCHS = 4000,
            regularB = 3,
            MAEK = 15,
            PlotForloop = 1000,
            ratio = dict(AE=0, dist=1, angle=0,
                # push=0.8,
                push=4,
                orth=0,
                pad=0),
            # structure
            NetworkStructure = dict(
                layer = [256, 256, 256, 256, 256, 256, 256, 256, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
                Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
                inv_Enc=0, inv_Dec=1,
            ),
            # Extra Head (DR project)
            ExtraHead = dict(
                layer = [0, 40, 10, 10, 10, 10, 8, 4, ],
                weight = [ 2, 4, 8, 16, 32, 64, 128, ],
                push_w = [ 1, 4, 8, 16, 24, 32, 28, ],
            ),
            # LIS
            LISWeght = dict(
                cross = [1,],
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
                dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push],
                cross_w = [1, 1, 10],
                enc_forward_w = [1, 1, 10],
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [0, 0, 0],
                extra_w = [1, 1, 3],
                # gradual
                LIS_gradual = [0, 0, 1],
                push_gradual = dict(
                    cross_w = [3000, 10000, 0],
                    enc_w = [4000, 9000, 0],
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [3000, 8000, 0],
                ),
            ),
            # Orth
            OrthWeight = dict(
                Orth_gradual = [800, 1500, 1],
                each = [ 15000, 10000, 7000, 4000, 2000, 1000, 400, ],
            ),
        )
    elif mode == 'decoder':
        param = dict(
            # regular
            DATASET = "usps",
            numberClass = 10,
            BATCHSIZE = 9298,
            N_dataset = 9298,
            EPOCHS= 0,
            PlotForloop = 1000,
            MAEK = 30,
            ratio = dict(AE=0, dist=1, angle=0, push=0.8, orth=0, pad=0),
            # structure
            NetworkStructure = dict(
                layer = [256, 256, 256, 256, 256, 256, 256, 256, 10],
                relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
                Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
                Dec_require_gard = [ 1, 0, 0, 0, 0, 0, 0, 0],
                inv_Enc=0, inv_Dec=1,
            ),
            # AE layer
            AEWeight = dict(
                each = [1, 0, 0, 0, 0, 0, 0, 0, 0],
                AE_gradual = [0, 0, 1], # [start, end, mode]
            ),
            # LIS
            LISWeght = dict(
                cross = [0,], # ok
                enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_forward = [1, 0, 0, 0, 0, 0, 0, 0, 0],
                enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
                # [dist, angle, push], add 0716
                cross_w = [0, 0, 0],
                enc_forward_w = [0, 0, 0], # [dist, angle, push]
                dec_forward_w = [0, 0, 0],
                enc_backward_w = [0, 0, 0],
                dec_backward_w = [0, 0, 0],
                each_w = [0, 0, 0],
                extra_w = [1, 1, 1],
                # gradual
                LIS_gradual = [0, 0, 1], # [start, end, mode]
                push_gradual = dict(
                    cross_w = [0, 0, 0],
                    enc_w = [0, 0, 0],
                    dec_w = [0, 0, 0],
                    each_w = [0, 0, 0],
                    extra_w = [0, 0, 0],
                ),
            ),
        )
    # result
    params.update(param)
    return params
| 43.068966 | 91 | 0.296661 | 2,212 | 21,233 | 2.723779 | 0.055606 | 0.185892 | 0.210622 | 0.199834 | 0.889627 | 0.870207 | 0.847137 | 0.840166 | 0.840166 | 0.836515 | 0 | 0.182836 | 0.583479 | 21,233 | 492 | 92 | 43.156504 | 0.498417 | 0.065653 | 0 | 0.866667 | 0 | 0 | 0.004916 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012346 | false | 0 | 0.002469 | 0 | 0.032099 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
073d19ce056aca14bfe125f18c6ef9f0c3556230 | 46 | py | Python | cs150/list2.py | icterguru/DrLutchClass | 4ae75e047d00e36af7fd5019a7d751a44bc7daa8 | [
"Apache-2.0"
] | null | null | null | cs150/list2.py | icterguru/DrLutchClass | 4ae75e047d00e36af7fd5019a7d751a44bc7daa8 | [
"Apache-2.0"
] | null | null | null | cs150/list2.py | icterguru/DrLutchClass | 4ae75e047d00e36af7fd5019a7d751a44bc7daa8 | [
"Apache-2.0"
] | 1 | 2018-09-20T20:50:08.000Z | 2018-09-20T20:50:08.000Z | list1 = [1, 2, 3, 4, 5, 6, 7, 8, 9]
l1 = [:]
| 11.5 | 35 | 0.347826 | 11 | 46 | 1.454545 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.354839 | 0.326087 | 46 | 3 | 36 | 15.333333 | 0.16129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
074a6ca949845fa918e97f08264fe58c614d0798 | 1,783 | py | Python | gamestonk_terminal/custom/quantitative_analysis/qa_api.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | 1 | 2022-03-15T13:05:40.000Z | 2022-03-15T13:05:40.000Z | gamestonk_terminal/custom/quantitative_analysis/qa_api.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | 1 | 2022-01-15T01:24:24.000Z | 2022-01-15T01:24:24.000Z | gamestonk_terminal/custom/quantitative_analysis/qa_api.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | 1 | 2021-11-07T20:59:25.000Z | 2021-11-07T20:59:25.000Z | """Pred context API."""
# flake8: noqa
# pylint: disable=unused-import
# Menu commands
from gamestonk_terminal.common.quantitative_analysis.qa_view import display_raw as raw
from gamestonk_terminal.common.quantitative_analysis.qa_view import (
display_summary as summary,
)
from gamestonk_terminal.common.quantitative_analysis.qa_view import display_line as line
from gamestonk_terminal.common.quantitative_analysis.qa_view import display_hist as hist
from gamestonk_terminal.common.quantitative_analysis.qa_view import display_cdf as cdf
from gamestonk_terminal.common.quantitative_analysis.qa_view import display_bw as bw
from gamestonk_terminal.common.quantitative_analysis.qa_view import (
display_seasonal as decompose,
)
from gamestonk_terminal.common.quantitative_analysis.qa_view import (
display_cusum as cumsum,
)
from gamestonk_terminal.common.quantitative_analysis.qa_view import display_acf as acf
from gamestonk_terminal.common.quantitative_analysis.rolling_view import (
display_mean_std as rolling,
)
from gamestonk_terminal.common.quantitative_analysis.rolling_view import (
display_spread as spread,
)
from gamestonk_terminal.common.quantitative_analysis.rolling_view import (
display_quantile as quantile,
)
from gamestonk_terminal.common.quantitative_analysis.rolling_view import (
display_skew as skew,
)
from gamestonk_terminal.common.quantitative_analysis.rolling_view import (
display_kurtosis as kurtosis,
)
from gamestonk_terminal.common.quantitative_analysis.qa_view import (
display_normality as normality,
)
from gamestonk_terminal.common.quantitative_analysis.qa_view import (
display_qqplot as qqplot,
)
from gamestonk_terminal.common.quantitative_analysis.qa_view import (
display_unitroot as unitroot,
)
| 38.76087 | 88 | 0.842961 | 233 | 1,783 | 6.154506 | 0.180258 | 0.154114 | 0.248954 | 0.320084 | 0.799861 | 0.799861 | 0.799861 | 0.799861 | 0.799861 | 0.799861 | 0 | 0.000625 | 0.102075 | 1,783 | 45 | 89 | 39.622222 | 0.895066 | 0.042064 | 0 | 0.282051 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.435897 | 0 | 0.435897 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
07671634dc2875b863eb2885edf8beee82527a83 | 2,148 | py | Python | automation_orchestrator/orchestrator/migrations/0006_v022_4.py | basico-ps/AutomationOrchestrator | c2e9e2496acb53f00e51a03e6d2cada17fd7c5e4 | [
"BSD-3-Clause"
] | 26 | 2019-11-11T14:15:55.000Z | 2022-02-07T07:32:33.000Z | automation_orchestrator/orchestrator/migrations/0006_v022_4.py | basico-ps/AutomationOrchestrator | c2e9e2496acb53f00e51a03e6d2cada17fd7c5e4 | [
"BSD-3-Clause"
] | 50 | 2020-01-07T13:38:51.000Z | 2021-11-29T11:31:14.000Z | automation_orchestrator/orchestrator/migrations/0006_v022_4.py | basico-ps/AutomationOrchestrator | c2e9e2496acb53f00e51a03e6d2cada17fd7c5e4 | [
"BSD-3-Clause"
] | 9 | 2020-02-20T12:06:07.000Z | 2022-01-10T12:41:34.000Z | # Generated by Django 3.0.3 on 2020-02-27 19:51
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.0.3): adds schedule-window fields.

    Adds run_after/run_until time bounds and week-day/weekend toggles to
    ``bot`` and mirrors the exact same four fields on ``historicalbot``
    (presumably the django-simple-history shadow table — the two must stay
    in sync for history records to serialize).
    """

    dependencies = [
        ('orchestrator', '0005_v022_3'),
    ]
    operations = [
        # --- bot ---
        migrations.AddField(
            model_name='bot',
            name='run_after',
            field=models.TimeField(blank=True, help_text='Specify a time to limit the bot to only be active after this time.', null=True),
        ),
        migrations.AddField(
            model_name='bot',
            name='run_on_week_days',
            field=models.BooleanField(default=True, help_text='Specify whether the bot should be active on week days.'),
        ),
        migrations.AddField(
            model_name='bot',
            name='run_on_weekend_days',
            field=models.BooleanField(default=True, help_text='Specify whether the bot should be active on weekend days.'),
        ),
        migrations.AddField(
            model_name='bot',
            name='run_until',
            field=models.TimeField(blank=True, help_text='Specify a time to limit the bot to only be active before this time.', null=True),
        ),
        # --- historicalbot (history shadow of the fields above) ---
        migrations.AddField(
            model_name='historicalbot',
            name='run_after',
            field=models.TimeField(blank=True, help_text='Specify a time to limit the bot to only be active after this time.', null=True),
        ),
        migrations.AddField(
            model_name='historicalbot',
            name='run_on_week_days',
            field=models.BooleanField(default=True, help_text='Specify whether the bot should be active on week days.'),
        ),
        migrations.AddField(
            model_name='historicalbot',
            name='run_on_weekend_days',
            field=models.BooleanField(default=True, help_text='Specify whether the bot should be active on weekend days.'),
        ),
        migrations.AddField(
            model_name='historicalbot',
            name='run_until',
            field=models.TimeField(blank=True, help_text='Specify a time to limit the bot to only be active before this time.', null=True),
        ),
    ]
| 39.777778 | 139 | 0.614525 | 260 | 2,148 | 4.946154 | 0.215385 | 0.111975 | 0.143079 | 0.167963 | 0.888025 | 0.888025 | 0.888025 | 0.864697 | 0.837481 | 0.837481 | 0 | 0.014954 | 0.283985 | 2,148 | 53 | 140 | 40.528302 | 0.821196 | 0.02095 | 0 | 0.851064 | 1 | 0 | 0.324131 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.021277 | 0 | 0.085106 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
db962c13cf884ff51c55cb0f94effb535988511d | 65,709 | py | Python | src/evaluation/SimulationConfigs.py | Zayat/blockchain-models-simulator | abfb643b1e11424e7874016399b05bdc359eecd4 | [
"MIT"
] | 3 | 2021-06-10T08:32:05.000Z | 2021-06-11T08:06:27.000Z | src/evaluation/SimulationConfigs.py | Zayat/blockchain-models-simulator | abfb643b1e11424e7874016399b05bdc359eecd4 | [
"MIT"
] | null | null | null | src/evaluation/SimulationConfigs.py | Zayat/blockchain-models-simulator | abfb643b1e11424e7874016399b05bdc359eecd4 | [
"MIT"
] | null | null | null | import math
from collections import OrderedDict, defaultdict
import numpy as np
import pandas as pd
from bcns import Durations, sim, Simulator, SimulatorCoordinated
from bcns.sim import Equ_LatD, Equ_pooled_LatD, Exp_LatD, Exp_pooled_LatD
def distance_between_2_points(a: tuple, b: tuple) -> float:
    """Return the Euclidean distance between 2-D points ``a`` and ``b``,
    rounded to 6 decimal places (the precision used by the hard-coded
    latency tables in this module)."""
    x1, y1 = a
    x2, y2 = b
    # math.hypot is the stdlib Euclidean-norm helper: clearer than the
    # manual sqrt(dx**2 + dy**2) and robust against overflow/underflow.
    return round(math.hypot(x2 - x1, y2 - y1), 6)
def prepare_test_centrality_lat_mat_baseline(nodes):
    """Baseline centrality matrix: three equidistant nodes (unit latency,
    zero jitter), returned as a plain nested list.  ``nodes`` is unused."""
    baseline = Equ_LatD(3, 1, 0)
    return baseline.tolist()
def prepare_test_centrality_lat_mat_1(nodes):
    """Symmetric latency matrix for three miners on a line: M1 - M2 - M3,
    with latencies [0, 1, 2] / 2.  Non-miner nodes default to 0 latency and
    mirror the miner columns.

    ``nodes`` is expected to be exactly ['M1', 'M2', 'M3'] (the hard-coded
    rows have only three entries, so extra nodes raise KeyError).

    Returns
    -------
    list[list[float]]
        Dense latency matrix ordered as ``nodes``.
    """
    # BUGFIX: the original compared node-name strings with `is`/`is not`
    # (object identity), which only worked via CPython string interning.
    row_for = {
        'M1': np.array([0, 1, 2]) / 2,
        'M2': np.array([1, 0, 1]) / 2,
        'M3': np.array([2, 1, 0]) / 2,
    }
    zeros = [0.0] * len(nodes)
    lat_mat = OrderedDict.fromkeys(nodes)
    for n in nodes:
        lat_mat[n] = dict(zip(nodes, row_for[n] if n in row_for else zeros))
    for n in nodes:
        # Mirror miner columns so the matrix stays symmetric.
        if n not in row_for:
            lat_mat[n]['M1'] = lat_mat['M1'][n]
            lat_mat[n]['M2'] = lat_mat['M2'][n]
            lat_mat[n]['M3'] = lat_mat['M3'][n]
    return [[lat_mat[i][j] for i in nodes] for j in nodes]
def prepare_test_centrality_lat_mat_2(nodes):
    """Symmetric latency matrix for three miners with skewed distances:
    M1-M2 = 1/4, M2-M3 = 3/4, M1-M3 = 1 (i.e. rows [0,1,4]/4 etc.).

    ``nodes`` is expected to be exactly ['M1', 'M2', 'M3'] — the hard-coded
    rows only have three entries.

    Returns
    -------
    list[list[float]]
        Dense latency matrix ordered as ``nodes``.
    """
    # BUGFIX: the original used `is`/`is not` identity comparison on node
    # names; replaced with equality/membership tests.
    row_for = {
        'M1': np.array([0, 1, 4]) / 4,
        'M2': np.array([1, 0, 3]) / 4,
        'M3': np.array([4, 3, 0]) / 4,
    }
    zeros = [0.0] * len(nodes)
    lat_mat = OrderedDict.fromkeys(nodes)
    for n in nodes:
        lat_mat[n] = dict(zip(nodes, row_for[n] if n in row_for else zeros))
    for n in nodes:
        # Mirror miner columns so the matrix stays symmetric.
        if n not in row_for:
            lat_mat[n]['M1'] = lat_mat['M1'][n]
            lat_mat[n]['M2'] = lat_mat['M2'][n]
            lat_mat[n]['M3'] = lat_mat['M3'][n]
    return [[lat_mat[i][j] for i in nodes] for j in nodes]
def prepare2_lat_mat_asymmetric(nodes):
    """Latency matrix for two miners on a line of 8 nodes, made asymmetric
    by scaling the strict upper triangle by 100.

    ``nodes`` must contain 8 names with 'M1' at index 1 and 'M2' at index 5
    (matching the hard-coded distance rows).  Non-miner nodes mirror the
    miner columns before the asymmetric scaling is applied.
    """
    # BUGFIX: the original used `is`/`is not` identity comparison on node
    # names; replaced with membership tests.
    row_for = {
        'M1': np.array([1, 0, 1, 2, 3, 4, 5, 4]) / 4,
        'M2': np.array([5, 4, 3, 2, 1, 0, 1, 4]) / 4,
    }
    zeros = [0.0] * len(nodes)
    lat_mat = OrderedDict.fromkeys(nodes)
    for n in nodes:
        lat_mat[n] = dict(zip(nodes, row_for[n] if n in row_for else zeros))
    for n in nodes:
        if n not in row_for:
            lat_mat[n]['M1'] = lat_mat['M1'][n]
            lat_mat[n]['M2'] = lat_mat['M2'][n]
    # Entry [j][i] = lat_mat[node_i][node_j] (transpose-style, as original).
    mat = [[lat_mat[i][j] for i in nodes] for j in nodes]
    # Scale the strict upper triangle to make propagation asymmetric.
    for i in range(len(nodes)):
        for j in range(len(nodes)):
            if i < j:
                mat[i][j] = mat[i][j] * 100
    return mat
def prepare2_lat_mat(nodes):
    """Hard-coded latency matrix for the two-miner line topology plus one
    equidistant observer (OEQ).

    Row order is 'OLM1', 'M1', 'ORM1', 'OM', 'OLM2', 'M2', 'ORM2', 'OEQ';
    ``nodes`` itself is unused (kept for signature parity with the other
    prepare_* builders).
    """
    # Distances along the line, divided by 4 so M1<->M2 normalises to 1.
    OLM1_lat = (np.array([0, 1, 2, 3, 4, 5, 6, 4.5825]) / 4)
    M1_lat = (np.array([1, 0, 1, 2, 3, 4, 5, 4]) / 4)
    ORM1_lat = (np.array([2, 1, 0, 1, 2, 3, 4, 4.583]) / 4)
    OM_lat = (np.array([3, 2, 1, 0, 1, 2, 3, 4.472135955]) / 4)
    OLM2_lat = (np.array([4, 3, 2, 1, 0, 1, 2, 4.583]) / 4)
    M2_lat = (np.array([5, 4, 3, 2, 1, 0, 1, 4]) / 4)
    ORM2_lat = (np.array([6, 5, 4, 3, 2, 1, 0, 4.5825]) / 4)
    # 2-D coordinates of the line nodes; OEQ sits at (2, sqrt(12)) so it is
    # exactly 4 units from both M1 at (0, 0) and M2 at (4, 0).
    # (Removed dead code: an unused OrderedDict and the unused m1/m2 points.)
    lm1 = (-1, 0)
    rm1 = (1, 0)
    cm12 = (2, 0)
    lm2 = (3, 0)
    rm2 = (5, 0)
    m3 = (2, math.sqrt(12))
    OEQ_lat = (np.array([distance_between_2_points(lm1, m3),
                         4,
                         distance_between_2_points(rm1, m3),
                         distance_between_2_points(cm12, m3),
                         distance_between_2_points(m3, lm2),
                         4,
                         distance_between_2_points(m3, rm2),
                         0]) / 4)
    rows = [OLM1_lat, M1_lat, ORM1_lat, OM_lat,
            OLM2_lat, M2_lat, ORM2_lat, OEQ_lat]
    return [row.tolist() for row in rows]
def prepare1_coordinators_lat_mat_proportional(proportion):
    """Coordinator <-> single miner latency matrix at distance ``proportion``."""
    return [
        [0, proportion],
        [proportion, 0],
    ]
def prepare1f_coordinators_lat_mat_proportional(proportion):
    """Coordinator with two miners on the same side: M1 at 0.5, M2 a further
    ``proportion`` away; miners have no direct link (infinite latency)."""
    near = 0.5
    far = 0.5 + proportion
    blocked = float('inf')
    return [
        [0, near, far],
        [near, 0, blocked],
        [far, blocked, 0],
    ]
def prepare2_coordinators_lat_mat_proportional(proportion):
    """Coordinator on the unit M1-M2 segment, ``proportion`` of the way from
    M1; miners only communicate through the coordinator."""
    to_m1 = proportion * 1
    to_m2 = (1 - proportion) * 1
    blocked = float('inf')
    return [
        [0, to_m1, to_m2],
        [to_m1, 0, blocked],
        [to_m2, blocked, 0],
    ]
def prepare2_coordinators_lat_mat_proportional_M1_Farther(proportion, factor):
    """Same as prepare2_coordinators_lat_mat_proportional, but the M1 leg is
    stretched by ``factor`` (M1 is farther away than the split implies)."""
    to_m1 = proportion * factor
    to_m2 = (1 - proportion) * 1
    blocked = float('inf')
    return [
        [0, to_m1, to_m2],
        [to_m1, 0, blocked],
        [to_m2, blocked, 0],
    ]
def prepare3_coordinators_lat_mat_proportional(proportion):
    """Coordinator inside an equilateral triangle of three miners, placed
    ``proportion`` below the apex miner M3; miners have no direct links."""
    corners = [(0, 0), (1, 0), (0.5, math.sqrt(0.75))]
    cp = (0.5, math.sqrt(0.75) - proportion)
    dists = [distance_between_2_points(cp, corner) for corner in corners]
    blocked = float('inf')
    mat = [[0] + dists]
    for idx, d in enumerate(dists):
        row = [blocked] * (len(corners) + 1)
        row[0] = d
        row[idx + 1] = 0
        mat.append(row)
    return mat
def prepare4_p2p_lat_mat_proportional(proportion):
    """Fully-connected latency matrix for four miners on a unit square.

    ``proportion`` is accepted only for signature parity with the
    coordinator variants and is unused: the peer-to-peer distances are
    fixed (side = 1, diagonal = sqrt(2) truncated to 1.41421).
    (Removed dead code: the unused corner-coordinate locals m1-m4.)
    """
    side, diag = 1, 1.41421
    return [
        [0, side, diag, side],
        [side, 0, side, diag],
        [diag, side, 0, side],
        [side, diag, side, 0],
    ]
def prepare4_coordinators_lat_mat_proportional(proportion):
    """Coordinator inside the unit square of four miners, placed on the
    diagonal at (1-proportion, 1-proportion); miners have no direct links."""
    corners = [(0, 1), (1, 1), (1, 0), (0, 0)]
    cp = (1 - proportion, 1 - proportion)
    dists = [distance_between_2_points(cp, corner) for corner in corners]
    blocked = float('inf')
    mat = [[0] + dists]
    for idx, d in enumerate(dists):
        row = [blocked] * (len(corners) + 1)
        row[0] = d
        row[idx + 1] = 0
        mat.append(row)
    return mat
def prepare2_coordinators_lat_mat_middle():
    """Coordinator equidistant (0.5) from both miners; miners unlinked."""
    half = .5
    blocked = float('inf')
    return [
        [0, half, half],
        [half, 0, blocked],
        [half, blocked, 0],
    ]
def prepare2_coordinators_lat_mat_near_weaker():
    """Coordinator close (0.1) to the weaker miner M1 and far (0.9) from the
    stronger miner M2; miners have no direct link."""
    near, far = 0.1, 0.9
    blocked = float('inf')
    return [
        [0, near, far],
        [near, 0, blocked],
        [far, blocked, 0],
    ]
def prepare2_coordinators_lat_mat_near_stronger():
    """Coordinator close (0.1) to the stronger miner M2 and far (0.9) from
    the weaker miner M1; miners have no direct link."""
    far, near = 0.9, 0.1
    blocked = float('inf')
    return [
        [0, far, near],
        [far, 0, blocked],
        [near, blocked, 0],
    ]
def prepare2_coordinators_lat_mat_no_relay(nodes):
    """Two miners one unit either side of a middle node whose latencies are
    divided by 1000, so it adds effectively no relay delay.

    ``nodes`` is unused; the 3x3 topology is fixed (order M1, C, M2).
    """
    rows = [
        np.array([0, 1, 2]),
        np.array([1, 0, 1]) / 1000,
        np.array([2, 1, 0]),
    ]
    return [row.tolist() for row in rows]
def prepare3_lat_mat_farther(nodes):
    """Latency matrix for the two-miner line plus a far-away third miner M3.

    Expects 8 node names with 'M1' at index 1, 'M2' at index 5 and 'M3' at
    index 7 (matching the hard-coded rows).  The M3 row is deliberately NOT
    divided by 4: M3 is roughly 10x farther than everything else.  Non-miner
    nodes mirror the miner columns so the matrix stays symmetric.
    """
    # BUGFIX: the original used `is`/`is not` identity comparison on node
    # names; replaced with membership tests.
    row_for = {
        'M1': np.array([1, 0, 1, 2, 3, 4, 5, 4 * 10]) / 4,
        'M2': np.array([5, 4, 3, 2, 1, 0, 1, 4 * 10]) / 4,
        'M3': np.array([11, 10, 9, 8, 9, 10, 11, 0]),
    }
    zeros = [0.0] * len(nodes)
    lat_mat = OrderedDict.fromkeys(nodes)
    for n in nodes:
        lat_mat[n] = dict(zip(nodes, row_for[n] if n in row_for else zeros))
    for n in nodes:
        if n not in row_for:
            lat_mat[n]['M1'] = lat_mat['M1'][n]
            lat_mat[n]['M2'] = lat_mat['M2'][n]
            lat_mat[n]['M3'] = lat_mat['M3'][n]
    return [[lat_mat[i][j] for i in nodes] for j in nodes]
def prepare3_lat_mat_fixed_asymetric(nodes):
    """Build an asymmetric latency matrix: M1/M3 see M2 at latency 100
    (400/4) while M2's own row keeps small values.

    Same construction as prepare3_lat_mat_farther: miner rows are fixed
    vectors, other rows default to 0 and mirror the miners' entries.

    Returns:
        list[list[float]]: entry [j][i] is the latency between nodes[i]
        and nodes[j].
    """
    lat_mat = OrderedDict.fromkeys(nodes)
    M1_lat = (np.array([1, 0, 1, 2, 3, 400, 5, 4]) / 4)
    M2_lat = (np.array([5, 4, 3, 2, 1, 0, 1, 4]) / 4)
    M3_lat = (
        np.array([4.5825, 4, 4.583, 4.472135955, 4.583, 400, 4.5825, 0]) / 4)
    latencies = [0.0] * len(nodes)
    for n in nodes:
        lat_mat[n] = dict(zip(nodes, latencies))
        # Fixed: '==' instead of 'is' for string comparison (identity of
        # string literals is implementation-dependent interning).
        if n == 'M1':
            lat_mat[n] = dict(zip(nodes, M1_lat))
        if n == 'M2':
            lat_mat[n] = dict(zip(nodes, M2_lat))
        if n == 'M3':
            lat_mat[n] = dict(zip(nodes, M3_lat))
    for n in nodes:
        if n != 'M1' and n != 'M2' and n != 'M3':
            # Mirror miner->observer latencies into observer rows.
            lat_mat[n]['M1'] = lat_mat['M1'][n]
            lat_mat[n]['M2'] = lat_mat['M2'][n]
            lat_mat[n]['M3'] = lat_mat['M3'][n]
    lat_mat = [[lat_mat[i][j] for i in nodes] for j in nodes]
    return lat_mat
def prepare3_lat_mat(nodes):
    """Build an 8x8 latency matrix for miners M1, M2, M3 plus observers.

    M1/M2 rows are fixed vectors; M3's row is derived from 2-D geometry,
    placing M3 at (2, sqrt(12)) above the line of observer/miner points
    lm1..rm2 and measuring Euclidean distances (scaled by 1/4). Other
    rows default to 0 and then mirror the miners' entries for symmetry.

    Returns:
        list[list[float]]: entry [j][i] is the latency between nodes[i]
        and nodes[j].
    """
    lat_mat = OrderedDict.fromkeys(nodes)
    M1_lat = (np.array([1, 0, 1, 2, 3, 4, 5, 4]) / 4)
    M2_lat = (np.array([5, 4, 3, 2, 1, 0, 1, 4]) / 4)
    # Coordinates along the M1..M2 axis (observers and midpoints).
    lm1 = (-1, 0)
    rm1 = (1, 0)
    cm12 = (2, 0)
    lm2 = (3, 0)
    rm2 = (5, 0)
    m3 = (2, math.sqrt(12))
    M3_lat = (np.array([distance_between_2_points(lm1, m3),
                        4,
                        distance_between_2_points(rm1, m3),
                        distance_between_2_points(cm12, m3),
                        distance_between_2_points(m3, lm2),
                        4,
                        distance_between_2_points(m3, rm2),
                        0]) / 4)
    latencies = [0.0] * len(nodes)
    for n in nodes:
        lat_mat[n] = dict(zip(nodes, latencies))
        # Fixed: '==' instead of 'is' for string comparison (literal
        # identity relies on interning; SyntaxWarning since Python 3.8).
        if n == 'M1':
            lat_mat[n] = dict(zip(nodes, M1_lat))
        if n == 'M2':
            lat_mat[n] = dict(zip(nodes, M2_lat))
        if n == 'M3':
            lat_mat[n] = dict(zip(nodes, M3_lat))
    for n in nodes:
        if n != 'M1' and n != 'M2' and n != 'M3':
            # Mirror miner->observer latencies into observer rows.
            lat_mat[n]['M1'] = lat_mat['M1'][n]
            lat_mat[n]['M2'] = lat_mat['M2'][n]
            lat_mat[n]['M3'] = lat_mat['M3'][n]
    lat_mat = [[lat_mat[i][j] for i in nodes] for j in nodes]
    return lat_mat
def prepare5_lat_mat_fixed(nodes):
    #self.NODES_IDS = ['WA-US', 'SI-CN', 'RE-IS', 'LI-CH', 'MO-RU']
    '''# <location_1> <lat_1> <lng_1> <location_2> <lat_2> <lng_2> <dist. (in km)> <latency (in ms)>
WASHINGTON-DC-US 38.9047 -77.0164 SICHUAN-NA-CN 30.1333 102.9333 12338.40 197.41
WASHINGTON-DC-US 38.9047 -77.0164 REYKJAVÍK-NA-IS 64.1333 -21.9333 4512.89 72.21
WASHINGTON-DC-US 38.9047 -77.0164 LINTHAL-NA-CH 46.9167 9.0000 6703.91 107.26
WASHINGTON-DC-US 38.9047 -77.0164 MOSCOW-NA-RU 55.7500 37.6167 7820.54 125.13
SICHUAN-NA-CN 30.1333 102.9333 REYKJAVÍK-NA-IS 64.1333 -21.9333 8489.56 135.83
SICHUAN-NA-CN 30.1333 102.9333 LINTHAL-NA-CH 46.9167 9.0000 7891.06 126.26
SICHUAN-NA-CN 30.1333 102.9333 MOSCOW-NA-RU 55.7500 37.6167 5761.37 92.18
REYKJAVÍK-NA-IS 64.1333 -21.9333 LINTHAL-NA-CH 46.9167 9.0000 2680.24 42.88
REYKJAVÍK-NA-IS 64.1333 -21.9333 MOSCOW-NA-RU 55.7500 37.6167 3307.89 52.93
LINTHAL-NA-CH 46.9167 9.0000 MOSCOW-NA-RU 55.7500 37.61672196.05 35.14
'''
    # Order: ['WA-US', 'SI-CN', 'RE-IS', 'LI-CH', 'MO-RU'].
    # Upper triangle holds measured ms latencies scaled by 1/(1000*1.5);
    # -1 entries below the diagonal are placeholders filled by mirroring.
    WA_lat = np.array([0, 197.41, 72.21, 107.26, 125.13]) / (1000*1.5)
    SI_lat = np.array([-1, 0, 135.83, 126.26, 92.18]) / (1000*1.5)
    RE_lat = np.array([-1, -1, 0, 42.88, 52.93]) / (1000*1.5)
    LI_lat = np.array([-1, -1, -1, 0, 35.14]) / (1000*1.5)
    MO_lat = np.array([-1, -1, -1, -1, 0]) / (1000*1.5)
    lat_mat = [WA_lat, SI_lat, RE_lat, LI_lat, MO_lat]
    # Mirror the upper triangle into the lower half.
    for i, row in enumerate(lat_mat):
        for j in range(i):
            row[j] = lat_mat[j][i]
    return lat_mat
def prepare100_lat_mat_fixed_centrality(nodes):
    """Symmetric latency matrix for the 100-city node set.

    Pairwise latencies (ms) are read from 'evaluation/100_cities.txt'
    (each pair listed once; both directions registered) and scaled by
    1/(1000*1.5). Diagonal entries stay 0.
    """
    table = pd.read_csv('evaluation/100_cities.txt', delim_whitespace=True)
    pair_ms = defaultdict(dict)
    for _, rec in table.iterrows():
        a, b, ms = rec['location_1'], rec['location_2'], rec['latency_ms']
        pair_ms[a][b] = ms
        pair_ms[b][a] = ms
    size = len(nodes)
    lat_mat = [[0.0] * size for _ in range(size)]
    for r in range(size):
        for c in range(size):
            if r != c:
                lat_mat[r][c] = pair_ms[nodes[r]][nodes[c]] / (1000*1.5)
    return lat_mat
def prepare240_lat_mat_fixed_capital_centrality(nodes):
    """Symmetric latency matrix for the capitals node set.

    Pairwise latencies (ms) are read from
    'evaluation/cities_capitals_lat_lng_latency.txt' (each pair listed
    once; both directions registered) and scaled by 1/(1000*1.5).
    """
    table = pd.read_csv(
        'evaluation/cities_capitals_lat_lng_latency.txt', delim_whitespace=True)
    pair_ms = defaultdict(dict)
    for _, rec in table.iterrows():
        a, b, ms = rec['location_1'], rec['location_2'], rec['latency_ms']
        pair_ms[a][b] = ms
        pair_ms[b][a] = ms
    size = len(nodes)
    lat_mat = [[0.0] * size for _ in range(size)]
    for r in range(size):
        for c in range(size):
            if r != c:
                lat_mat[r][c] = pair_ms[nodes[r]][nodes[c]] / (1000*1.5)
    return lat_mat
def prepare15_lat_mat_ls_fixed_capital_centrality(nodes):
    """Symmetric latency matrix for the capitals node set, LS scaling.

    Same source file as prepare240_lat_mat_fixed_capital_centrality but
    latencies are scaled by 1/(1000*3.2) instead of 1/(1000*1.5).
    """
    table = pd.read_csv(
        'evaluation/cities_capitals_lat_lng_latency.txt', delim_whitespace=True)
    pair_ms = defaultdict(dict)
    for _, rec in table.iterrows():
        a, b, ms = rec['location_1'], rec['location_2'], rec['latency_ms']
        pair_ms[a][b] = ms
        pair_ms[b][a] = ms
    size = len(nodes)
    lat_mat = [[0.0] * size for _ in range(size)]
    for r in range(size):
        for c in range(size):
            if r != c:
                lat_mat[r][c] = pair_ms[nodes[r]][nodes[c]] / (1000*3.2)
    return lat_mat
def prepare240_lat_mat_cs_fixed_capital_centrality(nodes):
    """Symmetric latency matrix for the capitals node set, CS scaling.

    Same source file as prepare240_lat_mat_fixed_capital_centrality but
    latencies are scaled by 1/(1000*3.2*1.5).
    """
    table = pd.read_csv(
        'evaluation/cities_capitals_lat_lng_latency.txt', delim_whitespace=True)
    pair_ms = defaultdict(dict)
    for _, rec in table.iterrows():
        a, b, ms = rec['location_1'], rec['location_2'], rec['latency_ms']
        pair_ms[a][b] = ms
        pair_ms[b][a] = ms
    size = len(nodes)
    lat_mat = [[0.0] * size for _ in range(size)]
    for r in range(size):
        for c in range(size):
            if r != c:
                lat_mat[r][c] = pair_ms[nodes[r]][nodes[c]] / (1000*3.2*1.5)
    return lat_mat
def prepare15_lat_mat_fixed(nodes):
    """Build the symmetric 15x15 latency matrix from 'evaluation/adjlst-2.txt'.

    The adjacency file stores each city pair once, so the lookup tries
    both orientations. Latencies (ms) are scaled by 1/(1000*1.5).

    NOTE: the `nodes` argument is overwritten with the hard-coded city
    list below; the parameter is kept for signature compatibility.
    """
    latencies = pd.read_csv('evaluation/adjlst-2.txt', delim_whitespace=True)
    nodes = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
             'LINTHAL-NA-CH', 'MOSCOW-NA-RU', 'TBILISI-NA-GE', 'KIEV-NA-UK',
             'ANKARA-NA-TR', 'SKOPJE-NA-MK', 'HELSINKI-NA-FI', 'MANNHEIM-BW-DE',
             'SINGAPORE-NA-SG', 'ASHBURN-VA-US', 'FRANKFURT-HE-DE', 'NUREMBURG-BV-DE']
    lat_mat = [[0.0 for _ in nodes] for _ in nodes]
    for i in range(len(nodes)):
        for j in range(len(nodes)):
            if i != j:
                # Fixed: use a single combined boolean mask. The original
                # chained indexing applied a mask built from the full frame
                # to an already-filtered frame, which pandas only accepts
                # after reindexing (UserWarning) and is fragile.
                f1 = latencies[(latencies['location_1'] == nodes[i])
                               & (latencies['location_2'] == nodes[j])]
                if len(f1) == 0:
                    # Pair stored in the opposite orientation.
                    f1 = latencies[(latencies['location_2'] == nodes[i])
                                   & (latencies['location_1'] == nodes[j])]
                result = f1['latency_ms'].iloc[0]
                lat_mat[i][j] = result / (1000*1.5)
    return lat_mat
def prepare15_ls_lat_mat_fixed(nodes):
    """Build the 15x15 latency matrix from 'evaluation/adjlst-2.txt', LS scaling.

    Identical to prepare15_lat_mat_fixed but latencies (ms) are scaled
    by 1/(1000*3.2).

    NOTE: the `nodes` argument is overwritten with the hard-coded city
    list below; the parameter is kept for signature compatibility.
    """
    latencies = pd.read_csv('evaluation/adjlst-2.txt', delim_whitespace=True)
    nodes = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
             'LINTHAL-NA-CH', 'MOSCOW-NA-RU', 'TBILISI-NA-GE', 'KIEV-NA-UK',
             'ANKARA-NA-TR', 'SKOPJE-NA-MK', 'HELSINKI-NA-FI', 'MANNHEIM-BW-DE',
             'SINGAPORE-NA-SG', 'ASHBURN-VA-US', 'FRANKFURT-HE-DE', 'NUREMBURG-BV-DE']
    lat_mat = [[0.0 for _ in nodes] for _ in nodes]
    for i in range(len(nodes)):
        for j in range(len(nodes)):
            if i != j:
                # Fixed: combined boolean mask instead of chained indexing
                # with a mask from the unfiltered frame (reindex warning).
                f1 = latencies[(latencies['location_1'] == nodes[i])
                               & (latencies['location_2'] == nodes[j])]
                if len(f1) == 0:
                    f1 = latencies[(latencies['location_2'] == nodes[i])
                                   & (latencies['location_1'] == nodes[j])]
                result = f1['latency_ms'].iloc[0]
                lat_mat[i][j] = result / (1000*3.2)
    return lat_mat
def prepare15_cs_lat_mat_fixed(nodes):
    """Build the 15x15 latency matrix from 'evaluation/adjlst-2.txt', CS scaling.

    Identical to prepare15_lat_mat_fixed but latencies (ms) are scaled
    by 1/(1000*3.2*1.5).

    NOTE: the `nodes` argument is overwritten with the hard-coded city
    list below; the parameter is kept for signature compatibility.
    """
    latencies = pd.read_csv('evaluation/adjlst-2.txt', delim_whitespace=True)
    nodes = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
             'LINTHAL-NA-CH', 'MOSCOW-NA-RU', 'TBILISI-NA-GE', 'KIEV-NA-UK',
             'ANKARA-NA-TR', 'SKOPJE-NA-MK', 'HELSINKI-NA-FI', 'MANNHEIM-BW-DE',
             'SINGAPORE-NA-SG', 'ASHBURN-VA-US', 'FRANKFURT-HE-DE', 'NUREMBURG-BV-DE']
    lat_mat = [[0.0 for _ in nodes] for _ in nodes]
    for i in range(len(nodes)):
        for j in range(len(nodes)):
            if i != j:
                # Fixed: combined boolean mask instead of chained indexing
                # with a mask from the unfiltered frame (reindex warning).
                f1 = latencies[(latencies['location_1'] == nodes[i])
                               & (latencies['location_2'] == nodes[j])]
                if len(f1) == 0:
                    f1 = latencies[(latencies['location_2'] == nodes[i])
                                   & (latencies['location_1'] == nodes[j])]
                result = f1['latency_ms'].iloc[0]
                lat_mat[i][j] = result / (1000*3.2*1.5)
    return lat_mat
def to_dataframe_prepare_test_centrality_lat_mat_baseline(experiments_stats, nodes_ids):
    """Convert raw experiment stats into 'miner' and 'global' DataFrames.

    HPD tuples are rendered as "[x0, x1, x2]" strings; miner integer ids
    are replaced with the first three entries of `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[0]}, {x[1]}, {x[2]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(dict(zip(range(3), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe2(experiments_stats, nodes_ids, nodes_count=2):
    """Convert stats for the 2-miner / 8-node layout to DataFrames.

    HPD tuples keep only the miner slots (indices 1 and 5); miner ids
    are mapped through the first 8 entries of `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[1]}, {x[5]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(dict(zip(range(8), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe1_coordinators(experiments_stats, nodes_ids, nodes_count=2):
    """Convert coordinated 2-node stats to 'miner'/'global' DataFrames.

    HPD tuples render slots 0 and 1; miner ids map over all of
    `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[0]}, {x[1]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(
        dict(zip(range(len(nodes_ids)), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe2_coordinators(experiments_stats, nodes_ids, nodes_count=3):
    """Convert coordinated 2-miner stats (C, M1, M2) to DataFrames.

    HPD tuples render the miner slots (indices 1 and 2), skipping the
    coordinator at index 0.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[1]}, {x[2]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(
        dict(zip(range(len(nodes_ids)), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe3_coordinators(experiments_stats, nodes_ids, nodes_count=3):
    """Convert coordinated 3-miner stats (C, M1..M3) to DataFrames.

    HPD tuples render the miner slots (indices 1-3), skipping the
    coordinator at index 0.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[1]}, {x[2]}, {x[3]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(
        dict(zip(range(len(nodes_ids)), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe4(experiments_stats, nodes_ids, nodes_count=3):
    """Convert 4-node p2p stats to 'miner'/'global' DataFrames.

    HPD tuples render slots 0-3; miner ids map over all of `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[0]}, {x[1]}, {x[2]}, {x[3]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(
        dict(zip(range(len(nodes_ids)), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe4_coordinators(experiments_stats, nodes_ids, nodes_count=3):
    """Convert coordinated 4-miner stats (C, M1..M4) to DataFrames.

    HPD tuples render the miner slots (indices 1-4), skipping the
    coordinator at index 0.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[1]}, {x[2]}, {x[3]}, {x[4]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(
        dict(zip(range(len(nodes_ids)), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe3(experiments_stats, nodes_ids, nodes_count=3):
    """Convert stats for the 3-miner / 8-node layout to DataFrames.

    HPD tuples keep only the miner slots (indices 1, 5 and 7); miner
    ids are mapped through the first 8 entries of `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[1]}, {x[5]}, {x[7]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(dict(zip(range(8), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe5(experiments_stats, nodes_ids, nodes_count=5):
    """Convert 5-miner stats to 'miner'/'global' DataFrames.

    HPD tuples render slots 0-4; miner ids map through the first 5
    entries of `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[0]}, {x[1]}, {x[2]}, {x[3]}, {x[4]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(dict(zip(range(5), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe15(experiments_stats, nodes_ids, nodes_count=15):
    """Convert 15-node stats to 'miner'/'global' DataFrames.

    Only the first 5 HPD slots are rendered (the mining nodes); miner
    ids map through the first `nodes_count` entries of `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[0]}, {x[1]}, {x[2]}, {x[3]}, {x[4]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(dict(zip(range(nodes_count), nodes_ids)))
    return {'miner': miner_df, 'global': df}
def to_dataframe_real_bc(experiments_stats, nodes_ids):
    """Convert real-blockchain experiment stats to DataFrames.

    HPD tuples render slots 0-2; miner ids map over all of `nodes_ids`.
    """
    df = pd.DataFrame(experiments_stats)
    miner_df = pd.concat([pd.DataFrame(m) for m in df.miners])
    df.drop(columns=['miners'], inplace=True)

    def fmt(x):
        return f"[{x[0]}, {x[1]}, {x[2]}]"

    df.hpd = df.hpd.apply(fmt)
    miner_df.global_hpd = miner_df.global_hpd.apply(fmt)
    miner_df.id = miner_df.id.map(
        dict(zip(range(len(nodes_ids)), nodes_ids)))
    return {'miner': miner_df, 'global': df}
# Module-level coordinator-distance setting shared by the config classes.
coord_dist = 0
def COORDINATOR_DISTANCE(val=-1):
    """Get (and optionally set) the global coordinator distance.

    A non-negative `val` updates the stored distance; the current value
    is always returned. The default `val=-1` makes a bare call a pure
    getter.
    """
    global coord_dist
    if val < 0:
        return coord_dist
    coord_dist = val
    return coord_dist
class Default2SimulationConfigs(object):
    """Config for the 2-miner toy example on an 8-node topology
    (miners at slots 1 'M1' and 5 'M2'; the rest are observers)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "2miners_toy_example_"
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 8
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 1000 #100000 #Make it faster for the reviewers :)
        self.NUM_ITER = 10
        self.NODES_IDS = ['OLM1', 'M1', 'OM1', 'OM', 'OM2', 'M2', 'ORM2', 'OEQ']
        self.LATENCY_ADJACECY_MATRIX = prepare2_lat_mat(self.NODES_IDS)
        self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX) # 1
        # NOTE(review): the measured mean above is immediately overridden
        # by a fixed value of 1 — presumably intentional toggling; confirm.
        self.MEAN_NETWORK_LATENCY_S = 1
        # HPD configs: hash power is nonzero only at miner slots 1 and 5.
        self.HPD_CFG = [(0, 0.5, 0, 0, 0, 0.5, 0, 0),
                        (0, 0.4, 0, 0, 0, 0.6, 0, 0),
                        (0, 0.3, 0, 0, 0, 0.7, 0, 0),
                        (0, 0.2, 0, 0, 0, 0.8, 0, 0),
                        (0, 0.1, 0, 0, 0, 0.9, 0, 0)]
        #self.HPD_CFG = [(0, 0.5, 0, 0, 0, 0.5, 0, 0)]
        #self.HPD_CFG = [(0, x, 0, 0, 0, 1-x, 0, 0) for x in np.arange(0,1.01, 0.05)]
        # Hardness sweep expressed as multiples of the mean latency.
        self.HARDNESS_CFG = [10000 * self.MEAN_NETWORK_LATENCY_S,
                             1000 * self.MEAN_NETWORK_LATENCY_S,
                             100 * self.MEAN_NETWORK_LATENCY_S,
                             10 * self.MEAN_NETWORK_LATENCY_S,
                             #8 * self.MEAN_NETWORK_LATENCY_S,
                             #6 * self.MEAN_NETWORK_LATENCY_S,
                             #4 * self.MEAN_NETWORK_LATENCY_S,
                             #5 * self.MEAN_NETWORK_LATENCY_S,
                             #2 * self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S,
                             #self.MEAN_NETWORK_LATENCY_S / 2,
                             #self.MEAN_NETWORK_LATENCY_S / 4,
                             #self.MEAN_NETWORK_LATENCY_S / 5,
                             #self.MEAN_NETWORK_LATENCY_S / 6,
                             #self.MEAN_NETWORK_LATENCY_S / 8,
                             self.MEAN_NETWORK_LATENCY_S / 10,
                             self.MEAN_NETWORK_LATENCY_S / 100]
        #self.HARDNESS_CFG = [ 1.0*10.0**e for e in np.arange(-2, 4.1, 0.1)]
        #self.HARDNESS_CFG = [ 1.0*10.0**e for e in np.arange(-4.2, 4.2, 0.2)]
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe2
class Default1fCoordinatorSimulationConfigs(object):
    """Config for a coordinated run with nodes C, M1, M2, where the
    latency matrix depends on the current COORDINATOR_DISTANCE()."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "2 nodes 1 moving toy_example_coordinated_" + str(
            COORDINATOR_DISTANCE())
        self.PERSIST_DATA = True
        self.DURATION = 5 * Durations.HOUR
        self.NUM_MINERS = 3
        # NOTE(review): locals, never assigned to self — apparently unused.
        NUM_COORDINATORS = 1
        NUM_OBSERVERS = 0
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 10000
        self.NUM_ITER = 10
        self.NODES_IDS = ['C', 'M1', 'M2']
        # Distance parameter is passed as COORDINATOR_DISTANCE()/10.
        self.LATENCY_ADJACECY_MATRIX = prepare1f_coordinators_lat_mat_proportional(
            COORDINATOR_DISTANCE() / 10.0)
        print("This is the 1 moving miners coordinated latency matrix")
        print(self.LATENCY_ADJACECY_MATRIX)
        self.MEAN_NETWORK_LATENCY_S = 1
        self.HPD_CFG = [(0, .5, .5),
                        (0, .7, .3),
                        (0, .9, .1)]
        self.HARDNESS_CFG = [
            #10000 * self.MEAN_NETWORK_LATENCY_S,
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 10]
        self.SIMULATOR = SimulatorCoordinated
        self.TO_DATAFRAME = to_dataframe2_coordinators
class Default2CoordinatorSimulationConfigs(object):
    """Config for the 2-miner coordinated toy example (nodes C, M1, M2)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "2coordinators_toy_example_coordinated_"+str(COORDINATOR_DISTANCE())
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 3
        # NOTE(review): locals, never assigned to self — apparently unused.
        NUM_COORDINATORS = 1
        NUM_OBSERVERS = 0
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['C', 'M1', 'M2']
        self.LATENCY_ADJACECY_MATRIX = prepare2_coordinators_lat_mat_proportional(COORDINATOR_DISTANCE()/10.0)
        # self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX) # 1
        self.MEAN_NETWORK_LATENCY_S = 1
        # HPD configs: coordinator (slot 0) has no hash power.
        self.HPD_CFG = [(0, .5, .5),
                        (0, .4, .6),
                        (0, .3, .7),
                        (0, .2, .8),
                        (0, .1, .9)]
        #self.HPD_CFG = [(0, .1, .9)]
        #self.HPD_CFG = [(0, 0.5, 0, 0, 0, 0.5, 0, 0)]
        #self.HPD_CFG = [(0, x, 0, 0, 0, 1-x, 0, 0) for x in np.arange(0,1.01, 0.05)]
        # Dense hardness sweep around the mean latency.
        self.HARDNESS_CFG = [
            10000 * self.MEAN_NETWORK_LATENCY_S,
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            8 * self.MEAN_NETWORK_LATENCY_S,
            6 * self.MEAN_NETWORK_LATENCY_S,
            4 * self.MEAN_NETWORK_LATENCY_S,
            5 * self.MEAN_NETWORK_LATENCY_S,
            2 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 2,
            self.MEAN_NETWORK_LATENCY_S / 4,
            self.MEAN_NETWORK_LATENCY_S / 5,
            self.MEAN_NETWORK_LATENCY_S / 6,
            self.MEAN_NETWORK_LATENCY_S / 8,
            self.MEAN_NETWORK_LATENCY_S / 10,
            self.MEAN_NETWORK_LATENCY_S / 100
        ]
        #self.HARDNESS_CFG = [ self.MEAN_NETWORK_LATENCY_S]
        #self.HARDNESS_CFG = [ 1.0*10.0**e for e in np.arange(-2, 4.1, 0.1)]
        #self.HARDNESS_CFG = [ 1.0*10.0**e for e in np.arange(-4.2, 4.2, 0.2)]
        self.SIMULATOR = SimulatorCoordinated
        self.TO_DATAFRAME = to_dataframe2_coordinators
class Default2CoordinatorEquidistantSimulationConfigs(object):
    """Config for the 2-miner coordinated example, equidistant variant
    (same proportional latency matrix builder as the base config)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "2coordinators_toy_example_coordinated_equidistant" + str(
            COORDINATOR_DISTANCE())
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 3
        # NOTE(review): locals, never assigned to self — apparently unused.
        NUM_COORDINATORS = 1
        NUM_OBSERVERS = 0
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['C', 'M1', 'M2']
        self.LATENCY_ADJACECY_MATRIX = prepare2_coordinators_lat_mat_proportional(
            COORDINATOR_DISTANCE() / 10.0)
        self.MEAN_NETWORK_LATENCY_S = 1
        # HPD configs including extreme splits (.99/.01, .05/.95).
        self.HPD_CFG = [(0, .5, .5),
                        (0, .99, .01),
                        (0, .4, .6),
                        (0, .3, .7),
                        (0, .2, .8),
                        (0, .1, .9),
                        (0, .05, .95)
                        ]
        self.HARDNESS_CFG = [
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 10,
            self.MEAN_NETWORK_LATENCY_S / 100
        ]
        self.SIMULATOR = SimulatorCoordinated
        self.TO_DATAFRAME = to_dataframe2_coordinators
class Default2CoordinatorFartherSimulationConfigs(object):
    """Config for the 2-miner coordinated example with M1 placed farther
    (factor 2 passed to the latency matrix builder)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "2coordinators_toy_example_coordinated_farther" + str(
            COORDINATOR_DISTANCE())
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 3
        # NOTE(review): locals, never assigned to self — apparently unused.
        NUM_COORDINATORS = 1
        NUM_OBSERVERS = 0
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['C', 'M1', 'M2']
        self.LATENCY_ADJACECY_MATRIX = prepare2_coordinators_lat_mat_proportional_M1_Farther(
            COORDINATOR_DISTANCE() / 10.0, 2)
        # self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX) # 1
        self.MEAN_NETWORK_LATENCY_S = 1
        self.HPD_CFG = [(0, .5, .5),
                        (0, .99, .01),
                        (0, .4, .6),
                        (0, .3, .7),
                        (0, .2, .8),
                        (0, .1, .9),
                        (0, .05, .95)
                        ]
        self.HARDNESS_CFG = [
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 10,
            self.MEAN_NETWORK_LATENCY_S / 100
        ]
        # self.HARDNESS_CFG = [ self.MEAN_NETWORK_LATENCY_S]
        # self.HARDNESS_CFG = [ 1.0*10.0**e for e in np.arange(-2, 4.1, 0.1)]
        # self.HARDNESS_CFG = [ 1.0*10.0**e for e in np.arange(-4.2, 4.2, 0.2)]
        self.SIMULATOR = SimulatorCoordinated
        self.TO_DATAFRAME = to_dataframe2_coordinators
class Default3CoordinatorSimulationConfigs(object):
    """Config for the 3-miner coordinated toy example (C, M1, M2, M3)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "3coordinators_toy_example_coordinated_"+str(COORDINATOR_DISTANCE())
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 4
        # NOTE(review): locals, never assigned to self — apparently unused.
        NUM_COORDINATORS = 1
        NUM_OBSERVERS = 0
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['C', 'M1', 'M2', 'M3']
        self.LATENCY_ADJACECY_MATRIX = prepare3_coordinators_lat_mat_proportional(COORDINATOR_DISTANCE()/10.0)
        print("This is the 3 miners coordinated latency matrix")
        print(self.LATENCY_ADJACECY_MATRIX)
        self.MEAN_NETWORK_LATENCY_S = 1
        #Switch M2 and M3 hash powers to simulate moving the coordinator from the weaker node
        self.HPD_CFG = [(0, .33, .33, .33),
                        (0, .3, .3, .4),
                        (0, .2, .2, .6),
                        (0, .1, .1, .8)]
        self.HARDNESS_CFG = [
            10000 * self.MEAN_NETWORK_LATENCY_S,
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 10,
            self.MEAN_NETWORK_LATENCY_S / 100
        ]
        self.SIMULATOR = SimulatorCoordinated
        self.TO_DATAFRAME = to_dataframe3_coordinators
class Default4P2PSimulationConfigs(object):
    """Config for the 4-miner peer-to-peer example (uncoordinated
    Simulator, nodes C + M1..M4)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "4p2p_toy_example_coordinated_" + str(
            COORDINATOR_DISTANCE())
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 4
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['C', 'M1', 'M2', 'M3', 'M4']
        self.LATENCY_ADJACECY_MATRIX = prepare4_p2p_lat_mat_proportional(
            COORDINATOR_DISTANCE() / 10.0)
        # NOTE(review): message says "3 miners" but this is the 4-miner
        # p2p config — copy/paste slip in the log text.
        print("This is the 3 miners coordinated latency matrix")
        print(self.LATENCY_ADJACECY_MATRIX)
        self.MEAN_NETWORK_LATENCY_S = 1
        # 4-slot HPD tuples (no coordinator slot in the p2p case).
        self.HPD_CFG = [(0.25, 0.25, 0.25, 0.25),
                        (0.7, 0.1, 0.1, 0.1)]
        self.HARDNESS_CFG = [
            10000 * self.MEAN_NETWORK_LATENCY_S,
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 10,
            self.MEAN_NETWORK_LATENCY_S / 100
        ]
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe4
class Default4CoordinatorSimulationConfigs(object):
    """Config for the 4-miner coordinated example (C, M1..M4)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "4coordinators_toy_example_coordinated_" + str(
            COORDINATOR_DISTANCE())
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 5
        # NOTE(review): locals, never assigned to self — apparently unused.
        NUM_COORDINATORS = 1
        NUM_OBSERVERS = 0
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['C', 'M1', 'M2', 'M3', 'M4']
        self.LATENCY_ADJACECY_MATRIX = prepare4_coordinators_lat_mat_proportional(
            COORDINATOR_DISTANCE() / 10.0)
        # NOTE(review): message says "3 miners" in the 4-miner config —
        # copy/paste slip in the log text.
        print("This is the 3 miners coordinated latency matrix")
        print(self.LATENCY_ADJACECY_MATRIX)
        self.MEAN_NETWORK_LATENCY_S = 1
        self.HPD_CFG = [(0, 0.25, 0.25, 0.25, 0.25),
                        (0, 0.7, 0.1, 0.1, 0.1)]
        #(0, 0.1, 0.7, 0.1, 0.1)
        self.HARDNESS_CFG = [
            10000 * self.MEAN_NETWORK_LATENCY_S,
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 10,
            self.MEAN_NETWORK_LATENCY_S / 100
        ]
        self.SIMULATOR = SimulatorCoordinated
        self.TO_DATAFRAME = to_dataframe4_coordinators
class Default3SimulationConfigs(object):
    """Config for the 3-miner paper example on an 8-node topology
    (miners at slots 1 'M1', 5 'M2' and 7 'M3')."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "3miners_toy_example_paper_"
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 8
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['OLM1', 'M1', 'OM1', 'OM', 'OM2', 'M2', 'ORM2', 'M3']
        # HPD configs: hash power nonzero only at miner slots 1, 5 and 7.
        self.HPD_CFG = [(0, .33, 0, 0, 0, .33, 0, .33),
                        (0, .3, 0, 0, 0, .4, 0, .3),
                        (0, .2, 0, 0, 0, .6, 0, .2),
                        (0, .1, 0, 0, 0, .8, 0, .1)]
        #self.HPD_CFG = [(0, .33, 0, 0, 0, .33, 0, .33)]
        self.LATENCY_ADJACECY_MATRIX = prepare3_lat_mat(self.NODES_IDS)
        # NOTE(review): the measured mean below is immediately overridden
        # by a fixed value of 1 — presumably intentional toggling; confirm.
        self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
        self.MEAN_NETWORK_LATENCY_S = 1
        # The triple-quoted block below is disabled alternative sweeps.
        '''self.HARDNESS_CFG = [100000 * self.MEAN_NETWORK_LATENCY_S,
10000 * self.MEAN_NETWORK_LATENCY_S,
1000 * self.MEAN_NETWORK_LATENCY_S,
100 * self.MEAN_NETWORK_LATENCY_S,
10 * self.MEAN_NETWORK_LATENCY_S,
self.MEAN_NETWORK_LATENCY_S,
self.MEAN_NETWORK_LATENCY_S / 10,
self.MEAN_NETWORK_LATENCY_S / 100,
17, 600
]
self.HARDNESS_CFG = [ 1.0*10.0**e for e in np.arange(-4.2, 4.2, 0.2)]'''
        self.HARDNESS_CFG = [10000 * self.MEAN_NETWORK_LATENCY_S,
                             1000 * self.MEAN_NETWORK_LATENCY_S,
                             100 * self.MEAN_NETWORK_LATENCY_S,
                             10 * self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S / 10,
                             self.MEAN_NETWORK_LATENCY_S / 100]
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe3
class Default3fSimulationConfigs(object):
    """Config for the 3-miner example with M3 placed farther away
    (uses prepare3_lat_mat_farther); single iteration."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "3miners_toy_example"
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 8
        self.SEED = 0
        self.MEAN_NETWORK_LATENCY_S = 1
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 1
        self.NODES_IDS = ['OLM1', 'M1', 'OM1', 'OM', 'OM2', 'M2', 'ORM2', 'M3']
        # Equal hash power split between the three miners.
        self.HPD_CFG = [(0, .33, 0, 0, 0, .33, 0, .33)]
        self.HARDNESS_CFG = [1000 * self.MEAN_NETWORK_LATENCY_S,
                             100 * self.MEAN_NETWORK_LATENCY_S,
                             10 * self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S / 10,
                             self.MEAN_NETWORK_LATENCY_S / 100]
        self.LATENCY_ADJACECY_MATRIX = prepare3_lat_mat_farther(self.NODES_IDS)
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe3
class Default5SimulationConfigs(object):
    """Config for the 5-miner paper example using real inter-city
    latencies (fixed 5x5 matrix)."""

    def __init__(self):
        '''
        # <location_1> <lat_1> <lng_1> <location_2> <lat_2> <lng_2> <dist. (in km)> <latency (in ms)>
        WASHINGTON-DC-US 38.9047 -77.0164 SICHUAN-NA-CN 30.1333 102.9333 12338.40 197.41
        WASHINGTON-DC-US 38.9047 -77.0164 REYKJAVÍK-NA-IS 64.1333 -21.9333 4512.89 72.21
        WASHINGTON-DC-US 38.9047 -77.0164 LINTHAL-NA-CH 46.9167 9.0000 6703.91 107.26
        WASHINGTON-DC-US 38.9047 -77.0164 MOSCOW-NA-RU 55.7500 37.6167 7820.54 125.13
        SICHUAN-NA-CN 30.1333 102.9333 REYKJAVÍK-NA-IS 64.1333 -21.9333 8489.56 135.83
        SICHUAN-NA-CN 30.1333 102.9333 LINTHAL-NA-CH 46.9167 9.0000 7891.06 126.26
        SICHUAN-NA-CN 30.1333 102.9333 MOSCOW-NA-RU 55.7500 37.6167 5761.37 92.18
        REYKJAVÍK-NA-IS 64.1333 -21.9333 LINTHAL-NA-CH 46.9167 9.0000 2680.24 42.88
        REYKJAVÍK-NA-IS 64.1333 -21.9333 MOSCOW-NA-RU 55.7500 37.6167 3307.89 52.93
        LINTHAL-NA-CH 46.9167 9.0000 MOSCOW-NA-RU 55.7500 37.61672196.05 35.14
        https://datalight.me/blog/researches/infographics/datalight-publishes-a-list-of-countries-with-the-largest-number-of-bitcoin-nodes/
        np.array([2625, 411, 698, 159, 276])/4169
        array([0.6296474 , 0.09858479, 0.16742624, 0.03813864, 0.06620293])
        '''
        self.EXPERIMENT_IDENTIFIER = "5miners_paper"
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 5
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        # Short ids are immediately replaced by the full city names below.
        self.NODES_IDS = ['WA-US', 'SI-CN', 'RE-IS', 'LI-CH', 'MO-RU']
        self.NODES_IDS = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
                          'LINTHAL-NA-CH', 'MOSCOW-NA-RU']
        self.HPD_CFG = [(0.2, 0.2, 0.2, 0.2, 0.2),
                        (0.05, 0.8, 0.07, 0.03, 0.05),
                        (0.62, 0.1, 0.17, 0.04, 0.07),
                        ]
        self.LATENCY_ADJACECY_MATRIX = prepare5_lat_mat_fixed(self.NODES_IDS)
        self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
        self.HARDNESS_CFG = [10000 * self.MEAN_NETWORK_LATENCY_S,
                             1000 * self.MEAN_NETWORK_LATENCY_S,
                             100 * self.MEAN_NETWORK_LATENCY_S,
                             10 * self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S / 10,
                             self.MEAN_NETWORK_LATENCY_S / 100
                             ]
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe5
class Default5EthereumSimulationConfigs(object):
    """Config for the 5-miner Ethereum-style paper example (same fixed
    5x5 latency matrix, different HPD mixes and hardness range)."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "5miners_ethereum_paper"
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 5
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        # Short ids are immediately replaced by the full city names below.
        self.NODES_IDS = ['WA-US', 'SI-CN', 'RE-IS', 'LI-CH', 'MO-RU']
        self.NODES_IDS = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
                          'LINTHAL-NA-CH', 'MOSCOW-NA-RU']
        self.HPD_CFG = [(0.2, 0.2, 0.2, 0.2, 0.2),
                        (0.25, 0.25, 0.12, 0.11, 0.05),
                        (0.25, 0.25, 0.05, 0.11, 0.12),
                        (0.05, 0.25, 0.12, 0.11, 0.25),
                        (0.25, 0.05, 0.12, 0.11, 0.25),
                        (0.05, 0.11, 0.25, 0.25, 0.12),
                        ]
        self.LATENCY_ADJACECY_MATRIX = prepare5_lat_mat_fixed(self.NODES_IDS)
        self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
        self.HARDNESS_CFG = [
            # 10000 * self.MEAN_NETWORK_LATENCY_S,
            1000 * self.MEAN_NETWORK_LATENCY_S,
            100 * self.MEAN_NETWORK_LATENCY_S,
            10 * self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S,
            self.MEAN_NETWORK_LATENCY_S / 10,
            # self.MEAN_NETWORK_LATENCY_S / 100
        ]
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe5
class Default15SimulationConfigs(object):
    """Config for the 15-node paper example: 5 mining cities (first five
    NODES_IDS entries carry all hash power) plus 10 observers."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "15miners_observers_paper_"
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 15
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
                          'LINTHAL-NA-CH', 'MOSCOW-NA-RU', 'TBILISI-NA-GE', 'KIEV-NA-UK',
                          'ANKARA-NA-TR', 'SKOPJE-NA-MK', 'HELSINKI-NA-FI', 'MANNHEIM-BW-DE',
                          'SINGAPORE-NA-SG', 'ASHBURN-VA-US', 'FRANKFURT-HE-DE', 'NUREMBURG-BV-DE']
        self.LATENCY_ADJACECY_MATRIX = prepare15_lat_mat_fixed(self.NODES_IDS)
        # Mean latency over the 5 mining nodes only.
        # Fixed: the original `MATRIX[0:5][0:5]` is NOT a 5x5 submatrix —
        # the second [0:5] re-slices the same list of the first 5 rows,
        # so the mean was taken over 5 full 15-wide rows. Use a proper
        # 2-D slice instead.
        self.MEAN_NETWORK_LATENCY_S = np.mean(
            np.array(self.LATENCY_ADJACECY_MATRIX)[0:5, 0:5])
        # HPD configs: hash power nonzero only in the first 5 slots.
        self.HPD_CFG = [(0.2, 0.2, 0.2, 0.2, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.05, 0.8, 0.07, 0.03, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.62, 0.1, 0.17, 0.04, 0.07, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.05, 0.8, 0.05, 0.05, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.04, 0.85, 0.04, 0.03, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
                        ]
        # Hardness sweep; 17 and 600 are absolute values (seconds),
        # matching Ethereum-like and Bitcoin-like block intervals.
        self.HARDNESS_CFG = [10000 * self.MEAN_NETWORK_LATENCY_S,
                             1000 * self.MEAN_NETWORK_LATENCY_S,
                             100 * self.MEAN_NETWORK_LATENCY_S,
                             10 * self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S / 10,
                             self.MEAN_NETWORK_LATENCY_S / 100,
                             17, 600]
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe15
class Default15LSSimulationConfigs(object):
    """Config for the 15-node example with LS latency scaling
    (prepare15_ls_lat_mat_fixed); mean taken over the full matrix."""

    def __init__(self):
        self.EXPERIMENT_IDENTIFIER = "15LSminers_observers_paper_"
        self.PERSIST_DATA = True
        self.DURATION = 2 * Durations.HOUR
        self.NUM_MINERS = 15
        self.SEED = 0
        self.NUM_BLOCKS_TO_GENERATE = 100000
        self.NUM_ITER = 10
        self.NODES_IDS = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
                          'LINTHAL-NA-CH', 'MOSCOW-NA-RU', 'TBILISI-NA-GE', 'KIEV-NA-UK',
                          'ANKARA-NA-TR', 'SKOPJE-NA-MK', 'HELSINKI-NA-FI', 'MANNHEIM-BW-DE',
                          'SINGAPORE-NA-SG', 'ASHBURN-VA-US', 'FRANKFURT-HE-DE', 'NUREMBURG-BV-DE']
        self.LATENCY_ADJACECY_MATRIX = prepare15_ls_lat_mat_fixed(self.NODES_IDS)
        self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
        # HPD configs: hash power nonzero only in the first 5 slots.
        self.HPD_CFG = [(0.2, 0.2, 0.2, 0.2, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.05, 0.8, 0.07, 0.03, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.62, 0.1, 0.17, 0.04, 0.07, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.05, 0.8, 0.05, 0.05, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
                        (0.04, 0.85, 0.04, 0.03, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
                        ]
        # 17 and 600 are absolute hardness values (seconds).
        self.HARDNESS_CFG = [10000 * self.MEAN_NETWORK_LATENCY_S,
                             1000 * self.MEAN_NETWORK_LATENCY_S,
                             100 * self.MEAN_NETWORK_LATENCY_S,
                             10 * self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S,
                             self.MEAN_NETWORK_LATENCY_S / 10,
                             self.MEAN_NETWORK_LATENCY_S / 100,
                             17, 600]
        #self.HARDNESS_CFG = [17, 600]
        self.SIMULATOR = Simulator
        self.TO_DATAFRAME = to_dataframe15
class Default15CSSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "15CSminers_observers_paper_"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 15
self.SEED = 0
self.NUM_BLOCKS_TO_GENERATE = 100000
self.NUM_ITER = 10
self.NODES_IDS = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
'LINTHAL-NA-CH', 'MOSCOW-NA-RU', 'TBILISI-NA-GE', 'KIEV-NA-UK',
'ANKARA-NA-TR', 'SKOPJE-NA-MK', 'HELSINKI-NA-FI', 'MANNHEIM-BW-DE',
'SINGAPORE-NA-SG', 'ASHBURN-VA-US', 'FRANKFURT-HE-DE', 'NUREMBURG-BV-DE']
self.LATENCY_ADJACECY_MATRIX = prepare15_cs_lat_mat_fixed(self.NODES_IDS)
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
self.HPD_CFG = [(0.2, 0.2, 0.2, 0.2, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0.05, 0.8, 0.07, 0.03, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0.62, 0.1, 0.17, 0.04, 0.07, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0.05, 0.8, 0.05, 0.05, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0.04, 0.85, 0.04, 0.03, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
]
self.HARDNESS_CFG = [10000 * self.MEAN_NETWORK_LATENCY_S,
1000 * self.MEAN_NETWORK_LATENCY_S,
100 * self.MEAN_NETWORK_LATENCY_S,
10 * self.MEAN_NETWORK_LATENCY_S,
self.MEAN_NETWORK_LATENCY_S,
self.MEAN_NETWORK_LATENCY_S / 10,
self.MEAN_NETWORK_LATENCY_S / 100,
17, 600]
#self.HARDNESS_CFG = [17, 600]
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe15
class DefaultCentralitySimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "centrality_miners_observers_paper_"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 100
self.SEED = 33
self.NUM_BLOCKS_TO_GENERATE = 100000
self.NUM_ITER = 10
cities = pd.read_csv('evaluation/100_cities_lat_lng.txt', delim_whitespace=True)
self.NODES_IDS = cities['city'].tolist()
# print('centrality_miners_observers_paper')
# print(self.NODES_IDS)
self.HPD_CFG = [tuple(1 for i in range(0, 100))]
self.LATENCY_ADJACECY_MATRIX = prepare100_lat_mat_fixed_centrality(self.NODES_IDS)
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
self.HARDNESS_CFG = [17, 600]
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe15
class DefaultCapitalsCentralitySimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "capitals_centrality_miners_observers_paper_"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 240
self.SEED = 33
self.NUM_BLOCKS_TO_GENERATE = 100000
self.NUM_ITER = 10
cities = pd.read_csv('evaluation/cities_capitals_lat_lng.txt', delim_whitespace=True)
self.NODES_IDS = cities['city'].tolist()
# print(self.NODES_IDS)
self.HPD_CFG = [tuple(1 for i in range(0, 240))]
self.LATENCY_ADJACECY_MATRIX = prepare240_lat_mat_fixed_capital_centrality(self.NODES_IDS)
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
self.HARDNESS_CFG = [self.MEAN_NETWORK_LATENCY_S/10.0]
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe15
class DefaultCapitalsCentralityLSSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "capitals_centrality_ls_miners_observers_paper_"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 240
self.SEED = 33
self.NUM_BLOCKS_TO_GENERATE = 100000
self.NUM_ITER = 10
cities = pd.read_csv('evaluation/cities_capitals_lat_lng.txt', delim_whitespace=True)
self.NODES_IDS = cities['city'].tolist()
# print(self.NODES_IDS)
self.HPD_CFG = [tuple(1 for i in range(0, 240))]
self.LATENCY_ADJACECY_MATRIX = prepare15_lat_mat_ls_fixed_capital_centrality(
self.NODES_IDS)
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
#self.HARDNESS_CFG = [600]
self.HARDNESS_CFG = [17, 600]
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe15
class DefaultCapitalsCentralityCSSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "capitals_centrality_cs_miners_observers_paper_"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 240
self.SEED = 33
self.NUM_BLOCKS_TO_GENERATE = 100000
self.NUM_ITER = 10
cities = pd.read_csv('evaluation/cities_capitals_lat_lng.txt', delim_whitespace=True)
self.NODES_IDS = cities['city'].tolist()
# print(self.NODES_IDS)
self.HPD_CFG = [tuple(1 for i in range(0, 240))]
self.LATENCY_ADJACECY_MATRIX = prepare240_lat_mat_cs_fixed_capital_centrality(
self.NODES_IDS)
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)
#self.HARDNESS_CFG = [600]
self.HARDNESS_CFG = [17, 600]
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe15
class DefaultAttackSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "attack_miners_observers_paper"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 15
self.SEED = 1
self.NUM_BLOCKS_TO_GENERATE = 100000
self.NUM_ITER = 10
self.NODES_IDS = ['WASHINGTON-DC-US', 'SICHUAN-NA-CN', 'REYKJAVÍK-NA-IS',
'LINTHAL-NA-CH', 'MOSCOW-NA-RU', 'TBILISI-NA-GE', 'KIEV-NA-UK',
'ANKARA-NA-TR', 'SKOPJE-NA-MK', 'HELSINKI-NA-FI', 'MANNHEIM-BW-DE',
'SINGAPORE-NA-SG', 'ASHBURN-VA-US', 'FRANKFURT-HE-DE', 'NUREMBURG-BV-DE']
self.HPD_CFG = [ # (0.049, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.2, 0.05),
(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
# (1, 1, 1, 1, 1, 0 ,0, 0, 0, 0, 0, 0, 0, 0, 0),
# (0.02, 0.02, 0.02, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.02, 0.02, 0.25, 0.1),
# (0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.45, 0.04)
]
self.LATENCY_ADJACECY_MATRIX = prepare15_lat_mat_fixed(self.NODES_IDS)
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)/1000.0
self.HARDNESS_CFG = [600]
# self.HARDNESS_CFG = [1000*self.MEAN_NETWORK_LATENCY_S, 600]
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe15
class RealLifeExpSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "top_miners_bitcoin_exp"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 19
self.SEED = 0
self.MEAN_NETWORK_LATENCY_S = 0.3
self.NUM_BLOCKS_TO_GENERATE = 1000
self.NUM_ITER = 2
self.NODES_IDS = ["BTC.com",
"AntPool",
"F2Pool",
"SlushPool",
"Poolin",
"ViaBTC",
"BTC.TOP",
"unknown",
"Huobi.pool",
"BitFury",
"BitClub",
"1M1X",
"Bitcoin.com",
"DPOOL",
"WAYI.CN",
"Bixin",
"tigerpool.net",
"KanoPool",
"BitcoinRussia"]
self.HPD_CFG = [(17.19,
12.40,
11.35,
9.90,
9.68,
7.05,
6.85,
6.27,
4.15,
4.06,
2.65,
1.96,
1.96,
1.81,
1.25,
1.00,
0.38,
0.09,
0.02)]
self.HARDNESS_CFG = [1000 * self.MEAN_NETWORK_LATENCY_S, 100 * self.MEAN_NETWORK_LATENCY_S, 10 * self.MEAN_NETWORK_LATENCY_S,
1 * self.MEAN_NETWORK_LATENCY_S, .1 * self.MEAN_NETWORK_LATENCY_S]
lat_mat = Exp_LatD(self.NUM_MINERS, self.MEAN_NETWORK_LATENCY_S, 0).tolist()
self.LATENCY_ADJACECY_MATRIX = lat_mat
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)/1000.0
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe_real_bc
class RealLifeEquSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "top_miners_bitcoin_equ"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 19
self.SEED = 0
self.MEAN_NETWORK_LATENCY_S = 0.3
self.NUM_BLOCKS_TO_GENERATE = 1000
self.NUM_ITER = 2
self.NODES_IDS = ["BTC.com",
"AntPool",
"F2Pool",
"SlushPool",
"Poolin",
"ViaBTC",
"BTC.TOP",
"unknown",
"Huobi.pool",
"BitFury",
"BitClub",
"1M1X",
"Bitcoin.com",
"DPOOL",
"WAYI.CN",
"Bixin",
"tigerpool.net",
"KanoPool",
"BitcoinRussia"]
self.HPD_CFG = [(17.19,
12.40,
11.35,
9.90,
9.68,
7.05,
6.85,
6.27,
4.15,
4.06,
2.65,
1.96,
1.96,
1.81,
1.25,
1.00,
0.38,
0.09,
0.02)]
self.HARDNESS_CFG = [1000 * self.MEAN_NETWORK_LATENCY_S, 100 * self.MEAN_NETWORK_LATENCY_S, 10 * self.MEAN_NETWORK_LATENCY_S,
1 * self.MEAN_NETWORK_LATENCY_S, .1 * self.MEAN_NETWORK_LATENCY_S]
lat_mat = Equ_LatD(self.NUM_MINERS, self.MEAN_NETWORK_LATENCY_S, 0).tolist()
self.LATENCY_ADJACECY_MATRIX = lat_mat
self.MEAN_NETWORK_LATENCY_S = np.mean(self.LATENCY_ADJACECY_MATRIX)/1000.0
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe_real_bc
class RealLifeExpPooledSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "top_miners_bitcoin_exp_pooled"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 19
self.SEED = 0
self.MEAN_NETWORK_LATENCY_S = 0.3
self.NUM_BLOCKS_TO_GENERATE = 10000
self.NUM_ITER = 1
NODES_NAMES = ["BTC.com",
"AntPool",
"F2Pool",
"SlushPool",
"Poolin",
"ViaBTC",
"BTC.TOP",
"unknown",
"Huobi.pool",
"BitFury",
"BitClub",
"1M1X",
"Bitcoin.com",
"DPOOL",
"WAYI.CN",
"Bixin"]
self.HPD_CFG = [(17,
12,
11,
10,
10,
7,
7,
6,
4,
4,
3,
2,
2,
2,
1,
1)]
self.NUM_MINERS = sum(self.HPD_CFG[0])
pools = self.HPD_CFG[0]
self.HPD_CFG = [tuple([(1)] * self.NUM_MINERS)]
self.NODES_IDS = []
for i in range(len(NODES_NAMES)):
for j in range(pools[i]):
self.NODES_IDS.append(NODES_NAMES[i] + "_" + str(j))
self.HARDNESS_CFG = [1000 * self.MEAN_NETWORK_LATENCY_S, 100 * self.MEAN_NETWORK_LATENCY_S, 10 * self.MEAN_NETWORK_LATENCY_S,
1 * self.MEAN_NETWORK_LATENCY_S, .1 * self.MEAN_NETWORK_LATENCY_S]
lat_mat = Exp_pooled_LatD(
self.NUM_MINERS, self.MEAN_NETWORK_LATENCY_S, pools).tolist()
self.LATENCY_ADJACECY_MATRIX = lat_mat
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe_real_bc
class RealLifeEquPooledSimulationConfigs(object):
def __init__(self):
self.EXPERIMENT_IDENTIFIER = "top_miners_bitcoin_equ_pooled"
self.PERSIST_DATA = True
self.DURATION = 2 * Durations.HOUR
self.NUM_MINERS = 19
self.SEED = 0
self.MEAN_NETWORK_LATENCY_S = 0.3
self.NUM_BLOCKS_TO_GENERATE = 10000
self.NUM_ITER = 1
NODES_NAMES = ["BTC.com",
"AntPool",
"F2Pool",
"SlushPool",
"Poolin",
"ViaBTC",
"BTC.TOP",
"unknown",
"Huobi.pool",
"BitFury",
"BitClub",
"1M1X",
"Bitcoin.com",
"DPOOL",
"WAYI.CN",
"Bixin"]
self.HPD_CFG = [(17,
12,
11,
10,
10,
7,
7,
6,
4,
4,
3,
2,
2,
2,
1,
1)]
self.NUM_MINERS = sum(self.HPD_CFG[0])
pools = self.HPD_CFG[0]
self.HPD_CFG = [tuple([(1)] * self.NUM_MINERS)]
self.NODES_IDS = []
for i in range(len(NODES_NAMES)):
for j in range(pools[i]):
self.NODES_IDS.append(NODES_NAMES[i] + "_" + str(j))
self.HARDNESS_CFG = [1000 * self.MEAN_NETWORK_LATENCY_S, 100 * self.MEAN_NETWORK_LATENCY_S, 10 * self.MEAN_NETWORK_LATENCY_S,
1 * self.MEAN_NETWORK_LATENCY_S, .1 * self.MEAN_NETWORK_LATENCY_S]
lat_mat = Equ_pooled_LatD(
self.NUM_MINERS, self.MEAN_NETWORK_LATENCY_S, pools).tolist()
self.LATENCY_ADJACECY_MATRIX = lat_mat
self.SIMULATOR = Simulator
self.TO_DATAFRAME = to_dataframe_real_bc
class DefaultSimulationConfigs(Default2SimulationConfigs):
pass
| 38.381425 | 139 | 0.552847 | 9,186 | 65,709 | 3.72469 | 0.047137 | 0.013152 | 0.081982 | 0.12024 | 0.922432 | 0.90966 | 0.890808 | 0.863306 | 0.85708 | 0.841093 | 0 | 0.087662 | 0.312347 | 65,709 | 1,711 | 140 | 38.403857 | 0.669558 | 0.080659 | 0 | 0.768879 | 0 | 0.003051 | 0.083975 | 0.017931 | 0 | 0 | 0 | 0 | 0 | 1 | 0.048818 | false | 0.000763 | 0.004577 | 0.000763 | 0.102975 | 0.006102 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
db9c5ebb11b79b2987c3bea66fcec688b28b3973 | 3,146 | py | Python | python/ideep4py/tests/mm/test_mdarray_sum.py | cis-ncbj/ideep | b57539e4608e75f80dbc5c2784643d5f2f242003 | [
"MIT"
] | 133 | 2017-10-20T06:45:25.000Z | 2022-03-22T08:27:10.000Z | python/ideep4py/tests/mm/test_mdarray_sum.py | cis-ncbj/ideep | b57539e4608e75f80dbc5c2784643d5f2f242003 | [
"MIT"
] | 46 | 2018-03-05T06:44:16.000Z | 2021-12-01T03:50:19.000Z | python/ideep4py/tests/mm/test_mdarray_sum.py | cis-ncbj/ideep | b57539e4608e75f80dbc5c2784643d5f2f242003 | [
"MIT"
] | 78 | 2017-11-05T11:46:31.000Z | 2022-03-28T08:14:52.000Z | import ideep4py # NOQA
import numpy
from chainer import testing
from ideep4py import relu, mdarray
print('mdarray sum [larg shape routine]')
print('shape (256, 384, 13, 13) along (0, 2, 3)')
x = numpy.ndarray((256, 384, 13, 13), dtype=numpy.float32)
y = numpy.maximum(x, 0, dtype=x.dtype)
mx = mdarray(x)
my = relu.Forward(mx)
testing.assert_allclose(my.sum((0, 2, 3)), y.sum((0, 2, 3)))
print('pass ...\n')
print('mdarray sum [small shape routine]')
print('shape (39, 32, 13, 13) along (0, 2, 3)')
x = numpy.ndarray((39, 32, 13, 13), dtype=numpy.float32)
y = numpy.maximum(x, 0, dtype=x.dtype)
mx = mdarray(x)
my = relu.Forward(mx)
testing.assert_allclose(my.sum((0, 2, 3)), y.sum((0, 2, 3)))
print('pass ...\n')
print('mdarray sum [mkldnn format keepdims routine]')
print('shape (39, 32, 13, 13) along (0, 2, 3)')
x = numpy.ndarray((39, 32, 13, 13), dtype=numpy.float32)
y = numpy.maximum(x, 0, dtype=x.dtype)
mx = mdarray(x)
my = relu.Forward(mx)
testing.assert_allclose(my.sum((0, 2, 3), keepdims=True),
y.sum((0, 2, 3), keepdims=True))
print('pass ...\n')
print('mdarray sum [common format small shape routine]')
print('shape (2, 2, 3, 3) along (0, 2, 3)')
x = numpy.ndarray((2, 2, 3, 3), dtype=numpy.float32)
x.fill(2.3232)
x[0].fill(3.1212)
mx = mdarray(x)
testing.assert_allclose(mx.sum((0, 2, 3)), x.sum((0, 2, 3)))
print('pass ...\n')
print('mdarray sum [common format small shape routine]')
print('shape (2, 2, 3, 3) along (1, 3)')
x = numpy.ndarray((2, 2, 3, 3), dtype=numpy.float32)
x.fill(2.3232)
x[0].fill(3.1212)
mx = mdarray(x)
testing.assert_allclose(mx.sum((1, 3)), x.sum((1, 3)))
print('pass ...\n')
print('mdarray sum [common format routine keepdims]')
print('shape (2, 2, 3, 3) along (0, 2, 3)')
x = numpy.ndarray((2, 2, 3, 3), dtype=numpy.float32)
x.fill(2.3232)
x[0].fill(3.1212)
mx = mdarray(x)
ms = mx.sum((0, 2, 3), keepdims=True)
ns = x.sum((0, 2, 3), keepdims=True)
testing.assert_allclose(ms, ns)
print('pass ...\n')
print('mdarray sum [common format routine]')
print('shape (2, 15, 3, 3) along (0, 2, 3)')
x = numpy.ndarray((2, 15, 3, 3), dtype=numpy.float32)
x.fill(1)
x[0].fill(3.1212)
mx = mdarray(x)
ms = mx.sum((0, 2, 3))
ns = x.sum((0, 2, 3))
testing.assert_allclose(ms, ns)
print('pass ...\n')
print('mdarray sum [common format big shape routine]')
print('shape (256, 385, 13, 13) along (0, 2, 3)')
x = numpy.ndarray((256, 385, 13, 13), dtype=numpy.float32)
x.fill(1)
x[0].fill(3.1212)
mx = mdarray(x)
ms = mx.sum((0, 2, 3))
ns = x.sum((0, 2, 3))
testing.assert_allclose(ms, ns)
print('pass ...\n')
print('mdarray sum [common format big shape routine]')
print('shape (256, 1000) along (0)')
x = numpy.ndarray((256, 1000), dtype=numpy.float32)
x.fill(1)
x[0].fill(3.1212)
mx = mdarray(x)
ms = mx.sum((0))
ns = x.sum((0))
testing.assert_allclose(ms, ns)
print('pass ...\n')
print('mdarray sum [common format big shape routine]')
print('shape (256, 1000) along (1)')
x = numpy.ndarray((256, 1000), dtype=numpy.float32)
x.fill(1)
x[0].fill(3.1212)
mx = mdarray(x)
ms = mx.sum((1))
ns = x.sum((1))
testing.assert_allclose(ms, ns)
print('pass ...\n')
| 23.477612 | 60 | 0.631914 | 576 | 3,146 | 3.434028 | 0.090278 | 0.0273 | 0.03185 | 0.042467 | 0.908493 | 0.886249 | 0.854398 | 0.853387 | 0.835693 | 0.809403 | 0 | 0.109788 | 0.1459 | 3,146 | 133 | 61 | 23.654135 | 0.626349 | 0.001271 | 0 | 0.705263 | 0 | 0 | 0.274204 | 0 | 0 | 0 | 0 | 0 | 0.105263 | 1 | 0 | false | 0.105263 | 0.042105 | 0 | 0.042105 | 0.315789 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
dbc214ba0f2653a89af5dbe51a73e4d5902469db | 65,829 | py | Python | datalabeling/tests/unit/gapic/v1beta1/test_data_labeling_service_client_v1beta1.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 2 | 2021-11-26T07:08:43.000Z | 2022-03-07T20:20:04.000Z | datalabeling/tests/unit/gapic/v1beta1/test_data_labeling_service_client_v1beta1.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 6 | 2019-05-27T22:05:58.000Z | 2019-08-05T16:46:16.000Z | datalabeling/tests/unit/gapic/v1beta1/test_data_labeling_service_client_v1beta1.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 1 | 2019-03-29T18:26:16.000Z | 2019-03-29T18:26:16.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import pytest
from google.rpc import status_pb2
from google.cloud import datalabeling_v1beta1
from google.cloud.datalabeling_v1beta1 import enums
from google.cloud.datalabeling_v1beta1.proto import annotation_spec_set_pb2
from google.cloud.datalabeling_v1beta1.proto import data_labeling_service_pb2
from google.cloud.datalabeling_v1beta1.proto import dataset_pb2
from google.cloud.datalabeling_v1beta1.proto import evaluation_job_pb2
from google.cloud.datalabeling_v1beta1.proto import evaluation_pb2
from google.cloud.datalabeling_v1beta1.proto import human_annotation_config_pb2
from google.cloud.datalabeling_v1beta1.proto import instruction_pb2
from google.cloud.datalabeling_v1beta1.proto import (
operations_pb2 as proto_operations_pb2,
)
from google.longrunning import operations_pb2 as longrunning_operations_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
class MultiCallableStub(object):
    """Stub standing in for the grpc.UnaryUnaryMultiCallable interface.

    Records every invocation on the owning channel stub's request log and
    replays canned responses from its response queue; a queued response
    that is an Exception instance is raised instead of returned.
    """

    def __init__(self, method, channel_stub):
        # The RPC method name this callable represents, and the channel
        # stub whose ``requests`` log and ``responses`` queue are shared.
        self.method = method
        self.channel_stub = channel_stub

    def __call__(self, request, timeout=None, metadata=None, credentials=None):
        """Log *request*, then pop and return (or raise) the next canned response."""
        stub = self.channel_stub
        stub.requests.append((self.method, request))
        if not stub.responses:
            return None
        canned = stub.responses.pop()
        if isinstance(canned, Exception):
            raise canned
        # Falsy canned responses are reported as None, matching the
        # original implicit-return behavior.
        return canned if canned else None
class ChannelStub(object):
    """Stub for the grpc.Channel interface.

    Holds a queue of canned ``responses`` consumed by the multi-callables
    it hands out, and a ``requests`` log of every (method, request) pair
    those callables receive.
    """

    def __init__(self, responses=None):
        # Use a None sentinel instead of the mutable default ``responses=[]``:
        # a shared default list would be aliased across every ChannelStub
        # created without an explicit argument, so appending responses to one
        # stub would leak them into unrelated tests.
        self.responses = [] if responses is None else responses
        self.requests = []

    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
        """Return a stubbed unary-unary multi-callable bound to *method*."""
        return MultiCallableStub(method, self)
class CustomException(Exception):
    """Exception raised by stubbed responses to simulate a transport error."""
class TestDataLabelingServiceClient(object):
    def test_create_dataset(self):
        """create_dataset returns the stubbed Dataset and sends one CreateDatasetRequest."""
        # Setup Expected Response
        name = "name3373707"
        display_name = "displayName1615086568"
        description = "description-1724546052"
        data_item_count = 2014260376
        expected_response = {
            "name": name,
            "display_name": display_name,
            "description": description,
            "data_item_count": data_item_count,
        }
        expected_response = dataset_pb2.Dataset(**expected_response)
        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        parent = client.project_path("[PROJECT]")
        dataset = {}
        response = client.create_dataset(parent, dataset)
        assert expected_response == response
        # Exactly one RPC must have gone through the stubbed channel.
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.CreateDatasetRequest(
            parent=parent, dataset=dataset
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_create_dataset_exception(self):
        """create_dataset propagates an exception surfaced by the transport."""
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup request
        parent = client.project_path("[PROJECT]")
        dataset = {}
        with pytest.raises(CustomException):
            client.create_dataset(parent, dataset)
    def test_get_dataset(self):
        """get_dataset returns the stubbed Dataset and sends one GetDatasetRequest."""
        # Setup Expected Response
        name_2 = "name2-1052831874"
        display_name = "displayName1615086568"
        description = "description-1724546052"
        data_item_count = 2014260376
        expected_response = {
            "name": name_2,
            "display_name": display_name,
            "description": description,
            "data_item_count": data_item_count,
        }
        expected_response = dataset_pb2.Dataset(**expected_response)
        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        response = client.get_dataset(name)
        assert expected_response == response
        # Exactly one RPC must have gone through the stubbed channel.
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.GetDatasetRequest(name=name)
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_get_dataset_exception(self):
        """get_dataset propagates an exception surfaced by the transport."""
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        with pytest.raises(CustomException):
            client.get_dataset(name)
    def test_list_datasets(self):
        """list_datasets yields the single stubbed page of datasets."""
        # Setup Expected Response
        next_page_token = ""
        datasets_element = {}
        datasets = [datasets_element]
        expected_response = {"next_page_token": next_page_token, "datasets": datasets}
        expected_response = data_labeling_service_pb2.ListDatasetsResponse(
            **expected_response
        )
        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        parent = client.project_path("[PROJECT]")
        paged_list_response = client.list_datasets(parent)
        # Consuming the pager performs the (single) underlying RPC.
        resources = list(paged_list_response)
        assert len(resources) == 1
        assert expected_response.datasets[0] == resources[0]
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.ListDatasetsRequest(parent=parent)
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_list_datasets_exception(self):
        """Consuming the list_datasets pager propagates a transport exception."""
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup request
        parent = client.project_path("[PROJECT]")
        paged_list_response = client.list_datasets(parent)
        # The RPC is lazy: the exception surfaces only when the pager is consumed.
        with pytest.raises(CustomException):
            list(paged_list_response)
    def test_delete_dataset(self):
        """delete_dataset sends one DeleteDatasetRequest (no response payload)."""
        channel = ChannelStub()
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        client.delete_dataset(name)
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.DeleteDatasetRequest(name=name)
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_delete_dataset_exception(self):
        """delete_dataset propagates an exception surfaced by the transport."""
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        with pytest.raises(CustomException):
            client.delete_dataset(name)
    def test_import_data(self):
        """import_data resolves its long-running operation to the packed response."""
        # Setup Expected Response
        dataset = "dataset1443214456"
        total_count = 407761836
        import_count = 1721296907
        expected_response = {
            "dataset": dataset,
            "total_count": total_count,
            "import_count": import_count,
        }
        expected_response = proto_operations_pb2.ImportDataOperationResponse(
            **expected_response
        )
        # Pack the expected payload into an already-completed Operation so the
        # client's LRO polling resolves immediately.
        operation = longrunning_operations_pb2.Operation(
            name="operations/test_import_data", done=True
        )
        operation.response.Pack(expected_response)
        # Mock the API response
        channel = ChannelStub(responses=[operation])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        input_config = {}
        response = client.import_data(name, input_config)
        result = response.result()
        assert expected_response == result
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.ImportDataRequest(
            name=name, input_config=input_config
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_import_data_exception(self):
        """import_data exposes an errored long-running operation via exception()."""
        # Setup Response: a completed Operation carrying an error status.
        error = status_pb2.Status()
        operation = longrunning_operations_pb2.Operation(
            name="operations/test_import_data_exception", done=True
        )
        operation.error.CopyFrom(error)
        # Mock the API response
        channel = ChannelStub(responses=[operation])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        input_config = {}
        response = client.import_data(name, input_config)
        exception = response.exception()
        assert exception.errors[0] == error
    def test_export_data(self):
        """export_data resolves its long-running operation to the packed response."""
        # Setup Expected Response
        dataset = "dataset1443214456"
        total_count = 407761836
        export_count = 529256252
        expected_response = {
            "dataset": dataset,
            "total_count": total_count,
            "export_count": export_count,
        }
        expected_response = proto_operations_pb2.ExportDataOperationResponse(
            **expected_response
        )
        # Pack the expected payload into an already-completed Operation so the
        # client's LRO polling resolves immediately.
        operation = longrunning_operations_pb2.Operation(
            name="operations/test_export_data", done=True
        )
        operation.response.Pack(expected_response)
        # Mock the API response
        channel = ChannelStub(responses=[operation])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        annotated_dataset = "annotatedDataset-1407812655"
        output_config = {}
        response = client.export_data(name, annotated_dataset, output_config)
        result = response.result()
        assert expected_response == result
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.ExportDataRequest(
            name=name, annotated_dataset=annotated_dataset, output_config=output_config
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_export_data_exception(self):
        """export_data exposes an errored long-running operation via exception()."""
        # Setup Response: a completed Operation carrying an error status.
        error = status_pb2.Status()
        operation = longrunning_operations_pb2.Operation(
            name="operations/test_export_data_exception", done=True
        )
        operation.error.CopyFrom(error)
        # Mock the API response
        channel = ChannelStub(responses=[operation])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.dataset_path("[PROJECT]", "[DATASET]")
        annotated_dataset = "annotatedDataset-1407812655"
        output_config = {}
        response = client.export_data(name, annotated_dataset, output_config)
        exception = response.exception()
        assert exception.errors[0] == error
    def test_get_data_item(self):
        """get_data_item returns the stubbed DataItem and sends one GetDataItemRequest."""
        # Setup Expected Response
        name_2 = "name2-1052831874"
        expected_response = {"name": name_2}
        expected_response = dataset_pb2.DataItem(**expected_response)
        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.data_item_path("[PROJECT]", "[DATASET]", "[DATA_ITEM]")
        response = client.get_data_item(name)
        assert expected_response == response
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.GetDataItemRequest(name=name)
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_get_data_item_exception(self):
        """get_data_item propagates an exception surfaced by the transport."""
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup request
        name = client.data_item_path("[PROJECT]", "[DATASET]", "[DATA_ITEM]")
        with pytest.raises(CustomException):
            client.get_data_item(name)
    def test_list_data_items(self):
        """list_data_items yields the single stubbed page of data items."""
        # Setup Expected Response
        next_page_token = ""
        data_items_element = {}
        data_items = [data_items_element]
        expected_response = {
            "next_page_token": next_page_token,
            "data_items": data_items,
        }
        expected_response = data_labeling_service_pb2.ListDataItemsResponse(
            **expected_response
        )
        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        parent = client.dataset_path("[PROJECT]", "[DATASET]")
        paged_list_response = client.list_data_items(parent)
        # Consuming the pager performs the (single) underlying RPC.
        resources = list(paged_list_response)
        assert len(resources) == 1
        assert expected_response.data_items[0] == resources[0]
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.ListDataItemsRequest(parent=parent)
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_list_data_items_exception(self):
        """Consuming the list_data_items pager propagates a transport exception."""
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup request
        parent = client.dataset_path("[PROJECT]", "[DATASET]")
        paged_list_response = client.list_data_items(parent)
        # The RPC is lazy: the exception surfaces only when the pager is consumed.
        with pytest.raises(CustomException):
            list(paged_list_response)
    def test_get_annotated_dataset(self):
        """get_annotated_dataset returns the stubbed AnnotatedDataset and sends one request."""
        # Setup Expected Response
        name_2 = "name2-1052831874"
        display_name = "displayName1615086568"
        description = "description-1724546052"
        example_count = 1517063674
        completed_example_count = 612567290
        expected_response = {
            "name": name_2,
            "display_name": display_name,
            "description": description,
            "example_count": example_count,
            "completed_example_count": completed_example_count,
        }
        expected_response = dataset_pb2.AnnotatedDataset(**expected_response)
        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup Request
        name = client.annotated_dataset_path(
            "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]"
        )
        response = client.get_annotated_dataset(name)
        assert expected_response == response
        assert len(channel.requests) == 1
        expected_request = data_labeling_service_pb2.GetAnnotatedDatasetRequest(
            name=name
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
    def test_get_annotated_dataset_exception(self):
        """get_annotated_dataset propagates an exception surfaced by the transport."""
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = datalabeling_v1beta1.DataLabelingServiceClient()
        # Setup request
        name = client.annotated_dataset_path(
            "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]"
        )
        with pytest.raises(CustomException):
            client.get_annotated_dataset(name)
def test_list_annotated_datasets(self):
    """list_annotated_datasets yields each element of the single page."""
    expected_response = data_labeling_service_pb2.ListAnnotatedDatasetsResponse(
        next_page_token="", annotated_datasets=[{}]
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    resources = list(client.list_annotated_datasets(parent))

    # One page, one element.
    assert len(resources) == 1
    assert resources[0] == expected_response.annotated_datasets[0]

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.ListAnnotatedDatasetsRequest(
        parent=parent
    )
    assert channel.requests[0][1] == expected_request
def test_list_annotated_datasets_exception(self):
    """Consuming the pager raises when the backend errors."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    pager = client.list_annotated_datasets(parent)
    # The error only surfaces once the pager is iterated.
    with pytest.raises(CustomException):
        list(pager)
def test_label_image(self):
    """label_image returns an LRO that resolves to the annotated dataset."""
    expected_response = dataset_pb2.AnnotatedDataset(
        name="name3373707",
        display_name="displayName1615086568",
        description="description-1724546052",
        example_count=1517063674,
        completed_example_count=612567290,
    )
    # Wrap the payload in an already-completed long-running operation.
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_label_image", done=True
    )
    operation.response.Pack(expected_response)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    basic_config = {}
    feature = enums.LabelImageRequest.Feature.FEATURE_UNSPECIFIED
    future = client.label_image(parent, basic_config, feature)
    assert future.result() == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.LabelImageRequest(
        parent=parent, basic_config=basic_config, feature=feature
    )
    assert channel.requests[0][1] == expected_request
def test_label_image_exception(self):
    """A failed label_image operation exposes its status on the future."""
    error = status_pb2.Status()
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_label_image_exception", done=True
    )
    operation.error.CopyFrom(error)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    basic_config = {}
    feature = enums.LabelImageRequest.Feature.FEATURE_UNSPECIFIED
    future = client.label_image(parent, basic_config, feature)
    assert future.exception().errors[0] == error
def test_label_video(self):
    """label_video returns an LRO that resolves to the annotated dataset."""
    expected_response = dataset_pb2.AnnotatedDataset(
        name="name3373707",
        display_name="displayName1615086568",
        description="description-1724546052",
        example_count=1517063674,
        completed_example_count=612567290,
    )
    # Wrap the payload in an already-completed long-running operation.
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_label_video", done=True
    )
    operation.response.Pack(expected_response)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    basic_config = {}
    feature = enums.LabelVideoRequest.Feature.FEATURE_UNSPECIFIED
    future = client.label_video(parent, basic_config, feature)
    assert future.result() == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.LabelVideoRequest(
        parent=parent, basic_config=basic_config, feature=feature
    )
    assert channel.requests[0][1] == expected_request
def test_label_video_exception(self):
    """A failed label_video operation exposes its status on the future."""
    error = status_pb2.Status()
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_label_video_exception", done=True
    )
    operation.error.CopyFrom(error)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    basic_config = {}
    feature = enums.LabelVideoRequest.Feature.FEATURE_UNSPECIFIED
    future = client.label_video(parent, basic_config, feature)
    assert future.exception().errors[0] == error
def test_label_text(self):
    """label_text returns an LRO that resolves to the annotated dataset."""
    expected_response = dataset_pb2.AnnotatedDataset(
        name="name3373707",
        display_name="displayName1615086568",
        description="description-1724546052",
        example_count=1517063674,
        completed_example_count=612567290,
    )
    # Wrap the payload in an already-completed long-running operation.
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_label_text", done=True
    )
    operation.response.Pack(expected_response)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    basic_config = {}
    feature = enums.LabelTextRequest.Feature.FEATURE_UNSPECIFIED
    future = client.label_text(parent, basic_config, feature)
    assert future.result() == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.LabelTextRequest(
        parent=parent, basic_config=basic_config, feature=feature
    )
    assert channel.requests[0][1] == expected_request
def test_label_text_exception(self):
    """A failed label_text operation exposes its status on the future."""
    error = status_pb2.Status()
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_label_text_exception", done=True
    )
    operation.error.CopyFrom(error)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.dataset_path("[PROJECT]", "[DATASET]")
    basic_config = {}
    feature = enums.LabelTextRequest.Feature.FEATURE_UNSPECIFIED
    future = client.label_text(parent, basic_config, feature)
    assert future.exception().errors[0] == error
def test_get_example(self):
    """get_example returns the stubbed Example proto."""
    expected_response = dataset_pb2.Example(name="name2-1052831874")

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.example_path(
        "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]", "[EXAMPLE]"
    )
    response = client.get_example(name)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.GetExampleRequest(name=name)
    assert channel.requests[0][1] == expected_request
def test_get_example_exception(self):
    """get_example propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.example_path(
        "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]", "[EXAMPLE]"
    )
    with pytest.raises(CustomException):
        client.get_example(name)
def test_list_examples(self):
    """list_examples yields each element of the single page."""
    expected_response = data_labeling_service_pb2.ListExamplesResponse(
        next_page_token="", examples=[{}]
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.annotated_dataset_path(
        "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]"
    )
    resources = list(client.list_examples(parent))

    # One page, one element.
    assert len(resources) == 1
    assert resources[0] == expected_response.examples[0]

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.ListExamplesRequest(parent=parent)
    assert channel.requests[0][1] == expected_request
def test_list_examples_exception(self):
    """Consuming the examples pager raises when the backend errors."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.annotated_dataset_path(
        "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]"
    )
    pager = client.list_examples(parent)
    # The error only surfaces once the pager is iterated.
    with pytest.raises(CustomException):
        list(pager)
def test_create_annotation_spec_set(self):
    """create_annotation_spec_set returns the stubbed spec set."""
    expected_response = annotation_spec_set_pb2.AnnotationSpecSet(
        name="name3373707",
        display_name="displayName1615086568",
        description="description-1724546052",
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    annotation_spec_set = {}
    response = client.create_annotation_spec_set(parent, annotation_spec_set)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.CreateAnnotationSpecSetRequest(
        parent=parent, annotation_spec_set=annotation_spec_set
    )
    assert channel.requests[0][1] == expected_request
def test_create_annotation_spec_set_exception(self):
    """create_annotation_spec_set propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    annotation_spec_set = {}
    with pytest.raises(CustomException):
        client.create_annotation_spec_set(parent, annotation_spec_set)
def test_get_annotation_spec_set(self):
    """get_annotation_spec_set returns the stubbed spec set."""
    expected_response = annotation_spec_set_pb2.AnnotationSpecSet(
        name="name2-1052831874",
        display_name="displayName1615086568",
        description="description-1724546052",
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]")
    response = client.get_annotation_spec_set(name)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.GetAnnotationSpecSetRequest(
        name=name
    )
    assert channel.requests[0][1] == expected_request
def test_get_annotation_spec_set_exception(self):
    """get_annotation_spec_set propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]")
    with pytest.raises(CustomException):
        client.get_annotation_spec_set(name)
def test_list_annotation_spec_sets(self):
    """list_annotation_spec_sets yields each element of the single page."""
    expected_response = data_labeling_service_pb2.ListAnnotationSpecSetsResponse(
        next_page_token="", annotation_spec_sets=[{}]
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    resources = list(client.list_annotation_spec_sets(parent))

    # One page, one element.
    assert len(resources) == 1
    assert resources[0] == expected_response.annotation_spec_sets[0]

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.ListAnnotationSpecSetsRequest(
        parent=parent
    )
    assert channel.requests[0][1] == expected_request
def test_list_annotation_spec_sets_exception(self):
    """Consuming the spec-sets pager raises when the backend errors."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    pager = client.list_annotation_spec_sets(parent)
    # The error only surfaces once the pager is iterated.
    with pytest.raises(CustomException):
        list(pager)
def test_delete_annotation_spec_set(self):
    """delete_annotation_spec_set issues exactly one delete RPC."""
    channel = ChannelStub()
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]")
    client.delete_annotation_spec_set(name)

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.DeleteAnnotationSpecSetRequest(
        name=name
    )
    assert channel.requests[0][1] == expected_request
def test_delete_annotation_spec_set_exception(self):
    """delete_annotation_spec_set propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]")
    with pytest.raises(CustomException):
        client.delete_annotation_spec_set(name)
def test_create_instruction(self):
    """create_instruction returns an LRO that resolves to the instruction."""
    expected_response = instruction_pb2.Instruction(
        name="name3373707",
        display_name="displayName1615086568",
        description="description-1724546052",
    )
    # Wrap the payload in an already-completed long-running operation.
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_create_instruction", done=True
    )
    operation.response.Pack(expected_response)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    instruction = {}
    future = client.create_instruction(parent, instruction)
    assert future.result() == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.CreateInstructionRequest(
        parent=parent, instruction=instruction
    )
    assert channel.requests[0][1] == expected_request
def test_create_instruction_exception(self):
    """A failed create_instruction operation exposes its status error."""
    error = status_pb2.Status()
    operation = longrunning_operations_pb2.Operation(
        name="operations/test_create_instruction_exception", done=True
    )
    operation.error.CopyFrom(error)

    channel = ChannelStub(responses=[operation])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    instruction = {}
    future = client.create_instruction(parent, instruction)
    assert future.exception().errors[0] == error
def test_get_instruction(self):
    """get_instruction returns the stubbed Instruction proto."""
    expected_response = instruction_pb2.Instruction(
        name="name2-1052831874",
        display_name="displayName1615086568",
        description="description-1724546052",
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.instruction_path("[PROJECT]", "[INSTRUCTION]")
    response = client.get_instruction(name)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.GetInstructionRequest(name=name)
    assert channel.requests[0][1] == expected_request
def test_get_instruction_exception(self):
    """get_instruction propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.instruction_path("[PROJECT]", "[INSTRUCTION]")
    with pytest.raises(CustomException):
        client.get_instruction(name)
def test_list_instructions(self):
    """list_instructions yields each element of the single page."""
    expected_response = data_labeling_service_pb2.ListInstructionsResponse(
        next_page_token="", instructions=[{}]
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    resources = list(client.list_instructions(parent))

    # One page, one element.
    assert len(resources) == 1
    assert resources[0] == expected_response.instructions[0]

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.ListInstructionsRequest(
        parent=parent
    )
    assert channel.requests[0][1] == expected_request
def test_list_instructions_exception(self):
    """Consuming the instructions pager raises when the backend errors."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    pager = client.list_instructions(parent)
    # The error only surfaces once the pager is iterated.
    with pytest.raises(CustomException):
        list(pager)
def test_delete_instruction(self):
    """delete_instruction issues exactly one delete RPC."""
    channel = ChannelStub()
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.instruction_path("[PROJECT]", "[INSTRUCTION]")
    client.delete_instruction(name)

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.DeleteInstructionRequest(name=name)
    assert channel.requests[0][1] == expected_request
def test_delete_instruction_exception(self):
    """delete_instruction propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.instruction_path("[PROJECT]", "[INSTRUCTION]")
    with pytest.raises(CustomException):
        client.delete_instruction(name)
def test_get_evaluation(self):
    """get_evaluation returns the stubbed Evaluation proto."""
    expected_response = evaluation_pb2.Evaluation(
        name="name2-1052831874",
        evaluated_item_count=358077111,
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]")
    response = client.get_evaluation(name)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.GetEvaluationRequest(name=name)
    assert channel.requests[0][1] == expected_request
def test_get_evaluation_exception(self):
    """get_evaluation propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]")
    with pytest.raises(CustomException):
        client.get_evaluation(name)
def test_search_evaluations(self):
    """search_evaluations yields each element of the single page."""
    expected_response = data_labeling_service_pb2.SearchEvaluationsResponse(
        next_page_token="", evaluations=[{}]
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    filter_ = "filter-1274492040"
    resources = list(client.search_evaluations(parent, filter_))

    # One page, one element.
    assert len(resources) == 1
    assert resources[0] == expected_response.evaluations[0]

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.SearchEvaluationsRequest(
        parent=parent, filter=filter_
    )
    assert channel.requests[0][1] == expected_request
def test_search_evaluations_exception(self):
    """Consuming the evaluations pager raises when the backend errors."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    filter_ = "filter-1274492040"
    pager = client.search_evaluations(parent, filter_)
    # The error only surfaces once the pager is iterated.
    with pytest.raises(CustomException):
        list(pager)
def test_search_example_comparisons(self):
    """search_example_comparisons yields each element of the single page."""
    expected_response = data_labeling_service_pb2.SearchExampleComparisonsResponse(
        next_page_token="", example_comparisons=[{}]
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]")
    resources = list(client.search_example_comparisons(parent))

    # One page, one element.
    assert len(resources) == 1
    assert resources[0] == expected_response.example_comparisons[0]

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.SearchExampleComparisonsRequest(
        parent=parent
    )
    assert channel.requests[0][1] == expected_request
def test_search_example_comparisons_exception(self):
    """Consuming the comparisons pager raises when the backend errors."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]")
    pager = client.search_example_comparisons(parent)
    # The error only surfaces once the pager is iterated.
    with pytest.raises(CustomException):
        list(pager)
def test_create_evaluation_job(self):
    """create_evaluation_job returns the stubbed EvaluationJob."""
    expected_response = evaluation_job_pb2.EvaluationJob(
        name="name3373707",
        description="description-1724546052",
        schedule="schedule-697920873",
        model_version="modelVersion-1669102142",
        annotation_spec_set="annotationSpecSet1881405678",
        label_missing_ground_truth=False,
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    job = {}
    response = client.create_evaluation_job(parent, job)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.CreateEvaluationJobRequest(
        parent=parent, job=job
    )
    assert channel.requests[0][1] == expected_request
def test_create_evaluation_job_exception(self):
    """create_evaluation_job propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    job = {}
    with pytest.raises(CustomException):
        client.create_evaluation_job(parent, job)
def test_update_evaluation_job(self):
    """update_evaluation_job returns the stubbed EvaluationJob."""
    expected_response = evaluation_job_pb2.EvaluationJob(
        name="name3373707",
        description="description-1724546052",
        schedule="schedule-697920873",
        model_version="modelVersion-1669102142",
        annotation_spec_set="annotationSpecSet1881405678",
        label_missing_ground_truth=False,
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    evaluation_job = {}
    update_mask = {}
    response = client.update_evaluation_job(evaluation_job, update_mask)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.UpdateEvaluationJobRequest(
        evaluation_job=evaluation_job, update_mask=update_mask
    )
    assert channel.requests[0][1] == expected_request
def test_update_evaluation_job_exception(self):
    """update_evaluation_job propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    evaluation_job = {}
    update_mask = {}
    with pytest.raises(CustomException):
        client.update_evaluation_job(evaluation_job, update_mask)
def test_get_evaluation_job(self):
    """get_evaluation_job returns the stubbed EvaluationJob."""
    expected_response = evaluation_job_pb2.EvaluationJob(
        name="name2-1052831874",
        description="description-1724546052",
        schedule="schedule-697920873",
        model_version="modelVersion-1669102142",
        annotation_spec_set="annotationSpecSet1881405678",
        label_missing_ground_truth=False,
    )

    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    response = client.get_evaluation_job(name)
    assert response == expected_response

    assert len(channel.requests) == 1
    expected_request = data_labeling_service_pb2.GetEvaluationJobRequest(name=name)
    assert channel.requests[0][1] == expected_request
def test_get_evaluation_job_exception(self):
    """get_evaluation_job propagates a channel-level error."""
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    with pytest.raises(CustomException):
        client.get_evaluation_job(name)
def test_pause_evaluation_job(self):
    """pause_evaluation_job sends the expected request over the stubbed channel."""
    channel = ChannelStub()
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    client.pause_evaluation_job(name)

    assert len(channel.requests) == 1
    assert channel.requests[0][1] == data_labeling_service_pb2.PauseEvaluationJobRequest(
        name=name
    )
def test_pause_evaluation_job_exception(self):
    """pause_evaluation_job surfaces errors raised by the transport."""
    channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    with pytest.raises(CustomException):
        client.pause_evaluation_job(name)
def test_resume_evaluation_job(self):
    """resume_evaluation_job sends the expected request over the stubbed channel."""
    channel = ChannelStub()
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    client.resume_evaluation_job(name)

    assert len(channel.requests) == 1
    assert channel.requests[0][1] == data_labeling_service_pb2.ResumeEvaluationJobRequest(
        name=name
    )
def test_resume_evaluation_job_exception(self):
    """resume_evaluation_job surfaces errors raised by the transport."""
    channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    with pytest.raises(CustomException):
        client.resume_evaluation_job(name)
def test_delete_evaluation_job(self):
    """delete_evaluation_job sends the expected request over the stubbed channel."""
    channel = ChannelStub()
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    client.delete_evaluation_job(name)

    assert len(channel.requests) == 1
    assert channel.requests[0][1] == data_labeling_service_pb2.DeleteEvaluationJobRequest(
        name=name
    )
def test_delete_evaluation_job_exception(self):
    """delete_evaluation_job surfaces errors raised by the transport."""
    channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]")
    with pytest.raises(CustomException):
        client.delete_evaluation_job(name)
def test_list_evaluation_jobs(self):
    """list_evaluation_jobs pages through the stubbed response and issues one RPC."""
    # Single page holding one (empty) job; empty token ends pagination.
    expected_response = data_labeling_service_pb2.ListEvaluationJobsResponse(
        next_page_token="",
        evaluation_jobs=[{}],
    )

    channel = ChannelStub(responses=[expected_response])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    filter_ = "filter-1274492040"
    resources = list(client.list_evaluation_jobs(parent, filter_))

    assert len(resources) == 1
    assert resources[0] == expected_response.evaluation_jobs[0]
    assert len(channel.requests) == 1
    assert channel.requests[0][1] == data_labeling_service_pb2.ListEvaluationJobsRequest(
        parent=parent, filter=filter_
    )
def test_list_evaluation_jobs_exception(self):
    """Iterating the paged response surfaces errors raised by the transport."""
    channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    parent = client.project_path("[PROJECT]")
    filter_ = "filter-1274492040"
    # The call itself is lazy; the error fires on iteration.
    paged_list_response = client.list_evaluation_jobs(parent, filter_)
    with pytest.raises(CustomException):
        list(paged_list_response)
def test_delete_annotated_dataset(self):
    """delete_annotated_dataset sends an (empty) request over the stubbed channel."""
    channel = ChannelStub()
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    client.delete_annotated_dataset()

    assert len(channel.requests) == 1
    assert channel.requests[0][1] == data_labeling_service_pb2.DeleteAnnotatedDatasetRequest()
def test_delete_annotated_dataset_exception(self):
    """delete_annotated_dataset surfaces errors raised by the transport."""
    channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = datalabeling_v1beta1.DataLabelingServiceClient()

    with pytest.raises(CustomException):
        client.delete_annotated_dataset()
| 38.632042 | 88 | 0.670525 | 6,525 | 65,829 | 6.477854 | 0.042299 | 0.062743 | 0.022523 | 0.032176 | 0.91005 | 0.890177 | 0.867394 | 0.854524 | 0.838223 | 0.825045 | 0 | 0.022071 | 0.248401 | 65,829 | 1,703 | 89 | 38.654727 | 0.832225 | 0.052849 | 0 | 0.743942 | 0 | 0 | 0.108398 | 0.068748 | 0 | 0 | 0 | 0 | 0.088853 | 1 | 0.058158 | false | 0.000808 | 0.021002 | 0.000808 | 0.084006 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
917f161a21db244b7a54ccc6b0ff3de6e89bce5f | 6,766 | py | Python | smorfinder/main.py | bhattlab/DeepSmORFNet | b4958e82436a56feadd283b5d25b742a8df8de5b | [
"MIT"
] | 4 | 2020-08-05T20:42:32.000Z | 2021-04-14T07:59:52.000Z | smorfinder/main.py | bhattlab/DeepSmORFNet | b4958e82436a56feadd283b5d25b742a8df8de5b | [
"MIT"
] | 7 | 2020-08-14T10:34:11.000Z | 2022-03-13T16:02:25.000Z | smorfinder/main.py | bhattlab/DeepSmORFNet | b4958e82436a56feadd283b5d25b742a8df8de5b | [
"MIT"
] | 1 | 2021-01-12T17:51:08.000Z | 2021-01-12T17:51:08.000Z | import click
from smorfinder import *
from smorfinder.help import CustomHelp
from smorfinder.run import _run
# Root command group; subcommands ('single', 'meta') are registered onto it
# below via @cli.command. The docstring doubles as the CLI's top-level help.
@click.group(cls=CustomHelp)
def cli():
    """Command-line tool to predict and annotate small protein sequences in genomic sequencing data"""
    pass
@cli.command(short_help='Run SmORFinder on a complete or draft genome assembly of a single species.', help_priority=1)
@click.argument('fasta', type=click.Path(exists=True))
@click.option('--outdir', '-o', default='smorf_output')
@click.option('--prodigal-path', '-pp', default=PRODIGAL_PATH, type=click.Path(exists=True))
# BUG FIX: '--dsn1-model-path', '--dsn2-model-path' and '--smorf-hmm-path' all
# declared the same short flag '-shp', so the short form was ambiguous (at most
# one option could actually be addressed by it). Each now has a distinct short
# flag; the long option names are unchanged, so existing invocations still work.
@click.option('--dsn1-model-path', '-d1p', default=DSN1_MODEL_PATH, type=click.Path(exists=True))
@click.option('--dsn2-model-path', '-d2p', default=DSN2_MODEL_PATH, type=click.Path(exists=True))
@click.option('--smorf-hmm-path', '-shp', default=SMORFHMM_PATH, type=click.Path(exists=True))
@click.option('--hmmsearch-path', '-hp', default=HMMSEARCH_PATH, type=click.Path(exists=True))
@click.option('--force/--no-force', default=False, help="Force overwriting of output directory.")
@click.option('--dsn1-indiv-cutoff', '-idsn1', default=0.9999, help='Minimum cutoff necessary to keep prediction based on DSN1 significance cutoff alone. Between 0 and 1, default=0.9999')
@click.option('--dsn2-indiv-cutoff', '-idsn2', default=0.9999, help='Minimum cutoff necessary to keep prediction based on DSN2 significance cutoff alone. Between 0 and 1, default=0.9999')
@click.option('--phmm-indiv-cutoff', '-iphmm', default=1e-6, help='Minimum cutoff necessary to keep prediction based on pHMM significance cutoff alone. Between 0 and 1, default=1e-6')
@click.option('--dsn1-overlap-cutoff', '-odsn1', default=0.5, help='Minimum cutoff necessary to keep prediction based on DSN1 significance if both other models meet their respective cutoffs. Between 0 and 1, default=0.5')
@click.option('--dsn2-overlap-cutoff', '-odsn2', default=0.5, help='Minimum cutoff necessary to keep prediction based on DSN2 significance if both other models meet their respective cutoffs. Between 0 and 1, default=0.5')
@click.option('--phmm-overlap-cutoff', '-ophmm', default=1, help='Minimum cutoff necessary to keep prediction based on pHMM significance if both other models meet their respective cutoffs. Between 0 and 1, default=1')
def single(fasta, outdir, prodigal_path, dsn1_model_path, dsn2_model_path, smorf_hmm_path, hmmsearch_path, force, dsn1_indiv_cutoff, dsn2_indiv_cutoff, phmm_indiv_cutoff, dsn1_overlap_cutoff, dsn2_overlap_cutoff, phmm_overlap_cutoff):
    """A click access point for the run module. This is used for creating the command line interface."""
    # Echo every parameter first so runs are reproducible from the log.
    # BUG FIX: previously logged command='run'; log the actual subcommand name.
    log_params(command='single', fasta=fasta, outdir=outdir, prodigal_path=prodigal_path,
               dsn1_model_path=dsn1_model_path, dsn2_model_path=dsn2_model_path,
               smorf_hmm_path=smorf_hmm_path, hmmsearch_path=hmmsearch_path, force=force,
               dsn1_indiv_cutoff=dsn1_indiv_cutoff, dsn2_indiv_cutoff=dsn2_indiv_cutoff,
               phmm_indiv_cutoff=phmm_indiv_cutoff, dsn1_overlap_cutoff=dsn1_overlap_cutoff,
               dsn2_overlap_cutoff=dsn2_overlap_cutoff, phmm_overlap_cutoff=phmm_overlap_cutoff)
    # Single-genome mode always runs with 1 thread (third positional argument).
    _run(fasta, outdir, 1, prodigal_path, dsn1_model_path, dsn2_model_path, smorf_hmm_path,
         hmmsearch_path, force, dsn1_indiv_cutoff, dsn2_indiv_cutoff, phmm_indiv_cutoff,
         dsn1_overlap_cutoff, dsn2_overlap_cutoff, phmm_overlap_cutoff, mode='single')
@cli.command(short_help='Run SmORFinder on a metagenomic assembly.', help_priority=2)
@click.argument('fasta', type=click.Path(exists=True))
@click.option('--outdir', '-o', default='smorf_output')
@click.option('--threads', '-t', default=1)
@click.option('--prodigal-path', '-pp', default=PRODIGAL_PATH, type=click.Path(exists=True))
# BUG FIX: '--dsn1-model-path', '--dsn2-model-path' and '--smorf-hmm-path' all
# declared the same short flag '-shp', so the short form was ambiguous (at most
# one option could actually be addressed by it). Each now has a distinct short
# flag; the long option names are unchanged, so existing invocations still work.
@click.option('--dsn1-model-path', '-d1p', default=DSN1_MODEL_PATH, type=click.Path(exists=True))
@click.option('--dsn2-model-path', '-d2p', default=DSN2_MODEL_PATH, type=click.Path(exists=True))
@click.option('--smorf-hmm-path', '-shp', default=SMORFHMM_PATH, type=click.Path(exists=True))
@click.option('--hmmsearch-path', '-hp', default=HMMSEARCH_PATH, type=click.Path(exists=True))
@click.option('--force/--no-force', default=False, help="Force overwriting of output directory.")
@click.option('--dsn1-indiv-cutoff', '-idsn1', default=0.9999, help='Minimum cutoff necessary to keep prediction based on DSN1 significance cutoff alone. Between 0 and 1, default=0.9999')
@click.option('--dsn2-indiv-cutoff', '-idsn2', default=0.9999, help='Minimum cutoff necessary to keep prediction based on DSN2 significance cutoff alone. Between 0 and 1, default=0.9999')
@click.option('--phmm-indiv-cutoff', '-iphmm', default=1e-6, help='Minimum cutoff necessary to keep prediction based on pHMM significance cutoff alone. Between 0 and 1, default=1e-6')
@click.option('--dsn1-overlap-cutoff', '-odsn1', default=0.5, help='Minimum cutoff necessary to keep prediction based on DSN1 significance if both other models meet their respective cutoffs. Between 0 and 1, default=0.5')
@click.option('--dsn2-overlap-cutoff', '-odsn2', default=0.5, help='Minimum cutoff necessary to keep prediction based on DSN2 significance if both other models meet their respective cutoffs. Between 0 and 1, default=0.5')
@click.option('--phmm-overlap-cutoff', '-ophmm', default=1, help='Minimum cutoff necessary to keep prediction based on pHMM significance if both other models meet their respective cutoffs. Between 0 and 1, default=1')
def meta(fasta, outdir, threads, prodigal_path, dsn1_model_path, dsn2_model_path, smorf_hmm_path, hmmsearch_path, force, dsn1_indiv_cutoff, dsn2_indiv_cutoff, phmm_indiv_cutoff, dsn1_overlap_cutoff, dsn2_overlap_cutoff, phmm_overlap_cutoff):
    """A click access point for the run module. This is used for creating the command line interface."""
    # Echo every parameter first so runs are reproducible from the log.
    # BUG FIX: previously logged command='run'; log the actual subcommand name.
    log_params(command='meta', fasta=fasta, outdir=outdir, threads=threads,
               prodigal_path=prodigal_path, dsn1_model_path=dsn1_model_path,
               dsn2_model_path=dsn2_model_path, smorf_hmm_path=smorf_hmm_path,
               hmmsearch_path=hmmsearch_path, force=force,
               dsn1_indiv_cutoff=dsn1_indiv_cutoff, dsn2_indiv_cutoff=dsn2_indiv_cutoff,
               phmm_indiv_cutoff=phmm_indiv_cutoff, dsn1_overlap_cutoff=dsn1_overlap_cutoff,
               dsn2_overlap_cutoff=dsn2_overlap_cutoff, phmm_overlap_cutoff=phmm_overlap_cutoff)
    # Metagenomic mode honors the user-supplied thread count.
    _run(fasta, outdir, threads, prodigal_path, dsn1_model_path, dsn2_model_path, smorf_hmm_path,
         hmmsearch_path, force, dsn1_indiv_cutoff, dsn2_indiv_cutoff, phmm_indiv_cutoff,
         dsn1_overlap_cutoff, dsn2_overlap_cutoff, phmm_overlap_cutoff, mode='meta')
def log_params(**kwargs):
    """Echo every run parameter as a 'name: value' line, framed by banners."""
    click.echo("#### PARAMETERS ####")
    # kwargs preserves insertion order, so parameters print in call order.
    for param_name, param_value in kwargs.items():
        click.echo(f"{param_name}: {param_value}")
    click.echo("####################")
# Allow running this module directly as a script.
if __name__ == '__main__':
    cli()
| 99.5 | 323 | 0.770618 | 1,008 | 6,766 | 4.980159 | 0.126984 | 0.065737 | 0.031076 | 0.045418 | 0.906773 | 0.906773 | 0.906773 | 0.906773 | 0.892829 | 0.892829 | 0 | 0.028815 | 0.097251 | 6,766 | 67 | 324 | 100.985075 | 0.793058 | 0.041679 | 0 | 0.555556 | 0 | 0.222222 | 0.380952 | 0.019481 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0.018519 | 0.074074 | 0 | 0.148148 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
91932890fd8955eca6253bcda5584544418492e0 | 40,207 | py | Python | dataset/kinfacewdataset1.py | xiaopanchen/D2GFL | c5fd51920a20ab01ca51caa80bfb87635577b380 | [
"Apache-2.0"
] | null | null | null | dataset/kinfacewdataset1.py | xiaopanchen/D2GFL | c5fd51920a20ab01ca51caa80bfb87635577b380 | [
"Apache-2.0"
] | null | null | null | dataset/kinfacewdataset1.py | xiaopanchen/D2GFL | c5fd51920a20ab01ca51caa80bfb87635577b380 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
from torch.autograd import Variable
from torch.utils.data import Dataset, DataLoader, sampler
import torchvision.transforms as transforms
import torch.nn.functional as F
from torch.optim import lr_scheduler
import numpy as np
from PIL import Image
# import pandas as pd
# from pandas.io.parsers import read_csv
from sklearn.utils import shuffle
from functools import wraps
from collections import OrderedDict
from sklearn.base import clone
from skimage import io # 基于python脚本语言开发的数字图片处理包,pip install scikit-image
from skimage.color import rgb2gray
import matplotlib.pyplot as plt
import random
import time
import sys
import scipy.io as sio
import os
from pathlib import Path
import torchvision
# Augmentation pipelines used to enlarge the effective batch size.
# NOTE: this 'rgb2gray' rebinds (shadows) the skimage.color.rgb2gray imported above.
rgb2gray = transforms.Compose([transforms.ToPILImage(), transforms.Grayscale(3), transforms.ToTensor()])
hf = transforms.Compose([transforms.ToPILImage(), transforms.RandomHorizontalFlip(), transforms.ToTensor()])
# ToPILImage converts an ndarray or Tensor to a PIL image before flipping.
vf = transforms.Compose([transforms.ToPILImage(), transforms.RandomVerticalFlip(), transforms.ToTensor()])
# Custom Dataset implementations follow.
class KinFaceWDataSet1(Dataset):
    """Kinship-verification pair dataset driven by KinFaceW-style .mat metadata.

    Each item pairs a parent image with a child image and carries the
    kin/non-kin label plus two per-image labels parsed from the file names.
    Handles the KinFaceW-I/II, TSKinFace, CornellKinFace and UBKinFace
    directory layouts.
    """

    # image_path = os.path.join(str(Path.home()), '../KinFaceW-II/images')
    # meta_data_path = os.path.join(str(Path.home()), '../KinFaceW-II/meta_data/')
    # image_path = '../KinFaceW-II/images'  # class-level (static) variable
    # meta_data_path = '../KinFaceW-II/meta_data/'

    # Relation code -> image sub-folder name.
    rel_lookup = {'fd': 'father-dau', 'fs': 'father-son', 'md': 'mother-dau', 'ms': 'mother-son', 'all': 'all',
                  'allthree': 'allthree'}

    def __init__(self, image_path, meta_data_path, u_idx, relation, transform=None, mode='train',
                 aug=False):
        """
        Constructor (initializer).

        :param image_path: root folder containing the relation sub-folders of images
        :param meta_data_path: folder containing the <relation>_pairs.mat files
        :param u_idx: row indices into meta_data['pairs'] that make up this split
        :param relation: relation code ('fd'/'fs'/'md'/'ms'/'all'/'allthree';
            'set1'/'set2' for UBKinFace)
        :param transform: optional transform applied to both images
        :param mode: 'train' or 'test'
        :param aug: whether to apply data augmentation (horizontal/vertical flips)
        """
        # Load the .mat metadata file.
        # A pairs row such as: 1 1 'fd_024_1.jpg' 'fd_024_2.jpg' means
        # fold, kin(1)/non-kin(0), parent image, child image.
        self.meta_data_path = meta_data_path  # instance attribute
        self.image_path = image_path
        self.relation = relation
        if self.relation != 'allthree':
            self.meta_data = sio.loadmat(os.path.join(meta_data_path, relation + '_pairs.mat'))
        else:
            # Dataset name is the third path component, e.g. '../data/KinFaceW-I/...'.
            strlist = meta_data_path.split('/')
            data_name = strlist[2]
            if data_name == 'KinFaceW-I' or data_name == 'KinFaceW-II':
                # self.meta_data = sio.loadmat(
                #     os.path.join(meta_data_path, data_name + '_allthree_' + relation + '_pairs.mat'))
                self.meta_data = sio.loadmat(
                    os.path.join(meta_data_path, 'KinFaceW_TSKinFace_all_pairs.mat'))
            else:  # other datasets not handled yet
                pass
        self.relation = relation
        self.transform = transform
        self.mode = mode
        self.image_size = 64
        self.aug = aug
        self.len = len(u_idx)
        self.tvtid = u_idx

    def __len__(self) -> int:
        # Number of pairs in this split.
        return self.len

    def __getitem__(self, i):
        """
        Fetch one sample by index; used each time an iterator pulls an item.
        :param i: index into this split's pair list (self.tvtid)
        :return: dict with keys 'pair1', 'pair2', 'label', 'label1', 'label2'
        """
        # assert(i < len(self))
        # if self.test:
        # i += self.trainlen
        # print(self.imagesname1[i])
        # print(self.imagesname1[i][:2])
        strlist = self.meta_data_path.split('/')
        self.data_name = strlist[2]
        if self.relation != 'allthree':
            if self.data_name == 'UBKinFace':
                s1 = self.meta_data['pairs'][self.tvtid[i], 2][0]
                s2 = self.meta_data['pairs'][self.tvtid[i], 3][0]
                s1 = s1.replace('_new', '')
                s2 = s2.replace('_new', '')
                if self.relation == 'set1':
                    # old parent and child
                    image_file1 = os.path.join(self.image_path, '03' + '/' + s1)
                    image_file2 = os.path.join(self.image_path, '01' + '/' + s2)
                elif self.relation == 'set2':
                    # young parent and child
                    # NOTE(review): both paths use s1 here while s2 goes unused —
                    # looks like a copy/paste slip; confirm against the data layout.
                    image_file1 = os.path.join(self.image_path, '02' + '/' + s1)
                    image_file2 = os.path.join(self.image_path, '01' + '/' + s1)
            else:
                if self.data_name == 'TSKinFace' or self.data_name == 'CornellKinFace':
                    folder = KinFaceWDataSet1.rel_lookup[self.relation]
                else:
                    # KinFaceW file names start with the two-letter relation code.
                    folder = KinFaceWDataSet1.rel_lookup[self.meta_data['pairs'][self.tvtid[i], 2][0][:2]]
                image_file1 = os.path.join(self.image_path,
                                           folder + '/' + self.meta_data['pairs'][self.tvtid[i], 2][0])
                image_file2 = os.path.join(self.image_path,
                                           folder + '/' + self.meta_data['pairs'][self.tvtid[i], 3][0])
        else:
            # 'allthree' mode: the relation and source dataset are stored per row.
            temprelation = self.meta_data['pairs'][self.tvtid[i], 4][0]
            folder = KinFaceWDataSet1.rel_lookup[temprelation]
            temp_data_name = self.meta_data['pairs'][self.tvtid[i], 5][0]
            imagepath0 = '../data/' + temp_data_name + '/images/' + folder
            image_file1 = os.path.join(imagepath0 + '/' + self.meta_data['pairs'][self.tvtid[i], 2][0])
            image_file2 = os.path.join(imagepath0 + '/' + self.meta_data['pairs'][self.tvtid[i], 3][0])
        # image1 = io.imread(image_file1).astype(np.float32)
        # image2 = io.imread(image_file2).astype(np.float32)
        image1 = io.imread(image_file1)
        image2 = io.imread(image_file2)
        # gray1/gray2 are computed but never used below (the gray branch is
        # commented out).
        gray1 = rgb2gray(image1)
        gray2 = rgb2gray(image2)
        image1 = Image.fromarray(image1)
        image2 = Image.fromarray(image2)
        # image1 = image1.transpose(2, 0, 1) #/ 255
        # image2 = image2.transpose(2, 0, 1) #/ 255
        if self.transform:
            image1 = self.transform(image1)  # original
            # image1 = self.transform(Image.fromarray(image1)) #
            # image2 = self.transform(Image.fromarray(image2)) #
            # image1 = torch.from_numpy(image1.copy())
            image2 = self.transform(image2)  # original
            # image2 = torch.from_numpy(image2.copy())
        else:
            image1 = torch.from_numpy(image1)
            image2 = torch.from_numpy(image2)
        # print(image1.shape)
        # NOTE(review): the 'test' and non-'test' branches below are near-exact
        # duplicates; a future cleanup could deduplicate them.
        if self.mode == 'test':
            if self.aug:
                # Augmentation: stack the original plus horizontal/vertical flips
                # along dim 0, tripling each sample; labels are repeated to match.
                vf1 = vf(image1)
                vf2 = vf(image2)
                hf1 = hf(image1)
                hf2 = hf(image2)
                pair_normal1 = image1.view(-1, 3, self.image_size, self.image_size)  # concatenated along dim 0 below
                pair_normal2 = image2.view(-1, 3, self.image_size, self.image_size)
                # pair_gray1 = gray1.view(-1, 3, self.image_size, self.image_size)
                # pair_gray2 = gray2.view(-1, 3, self.image_size, self.image_size)
                pair_hf1 = hf1.view(-1, 3, self.image_size, self.image_size)
                pair_hf2 = hf2.view(-1, 3, self.image_size, self.image_size)
                pair_vf1 = vf1.view(-1, 3, self.image_size, self.image_size)
                pair_vf2 = vf2.view(-1, 3, self.image_size, self.image_size)
                # pair = pair.view(-1,6,self.image_size,self.image_size)
                # print(pair.shape)
                pair1 = torch.cat((pair_normal1, pair_hf1, pair_vf1), dim=0)
                pair2 = torch.cat((pair_normal2, pair_hf2, pair_vf2), dim=0)
                # print(pair.shape)
                label = torch.LongTensor(
                    np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 1]), dtype=int).tolist())
                # Per-image labels are parsed from the file-name conventions of
                # each dataset; note Cornell/TSKinFace produce a single label
                # even though the pair tensors hold three augmented copies.
                if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
                    label1 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1,
                                dtype=int).tolist())
                    label2 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1,
                                dtype=int).tolist())
                elif self.data_name == 'CornellKinFace':
                    label1 = torch.LongTensor(
                        [int(int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0])) - 1])
                    label2 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
                elif self.data_name == 'TSKinFace':
                    label1 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('-')[1]) - 1])
                    label2 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('-')[1]) - 1])
                else:
                    label1 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1,
                                dtype=int).tolist())
                    label2 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1,
                                dtype=int).tolist())
                sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
            else:
                pair1 = image1
                pair2 = image2
                label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 1])])
                if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
                    label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1])
                    label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1])
                elif self.data_name == 'CornellKinFace':
                    label1 = torch.LongTensor(
                        [int(int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0])) - 1])
                    label2 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
                elif self.data_name == 'TSKinFace':
                    label1 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('-')[1]) - 1])
                    label2 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('-')[1]) - 1])
                else:
                    label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1])
                    label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1])
                sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
        else:
            if self.aug:
                vf1 = vf(image1)
                vf2 = vf(image2)
                hf1 = hf(image1)
                hf2 = hf(image2)
                pair_normal1 = image1.view(-1, 3, self.image_size, self.image_size)  # concatenated along dim 0 below
                pair_normal2 = image2.view(-1, 3, self.image_size, self.image_size)
                # pair_gray1 = gray1.view(-1, 3, self.image_size, self.image_size)
                # pair_gray2 = gray2.view(-1, 3, self.image_size, self.image_size)
                pair_hf1 = hf1.view(-1, 3, self.image_size, self.image_size)
                pair_hf2 = hf2.view(-1, 3, self.image_size, self.image_size)
                pair_vf1 = vf1.view(-1, 3, self.image_size, self.image_size)
                pair_vf2 = vf2.view(-1, 3, self.image_size, self.image_size)
                # pair = pair.view(-1,6,self.image_size,self.image_size)
                # print(pair.shape)
                pair1 = torch.cat((pair_normal1, pair_hf1, pair_vf1), dim=0)
                pair2 = torch.cat((pair_normal2, pair_hf2, pair_vf2), dim=0)
                # print(pair.shape)
                label = torch.LongTensor(
                    np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 1]),
                            dtype=int).tolist())
                if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
                    label1 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1,
                                dtype=int).tolist())
                    label2 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1,
                                dtype=int).tolist())
                elif self.data_name == 'CornellKinFace':
                    label1 = torch.LongTensor(
                        [int(int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0])) - 1])
                    label2 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
                elif self.data_name == 'TSKinFace':
                    label1 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('-')[1]) - 1])
                    label2 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('-')[1]) - 1])
                else:
                    label1 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1,
                                dtype=int).tolist())
                    label2 = torch.LongTensor(
                        np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1,
                                dtype=int).tolist())
                sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
            else:
                pair1 = image1
                pair2 = image2
                label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 1])])
                # image1_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1])-1])
                # image2_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1])-1])
                # image1_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0])-1])
                # image2_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0])-1])
                if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
                    label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1])
                    label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1])
                elif self.data_name=='CornellKinFace':
                    label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0]) - 1])
                    label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
                elif self.data_name == 'TSKinFace':
                    label1 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('-')[1]) - 1])
                    label2 = torch.LongTensor(
                        [int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('-')[1]) - 1])
                else:
                    label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1])
                    label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1])
                sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
        return sample
class KinFaceWDataSet1new(Dataset):
# image_path = os.path.join(str(Path.home()), '../KinFaceW-II/images')
# meta_data_path = os.path.join(str(Path.home()), '../KinFaceW-II/meta_data/')
# image_path = '../KinFaceW-II/images' # 静态变量
# meta_data_path = '../KinFaceW-II/meta_data/'
rel_lookup = {'fd': 'father-dau', 'fs': 'father-son', 'md': 'mother-dau', 'ms': 'mother-son', 'all': 'all',
'allthree': 'allthree'}
def __init__(self, image_path, meta_data_path, u_idx, relation, transform=None, mode='train',
             aug=False):
    """
    Constructor (initializer).

    :param image_path: root folder containing the relation sub-folders of images
    :param meta_data_path: folder containing the <relation>_pairs.mat files
    :param u_idx: row indices into meta_data['pairs'] that make up this split
    :param relation: relation code ('fd'/'fs'/'md'/'ms'/'all'/'allthree';
        'set1'/'set2' for UBKinFace)
    :param transform: optional transform applied to both images
    :param mode: 'train' or 'test'
    :param aug: whether to apply data augmentation (horizontal/vertical flips)
    """
    # Load the .mat metadata file.
    # A pairs row such as: 1 1 'fd_024_1.jpg' 'fd_024_2.jpg' means
    # fold, kin(1)/non-kin(0), parent image, child image.
    self.meta_data_path = meta_data_path  # instance attribute
    self.image_path = image_path
    self.relation = relation
    if self.relation != 'allthree':
        self.meta_data = sio.loadmat(os.path.join(meta_data_path, relation + '_pairs.mat'))
    else:
        # Dataset name is the third path component, e.g. '../data/KinFaceW-I/...'.
        strlist = meta_data_path.split('/')
        data_name = strlist[2]
        if data_name == 'KinFaceW-I' or data_name == 'KinFaceW-II':
            # self.meta_data = sio.loadmat(
            #     os.path.join(meta_data_path, data_name + '_allthree_' + relation + '_pairs.mat'))
            self.meta_data = sio.loadmat(
                os.path.join(meta_data_path, 'KinFaceW_TSKinFace_all_pairs.mat'))
        else:  # other datasets not handled yet
            pass
    self.relation = relation
    self.transform = transform
    self.mode = mode
    self.image_size = 64
    self.aug = aug
    self.len = len(u_idx)
    self.tvtid = u_idx
def __len__(self) -> int:
    # Number of pairs in this split.
    return self.len
def __getitem__(self, i):
"""
根据索引获取数据的方法,在使用迭代器不断地获取变量的时候,就会用到这个方法。其中的return函数后面的返回值,可以自定义返回值的数量
:param i:
:return:
"""
# assert(i < len(self))
# if self.test:
# i += self.trainlen
# print(self.imagesname1[i])
# print(self.imagesname1[i][:2])
strlist = self.meta_data_path.split('/')
self.data_name = strlist[2]
if self.relation != 'allthree':
if self.data_name == 'UBKinFace':
s1 = self.meta_data['pairs'][self.tvtid[i], 2][0]
s2 = self.meta_data['pairs'][self.tvtid[i], 3][0]
s1 = s1.replace('_new', '')
s2 = s2.replace('_new', '')
if self.relation == 'set1':
# old parent and child
image_file1 = os.path.join(self.image_path, '03' + '/' + s1)
image_file2 = os.path.join(self.image_path, '01' + '/' + s2)
elif self.relation == 'set2':
# young paren and child
image_file1 = os.path.join(self.image_path, '02' + '/' + s1)
image_file2 = os.path.join(self.image_path, '01' + '/' + s1)
else:
if self.data_name == 'TSKinFace' or self.data_name == 'CornellKinFace':
folder = KinFaceWDataSet1new.rel_lookup[self.relation]
else:
folder = KinFaceWDataSet1new.rel_lookup[self.meta_data['pairs'][self.tvtid[i], 2][0][:2]]
image_file1 = os.path.join(self.image_path,
folder + '/' + self.meta_data['pairs'][self.tvtid[i], 2][0])
image_file2 = os.path.join(self.image_path,
folder + '/' + self.meta_data['pairs'][self.tvtid[i], 3][0])
else:
temprelation = self.meta_data['pairs'][self.tvtid[i], 4][0]
folder = KinFaceWDataSet1new.rel_lookup[temprelation]
temp_data_name = self.meta_data['pairs'][self.tvtid[i], 5][0]
imagepath0 = '../data/' + temp_data_name + '/images/' + folder
image_file1 = os.path.join(imagepath0 + '/' + self.meta_data['pairs'][self.tvtid[i], 2][0])
image_file2 = os.path.join(imagepath0 + '/' + self.meta_data['pairs'][self.tvtid[i], 3][0])
# image1 = io.imread(image_file1).astype(np.float32)
# image2 = io.imread(image_file2).astype(np.float32)
image1 = io.imread(image_file1)
image2 = io.imread(image_file2)
gray1 = rgb2gray(image1)
gray2 = rgb2gray(image2)
image1 = Image.fromarray(image1)
image2 = Image.fromarray(image2)
# image1 = image1.transpose(2, 0, 1) #/ 255
# image2 = image2.transpose(2, 0, 1) #/ 255
if self.transform:
image1 = self.transform(image1) # 原始
# image1 = self.transform(Image.fromarray(image1)) #
# image2 = self.transform(Image.fromarray(image2)) #
# image1 = torch.from_numpy(image1.copy())
image2 = self.transform(image2) # 原始
# image2 = torch.from_numpy(image2.copy())
else:
image1 = torch.from_numpy(image1)
image2 = torch.from_numpy(image2)
# print(image1.shape)
if self.mode == 'test':
if self.aug:
vf1 = vf(image1)
vf2 = vf(image2)
hf1 = hf(image1)
hf2 = hf(image2)
pair_normal1 = image1.view(-1, 3, self.image_size, self.image_size) # 按维数0(行)拼接
pair_normal2 = image2.view(-1, 3, self.image_size, self.image_size)
# pair_gray1 = gray1.view(-1, 3, self.image_size, self.image_size)
# pair_gray2 = gray2.view(-1, 3, self.image_size, self.image_size)
pair_hf1 = hf1.view(-1, 3, self.image_size, self.image_size)
pair_hf2 = hf2.view(-1, 3, self.image_size, self.image_size)
pair_vf1 = vf1.view(-1, 3, self.image_size, self.image_size)
pair_vf2 = vf2.view(-1, 3, self.image_size, self.image_size)
# pair = pair.view(-1,6,self.image_size,self.image_size)
# print(pair.shape)
pair1 = torch.cat((pair_normal1, pair_hf1, pair_vf1), dim=0)
pair2 = torch.cat((pair_normal2, pair_hf2, pair_vf2), dim=0)
# print(pair.shape)
label = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 1]), dtype=int).tolist())
if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
label1 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1,
dtype=int).tolist())
label2 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1,
dtype=int).tolist())
elif self.data_name == 'CornellKinFace':
label1 = torch.LongTensor(
[int(int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0])) - 1])
label2 = torch.LongTensor(
[int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
else:
label1 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1,
dtype=int).tolist())
label2 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1,
dtype=int).tolist())
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
else:
pair1 = image1
pair2 = image2
label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 1])])
if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1])
label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1])
elif self.data_name == 'CornellKinFace':
label1 = torch.LongTensor(
[int(int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0])) - 1])
label2 = torch.LongTensor(
[int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
else:
label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1])
label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1])
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
else:
if self.aug:
vf1 = vf(image1)
vf2 = vf(image2)
hf1 = hf(image1)
hf2 = hf(image2)
pair_normal1 = image1.view(-1, 3, self.image_size, self.image_size) # 按维数0(行)拼接
pair_normal2 = image2.view(-1, 3, self.image_size, self.image_size)
# pair_gray1 = gray1.view(-1, 3, self.image_size, self.image_size)
# pair_gray2 = gray2.view(-1, 3, self.image_size, self.image_size)
pair_hf1 = hf1.view(-1, 3, self.image_size, self.image_size)
pair_hf2 = hf2.view(-1, 3, self.image_size, self.image_size)
pair_vf1 = vf1.view(-1, 3, self.image_size, self.image_size)
pair_vf2 = vf2.view(-1, 3, self.image_size, self.image_size)
# pair = pair.view(-1,6,self.image_size,self.image_size)
# print(pair.shape)
pair1 = torch.cat((pair_normal1, pair_hf1, pair_vf1), dim=0)
pair2 = torch.cat((pair_normal2, pair_hf2, pair_vf2), dim=0)
# print(pair.shape)
label = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 1]),
dtype=int).tolist())
if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
label1 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1,
dtype=int).tolist())
label2 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1,
dtype=int).tolist())
elif self.data_name == 'CornellKinFace':
label1 = torch.LongTensor(
[int(int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0])) - 1])
label2 = torch.LongTensor(
[int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
else:
label1 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1,
dtype=int).tolist())
label2 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1,
dtype=int).tolist())
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
else:
pair1 = image1
pair2 = image2
label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 1])])
# image1_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1])-1])
# image2_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1])-1])
# image1_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0])-1])
# image2_label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0])-1])
if self.data_name == 'KinFaceW-I' or self.data_name == 'KinFaceW-II':
label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1]) - 1])
label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1]) - 1])
elif self.data_name=='CornellKinFace':
label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[1].split('.')[0]) - 1])
label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[1].split('.')[0]) - 1])
else:
label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1])
label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1])
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
return sample
class KinFaceWDataSetT(Dataset):
# image_path = os.path.join(str(Path.home()), '../KinFaceW-II/images')
# meta_data_path = os.path.join(str(Path.home()), '../KinFaceW-II/meta_data/')
# image_path = '../KinFaceW-II/images' # 静态变量
# meta_data_path = '../KinFaceW-II/meta_data/'
rel_lookup = {'fd': 'father-dau', 'fs': 'father-son', 'md': 'mother-dau', 'ms': 'mother-son', 'all': 'all',
'allthree': 'allthree'}
def __init__(self, image_path, meta_data_path, u_idx, relation, transform=None, mode='train',
aug=False):
"""
构造函数,也可以理解为是初始化函数,
:param relation:
:param transform:
:param test:
:param fold: 用来测试的那个fold
:param aug: 是否数据增强
"""
# 读取.mat文件
# pairs: 1 1 'fd_024_1.jpg' 'fd_024_2.jpg' 表示 fold,1/0, parent image, child image
self.meta_data_path = meta_data_path # 实例变量
self.image_path = image_path
self.relation = relation
if self.relation != 'allthree':
self.meta_data = sio.loadmat(os.path.join(meta_data_path, relation + '_pairs.mat'))
else:
strlist = meta_data_path.split('/')
data_name = strlist[2]
if data_name == 'KinFaceW-I' or data_name == 'KinFaceW-II':
# self.meta_data = sio.loadmat(
# os.path.join(meta_data_path, data_name + '_allthree_' + relation + '_pairs.mat'))
self.meta_data = sio.loadmat(
os.path.join(meta_data_path, 'KinFaceW_TSKinFace_all_pairs.mat'))
else: # 暂未处理
pass
self.relation = relation
self.transform = transform
self.mode = mode
self.image_size = 64
self.aug = aug
self.len = len(u_idx)
self.tvtid = u_idx
def __len__(self):
return self.len
def __getitem__(self, i):
"""
根据索引获取数据的方法,在使用迭代器不断地获取变量的时候,就会用到这个方法。其中的return函数后面的返回值,可以自定义返回值的数量
:param i:
:return:
"""
# assert(i < len(self))
# if self.test:
# i += self.trainlen
# print(self.imagesname1[i])
# print(self.imagesname1[i][:2])
strlist = self.meta_data_path.split('/')
data_name = strlist[2]
if self.relation != 'allthree':
if data_name == 'UBKinFace':
s1 = self.meta_data['pairs'][self.tvtid[i], 2][0]
s2 = self.meta_data['pairs'][self.tvtid[i], 3][0]
s1 = s1.replace('_new', '')
s2 = s2.replace('_new', '')
if self.relation == 'set1':
image_file1 = os.path.join(self.image_path, '03' + '/' + s1)
image_file2 = os.path.join(self.image_path, '01' + '/' + s2)
elif self.relation == 'set2':
image_file1 = os.path.join(self.image_path, '02' + '/' + s1)
image_file2 = os.path.join(self.image_path, '01' + '/' + s1)
else:
if data_name == 'TSKinFace' or data_name == 'CornellKinFace':
folder = KinFaceWDataSetT.rel_lookup[self.relation]
else:
folder = KinFaceWDataSetT.rel_lookup[self.meta_data['pairs'][self.tvtid[i], 2][0][:2]]
image_file1 = os.path.join(self.image_path,
folder + '/' + self.meta_data['pairs'][self.tvtid[i], 2][0])
image_file2 = os.path.join(self.image_path,
folder + '/' + self.meta_data['pairs'][self.tvtid[i], 3][0])
else:
temprelation = self.meta_data['pairs'][self.tvtid[i], 4][0]
folder = KinFaceWDataSet1.rel_lookup[temprelation]
temp_data_name = self.meta_data['pairs'][self.tvtid[i], 5][0]
imagepath0 = '../data/' + temp_data_name + '/images/' + folder
image_file1 = os.path.join(imagepath0 + '/' + self.meta_data['pairs'][self.tvtid[i], 2][0])
image_file2 = os.path.join(imagepath0 + '/' + self.meta_data['pairs'][self.tvtid[i], 3][0])
# print(self.meta_data['pairs'][self.tvtid[i], 2],'-',self.meta_data['pairs'][self.tvtid[i], 3])
# image1 = io.imread(image_file1).astype(np.float32)
# image2 = io.imread(image_file2).astype(np.float32)
image1 = io.imread(image_file1)
image2 = io.imread(image_file2)
gray1 = rgb2gray(image1)
gray2 = rgb2gray(image2)
image1 = Image.fromarray(image1)
image2 = Image.fromarray(image2)
# image1 = image1.transpose(2, 0, 1) #/ 255
# image2 = image2.transpose(2, 0, 1) #/ 255
if self.transform:
image1 = self.transform(image1) # 原始
# image1 = self.transform(Image.fromarray(image1)) #
# image2 = self.transform(Image.fromarray(image2)) #
# image1 = torch.from_numpy(image1.copy())
image2 = self.transform(image2) # 原始
# image2 = torch.from_numpy(image2.copy())
else:
image1 = torch.from_numpy(image1)
image2 = torch.from_numpy(image2)
# print(image1.shape)
if self.mode == 'test':
if self.aug:
vf1 = vf(image1)
vf2 = vf(image2)
hf1 = hf(image1)
hf2 = hf(image2)
pair_normal1 = image1.view(-1, 3, self.image_size, self.image_size) # 按维数0(行)拼接
pair_normal2 = image2.view(-1, 3, self.image_size, self.image_size)
# pair_gray1 = gray1.view(-1, 3, self.image_size, self.image_size)
# pair_gray2 = gray2.view(-1, 3, self.image_size, self.image_size)
pair_hf1 = hf1.view(-1, 3, self.image_size, self.image_size)
pair_hf2 = hf2.view(-1, 3, self.image_size, self.image_size)
pair_vf1 = vf1.view(-1, 3, self.image_size, self.image_size)
pair_vf2 = vf2.view(-1, 3, self.image_size, self.image_size)
# pair = pair.view(-1,6,self.image_size,self.image_size)
# print(pair.shape)
pair1 = torch.cat((pair_normal1, pair_hf1, pair_vf1), dim=0)
pair2 = torch.cat((pair_normal2, pair_hf2, pair_vf2), dim=0)
# print(pair.shape)
label = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 1]), dtype=int).tolist())
label1 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1,
dtype=int).tolist())
label2 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1,
dtype=int).tolist())
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
else:
pair1 = image1
pair2 = image2
label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 1])])
label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1])
label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1])
# print(int(self.meta_data['pairs'][self.tvtid[i], 4]) - 1,'-',int(self.meta_data['pairs'][self.tvtid[i], 5]) - 1)
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
else:
if self.aug:
vf1 = vf(image1)
vf2 = vf(image2)
hf1 = hf(image1)
hf2 = hf(image2)
pair_normal1 = image1.view(-1, 3, self.image_size, self.image_size) # 按维数0(行)拼接
pair_normal2 = image2.view(-1, 3, self.image_size, self.image_size)
# pair_gray1 = gray1.view(-1, 3, self.image_size, self.image_size)
# pair_gray2 = gray2.view(-1, 3, self.image_size, self.image_size)
pair_hf1 = hf1.view(-1, 3, self.image_size, self.image_size)
pair_hf2 = hf2.view(-1, 3, self.image_size, self.image_size)
pair_vf1 = vf1.view(-1, 3, self.image_size, self.image_size)
pair_vf2 = vf2.view(-1, 3, self.image_size, self.image_size)
# pair = pair.view(-1,6,self.image_size,self.image_size)
# print(pair.shape)
pair1 = torch.cat((pair_normal1, pair_hf1, pair_vf1), dim=0)
pair2 = torch.cat((pair_normal2, pair_hf2, pair_vf2), dim=0)
# print(pair.shape)
label = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 1]),
dtype=int).tolist())
label1 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1,
dtype=int).tolist())
label2 = torch.LongTensor(
np.full((3), int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1,
dtype=int).tolist())
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
else:
pair1 = image1
pair2 = image2
label = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 1])])
label1 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 2][0].split('_')[0]) - 1])
label2 = torch.LongTensor([int(self.meta_data['pairs'][self.tvtid[i], 3][0].split('_')[0]) - 1])
sample = {'pair1': pair1, 'pair2': pair2, 'label': label, 'label1': label1, 'label2': label2}
return sample
| 56.076709 | 131 | 0.513045 | 4,744 | 40,207 | 4.200675 | 0.044477 | 0.064231 | 0.078282 | 0.098103 | 0.954235 | 0.94174 | 0.937073 | 0.937073 | 0.931152 | 0.929145 | 0 | 0.045997 | 0.328425 | 40,207 | 716 | 132 | 56.155028 | 0.692023 | 0.152635 | 0 | 0.927757 | 0 | 0 | 0.059424 | 0.002918 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01711 | false | 0.005703 | 0.043726 | 0.005703 | 0.08365 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
91bda4874915e0f4e199ea433b5a160eb2091dcb | 8,805 | py | Python | datalad_osf/osfclient/osfclient/tests/test_fetching.py | adswa/datalad-osf-2 | 25988f898ffc6f489c0855933136f39f79cf8c65 | [
"BSD-3-Clause"
] | 80 | 2017-10-24T05:06:00.000Z | 2022-02-20T18:44:17.000Z | datalad_osf/osfclient/osfclient/tests/test_fetching.py | adswa/datalad-osf-2 | 25988f898ffc6f489c0855933136f39f79cf8c65 | [
"BSD-3-Clause"
] | 133 | 2017-03-07T22:36:54.000Z | 2017-10-24T04:43:57.000Z | datalad_osf/osfclient/osfclient/tests/test_fetching.py | adswa/datalad-osf-2 | 25988f898ffc6f489c0855933136f39f79cf8c65 | [
"BSD-3-Clause"
] | 37 | 2017-10-26T03:34:17.000Z | 2021-11-17T05:24:10.000Z | """Test `osf fetch` command."""
import pytest
import mock
from mock import call, patch, mock_open
from osfclient import OSF
from osfclient.cli import fetch
from osfclient.models import Storage
from osfclient.tests.mocks import MockProject
from osfclient.tests.mocks import MockArgs
@patch('osfclient.cli.makedirs')
@patch('osfclient.cli.os.path.exists', return_value=False)
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_file(OSF_project, os_path_exists, os_makedirs):
    """`osf fetch` writes the remote file to a same-named local file."""
    args = MockArgs(project='1234', remote='osfstorage/a/a/a')
    opener = mock_open()

    with patch('osfclient.cli.open', opener):
        fetch(args)

    # the right project was looked up exactly once
    OSF_project.assert_called_once_with('1234')
    # and the storage named in the remote path was walked
    store = OSF_project.return_value.storages[0]
    assert 'osfstorage' == store._name_mock.return_value

    # with no --local given, the file lands in the current
    # directory under its remote name
    assert call('a', 'wb') in opener.mock_calls
@patch('osfclient.cli.makedirs')
@patch('osfclient.cli.os.path.exists', return_value=False)
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_file_local_name_specified(OSF_project, os_path_exists,
                                         os_makedirs):
    """`osf fetch remote local` writes the remote file under the local name.

    Also checks that only the matching remote file is inspected and that no
    directory is created for a bare (directory-less) local filename.
    """
    # Consistency fix: patch via ``patch.object(OSF, 'project')`` like every
    # other test in this module, instead of the string target
    # ``'osfclient.cli.OSF.project'`` — both patch the same class attribute.
    args = MockArgs(project='1234', remote='osfstorage/a/a/a',
                    local='foobar.txt')

    mock_open_func = mock_open()

    with patch('osfclient.cli.open', mock_open_func):
        fetch(args)

    OSF_project.assert_called_once_with('1234')

    # the storage named in the remote path was selected
    project = OSF_project.return_value
    store = project._storage_mock.return_value
    assert store._name_mock.return_value == 'osfstorage'

    # the first (matching) file was path-checked and downloaded ...
    expected = [call._path_mock(), call.write_to(mock_open_func())]
    assert expected == store.files[0].mock_calls
    # ... while the second file was never touched
    assert not store.files[1].mock_calls

    # the download was written to the requested local filename,
    # and no directories were created on the way
    assert mock.call('foobar.txt', 'wb') in mock_open_func.mock_calls
    assert not os_makedirs.called
@patch('osfclient.cli.makedirs')
@patch('osfclient.cli.os.path.exists', return_value=False)
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_file_local_dir_specified(OSF_project, os_path_exists,
                                        os_makedirs):
    """Fetching into a missing subdirectory creates the directory first."""
    args = MockArgs(project='1234', remote='osfstorage/a/a/a',
                    local='subdir/foobar.txt')
    opener = mock_open()

    with patch('osfclient.cli.open', opener):
        fetch(args)

    OSF_project.assert_called_once_with('1234')

    # the storage named in the remote path was consulted
    store = OSF_project.return_value.storages[0]
    assert 'osfstorage' == store._name_mock.return_value

    # the file was opened under the requested relative path, and the
    # missing parent directory was created along the way
    assert call('subdir/foobar.txt', 'wb') in opener.mock_calls
    assert call('subdir', exist_ok=True) in os_makedirs.mock_calls
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_local_file_exists(OSF_project):
    """Fetching onto an existing local file aborts instead of overwriting."""
    args = MockArgs(project='1234', remote='osfstorage/a/a/a',
                    local='subdir/foobar.txt')

    def exists(path):
        # pretend every path exists except the osfcli config file
        return path != ".osfcli.config"

    with patch('osfclient.cli.os.path.exists', side_effect=exists):
        with pytest.raises(SystemExit) as e:
            fetch(args)

    assert 'already exists, not overwriting' in e.value.args[0]
@patch('osfclient.cli.makedirs')
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_local_file_exists_force(OSF_project, os_makedirs):
    """`osf fetch --force` overwrites an existing local file."""
    args = MockArgs(project='1234', remote='osfstorage/a/a/a', force=True)

    def exists(path):
        # everything exists except the osfcli config file
        return path != ".osfcli.config"

    opener = mock_open()
    with patch('osfclient.cli.open', opener), \
            patch('osfclient.cli.os.path.exists', side_effect=exists):
        fetch(args)

    OSF_project.assert_called_once_with('1234')

    # the expected storage was walked and the file rewritten in place,
    # despite the local copy already existing
    store = OSF_project.return_value.storages[0]
    assert 'osfstorage' == store._name_mock.return_value
    assert call('a', 'wb') in opener.mock_calls
@patch('osfclient.cli.makedirs')
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_local_file_exists_update_files_differ(OSF_project, os_makedirs):
    """`osf fetch --update` overwrites when local and remote checksums differ."""
    args = MockArgs(project='1234', remote='osfstorage/a/a/a', update=True)

    def exists(path):
        # everything exists except the osfcli config file
        return path != ".osfcli.config"

    def simple_checksum(file_path):
        # a local hash that cannot match the remote's
        return '1' * 32

    opener = mock_open()
    with patch('osfclient.cli.open', opener), \
            patch('osfclient.cli.os.path.exists', side_effect=exists), \
            patch('osfclient.cli.checksum', side_effect=simple_checksum):
        fetch(args)

    OSF_project.assert_called_once_with('1234')

    store = OSF_project.return_value.storages[0]
    assert 'osfstorage' == store._name_mock.return_value

    # the local copy differs from the remote, so it gets rewritten
    assert call('a', 'wb') in opener.mock_calls
@patch('osfclient.cli.makedirs')
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_local_file_exists_update_files_match(OSF_project, os_makedirs):
    """`osf fetch --update` leaves a local file alone if it matches the remote."""
    args = MockArgs(project='1234', remote='osfstorage/a/a/a', update=True)

    def exists(path):
        # everything exists except the osfcli config file
        return path != ".osfcli.config"

    def simple_checksum(file_path):
        # a local hash identical to the remote's
        return '0' * 32

    opener = mock_open()
    with patch('osfclient.cli.open', opener), \
            patch('osfclient.cli.os.path.exists', side_effect=exists), \
            patch('osfclient.cli.checksum', side_effect=simple_checksum):
        fetch(args)

    OSF_project.assert_called_once_with('1234')

    store = OSF_project.return_value.storages[0]
    assert 'osfstorage' == store._name_mock.return_value

    # the local copy already matches the remote, so nothing is written
    assert call('a', 'wb') not in opener.mock_calls
@patch('osfclient.cli.makedirs')
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_fetch_local_file_exists_force_overrides_update(OSF_project, os_makedirs):
    """With both flags set, --force wins over --update.

    The local file is rewritten even though its checksum matches the remote.
    """
    args = MockArgs(project='1234', remote='osfstorage/a/a/a', force=True,
                    update=True)

    def exists(path):
        # everything exists except the osfcli config file
        return path != ".osfcli.config"

    def simple_checksum(file_path):
        # a local hash identical to the remote's
        return '0' * 32

    opener = mock_open()
    with patch('osfclient.cli.open', opener), \
            patch('osfclient.cli.os.path.exists', side_effect=exists), \
            patch('osfclient.cli.checksum', side_effect=simple_checksum):
        fetch(args)

    OSF_project.assert_called_once_with('1234')

    store = OSF_project.return_value.storages[0]
    assert 'osfstorage' == store._name_mock.return_value

    # local matches remote and update is True, yet force overrides
    # update, so the file is written anyway
    assert call('a', 'wb') in opener.mock_calls
| 36.086066 | 82 | 0.69347 | 1,221 | 8,805 | 4.816544 | 0.095823 | 0.040809 | 0.075157 | 0.053562 | 0.856147 | 0.841013 | 0.841013 | 0.834212 | 0.834212 | 0.798164 | 0 | 0.01569 | 0.203748 | 8,805 | 243 | 83 | 36.234568 | 0.823135 | 0.206474 | 0 | 0.753425 | 0 | 0 | 0.163259 | 0.067521 | 0 | 0 | 0 | 0 | 0.178082 | 1 | 0.109589 | false | 0 | 0.054795 | 0.020548 | 0.253425 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
91ec234781f99f4cb926a2c6e08dfab0725240c6 | 2,285 | py | Python | tests/trainer/flags/test_val_check_interval.py | DKandrew/pytorch-lightning | 1f8ff7c48c1b173928f5dcd652a81d1c4a7ab273 | [
"Apache-2.0"
] | null | null | null | tests/trainer/flags/test_val_check_interval.py | DKandrew/pytorch-lightning | 1f8ff7c48c1b173928f5dcd652a81d1c4a7ab273 | [
"Apache-2.0"
] | null | null | null | tests/trainer/flags/test_val_check_interval.py | DKandrew/pytorch-lightning | 1f8ff7c48c1b173928f5dcd652a81d1c4a7ab273 | [
"Apache-2.0"
] | null | null | null | import pytest
from tests.base import SimpleModule
from pytorch_lightning.trainer import Trainer
@pytest.mark.parametrize('max_epochs', [1, 2, 3])
def test_val_check_interval_1(tmpdir, max_epochs):
    """With val_check_interval=1.0, validation starts once per training epoch."""
    class EpochCountingModel(SimpleModule):
        def __init__(self):
            super().__init__()
            self.train_epoch_calls = 0
            self.val_epoch_calls = 0

        def on_train_epoch_start(self) -> None:
            self.train_epoch_calls = self.train_epoch_calls + 1

        def on_validation_epoch_start(self) -> None:
            # the sanity check also starts a validation epoch; skip counting it
            if self.trainer.running_sanity_check:
                return
            self.val_epoch_calls += 1

    model = EpochCountingModel()
    trainer = Trainer(max_epochs=max_epochs, val_check_interval=1.0, logger=False)
    trainer.fit(model)

    assert model.val_epoch_calls == max_epochs
@pytest.mark.parametrize('max_epochs', [1, 2, 3])
def test_val_check_interval_quarter(tmpdir, max_epochs):
    """With val_check_interval=0.25, validation starts four times per epoch."""
    class EpochCountingModel(SimpleModule):
        def __init__(self):
            super().__init__()
            self.train_epoch_calls = 0
            self.val_epoch_calls = 0

        def on_train_epoch_start(self) -> None:
            self.train_epoch_calls = self.train_epoch_calls + 1

        def on_validation_epoch_start(self) -> None:
            # the sanity check also starts a validation epoch; skip counting it
            if self.trainer.running_sanity_check:
                return
            self.val_epoch_calls += 1

    model = EpochCountingModel()
    trainer = Trainer(max_epochs=max_epochs, val_check_interval=0.25, logger=False)
    trainer.fit(model)

    assert model.val_epoch_calls == max_epochs * 4
@pytest.mark.parametrize('max_epochs', [1, 2, 3])
def test_val_check_interval_third(tmpdir, max_epochs):
    """With val_check_interval=0.33, validation starts three times per epoch."""
    class EpochCountingModel(SimpleModule):
        def __init__(self):
            super().__init__()
            self.train_epoch_calls = 0
            self.val_epoch_calls = 0

        def on_train_epoch_start(self) -> None:
            self.train_epoch_calls = self.train_epoch_calls + 1

        def on_validation_epoch_start(self) -> None:
            # the sanity check also starts a validation epoch; skip counting it
            if self.trainer.running_sanity_check:
                return
            self.val_epoch_calls += 1

    model = EpochCountingModel()
    trainer = Trainer(max_epochs=max_epochs, val_check_interval=0.33, logger=False)
    trainer.fit(model)

    assert model.val_epoch_calls == max_epochs * 3
| 26.882353 | 56 | 0.632385 | 288 | 2,285 | 4.628472 | 0.170139 | 0.101275 | 0.087772 | 0.085521 | 0.921981 | 0.921981 | 0.921981 | 0.921981 | 0.921981 | 0.921981 | 0 | 0.019324 | 0.275274 | 2,285 | 84 | 57 | 27.202381 | 0.785628 | 0 | 0 | 0.761905 | 0 | 0 | 0.013129 | 0 | 0 | 0 | 0 | 0 | 0.047619 | 1 | 0.190476 | false | 0 | 0.047619 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
91ef6c3b1b25f2a484728f7429cf3e93b4526ef7 | 110,887 | py | Python | scripts/pylint_custom_plugin/tests/test_pylint_custom_plugins.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | scripts/pylint_custom_plugin/tests/test_pylint_custom_plugins.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | scripts/pylint_custom_plugin/tests/test_pylint_custom_plugins.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import astroid
import pylint.testutils
import requests
from azure.core import PipelineClient
from azure.core.configuration import Configuration
from pylint_custom_plugin import pylint_guidelines_checker as checker
class TestClientMethodsHaveTracingDecorators(pylint.testutils.CheckerTestCase):
    """Unit tests for the ClientMethodsHaveTracingDecorators pylint checker.

    Each test parses a small code fragment with astroid, visits the resulting
    function nodes with the checker, and asserts whether tracing-decorator
    violations are reported.
    """
    CHECKER_CLASS = checker.ClientMethodsHaveTracingDecorators

    def test_ignores_constructor(self):
        # Constructors require no tracing decorator.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, **kwargs): #@
                pass
        """)

        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_private_method(self):
        # Underscore-prefixed (private) methods are exempt.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def _private_method(self, **kwargs): #@
                pass
        """)

        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_private_method_async(self):
        # Async private methods are exempt as well.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            async def _private_method(self, **kwargs): #@
                pass
        """)

        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node)

    def test_ignores_methods_with_decorators(self):
        # Sync client methods decorated with @distributed_trace pass the check.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        from azure.core.tracing.decorator import distributed_trace
        class SomeClient(): #@
            @distributed_trace
            def create_configuration(self, **kwargs): #@
                pass
            @distributed_trace
            def get_thing(self, **kwargs): #@
                pass
            @distributed_trace
            def list_thing(self, **kwargs): #@
                pass
        """)

        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)
            self.checker.visit_functiondef(func_node_c)

    def test_ignores_async_methods_with_decorators(self):
        # Async client methods decorated with @distributed_trace_async pass.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        from azure.core.tracing.decorator_async import distributed_trace_async
        class SomeClient(): #@
            @distributed_trace_async
            async def create_configuration(self, **kwargs): #@
                pass
            @distributed_trace_async
            async def get_thing(self, **kwargs): #@
                pass
            @distributed_trace_async
            async def list_thing(self, **kwargs): #@
                pass
        """)

        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)
            self.checker.visit_asyncfunctiondef(func_node_c)

    def test_finds_sync_decorator_on_async_method(self):
        # The sync decorator on async methods must raise the "-async" message.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        from azure.core.tracing.decorator import distributed_trace
        class SomeClient(): #@
            @distributed_trace
            async def create_configuration(self, **kwargs): #@
                pass
            @distributed_trace
            async def get_thing(self, **kwargs): #@
                pass
            @distributed_trace
            async def list_thing(self, **kwargs): #@
                pass
        """)

        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-tracing-decorator-async", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-tracing-decorator-async", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-tracing-decorator-async", node=func_node_c
            ),
        ):
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)
            self.checker.visit_asyncfunctiondef(func_node_c)

    def test_finds_async_decorator_on_sync_method(self):
        # The async decorator on sync methods must raise the sync message.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        from azure.core.tracing.decorator_async import distributed_trace_async
        class SomeClient(): #@
            @distributed_trace_async
            def create_configuration(self, **kwargs): #@
                pass
            @distributed_trace_async
            def get_thing(self, **kwargs): #@
                pass
            @distributed_trace_async
            def list_thing(self, **kwargs): #@
                pass
        """)

        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-tracing-decorator", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-tracing-decorator", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-tracing-decorator", node=func_node_c
            ),
        ):
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)
            self.checker.visit_functiondef(func_node_c)

    def test_ignores_other_decorators(self):
        # Additional unrelated decorators must not hide a present
        # @distributed_trace, in either decorator order.
        class_node, func_node_a, func_node_b = astroid.extract_node(
            """
        from azure.core.tracing.decorator import distributed_trace
        class SomeClient(): #@
            @classmethod
            @distributed_trace
            def download_thing(self, some, **kwargs): #@
                pass
            @distributed_trace
            @decorator
            def do_thing(self, some, **kwargs): #@
                pass
            """
        )

        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)

    def test_ignores_other_decorators_async(self):
        # Same as above, for @distributed_trace_async on async methods.
        class_node, func_node_a, func_node_b = astroid.extract_node(
            """
        from azure.core.tracing.decorator_async import distributed_trace_async
        class SomeClient(): #@
            @classmethod
            @distributed_trace_async
            async def download_thing(self, some, **kwargs): #@
                pass
            @distributed_trace_async
            @decorator
            async def do_thing(self, some, **kwargs): #@
                pass
            """
        )

        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)

    def test_ignores_non_client_method(self):
        # Methods of a non-client class ("SomethingElse") are not checked.
        class_node, func_node_a, func_node_b = astroid.extract_node(
            """
        class SomethingElse(): #@
            def download_thing(self, some, **kwargs): #@
                pass
            @classmethod
            async def do_thing(self, some, **kwargs): #@
                pass
            """
        )

        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)

    def test_guidelines_link_active(self):
        # NOTE(review): live-network smoke test — verifies the guidelines URL
        # referenced by this checker still resolves with HTTP 200.
        url = "https://azure.github.io/azure-sdk/python_implementation.html#distributed-tracing"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientsDoNotUseStaticMethods(pylint.testutils.CheckerTestCase):
    """Tests for checker.ClientsDoNotUseStaticMethods.

    Public methods on client classes should not be decorated with
    @staticmethod. Constructors, private (underscore-prefixed) methods,
    non-client classes, and other decorators are exempt.
    """

    CHECKER_CLASS = checker.ClientsDoNotUseStaticMethods

    def test_ignores_constructor(self):
        # __init__ is never flagged.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_private_method(self):
        # A private static method is exempt.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            def _private_method(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_private_method_async(self):
        # Async variant of the private-method exemption.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            async def _private_method(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node)

    def test_ignores_methods_with_other_decorators(self):
        # Decorators other than @staticmethod do not trigger the message.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace
            def create_configuration(self): #@
                pass
            @distributed_trace
            def get_thing(self): #@
                pass
            @distributed_trace
            def list_thing(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)
            self.checker.visit_functiondef(func_node_c)

    def test_ignores_async_methods_with_other_decorators(self):
        # Async variant: non-staticmethod decorators are fine.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace_async
            async def create_configuration(self): #@
                pass
            @distributed_trace_async
            async def get_thing(self): #@
                pass
            @distributed_trace_async
            async def list_thing(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)
            self.checker.visit_asyncfunctiondef(func_node_c)

    def test_finds_staticmethod_on_async_method(self):
        # Each public async staticmethod yields one message, in visit order.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            async def create_configuration(self): #@
                pass
            @staticmethod
            async def get_thing(self): #@
                pass
            @staticmethod
            async def list_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-should-not-use-static-method", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-should-not-use-static-method", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-should-not-use-static-method", node=func_node_c
            ),
        ):
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)
            self.checker.visit_asyncfunctiondef(func_node_c)

    def test_finds_staticmethod_on_sync_method(self):
        # Sync variant: each public staticmethod yields one message.
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            def create_configuration(self): #@
                pass
            @staticmethod
            def get_thing(self): #@
                pass
            @staticmethod
            def list_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-should-not-use-static-method", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-should-not-use-static-method", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-should-not-use-static-method", node=func_node_c
            ),
        ):
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)
            self.checker.visit_functiondef(func_node_c)

    def test_ignores_other_multiple_decorators(self):
        # Stacked decorators that do not include @staticmethod are fine.
        class_node, func_node_a, func_node_b = astroid.extract_node(
            """
        class SomeClient(): #@
            @classmethod
            @distributed_trace
            def download_thing(self, some, **kwargs): #@
                pass

            @distributed_trace
            @decorator
            def do_thing(self, some, **kwargs): #@
                pass
        """
        )
        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)

    def test_ignores_other_multiple_decorators_async(self):
        # Async variant of stacked non-staticmethod decorators.
        class_node, func_node_a, func_node_b = astroid.extract_node(
            """
        class SomeClient(): #@
            @classmethod
            @distributed_trace_async
            async def download_thing(self, some, **kwargs): #@
                pass

            @distributed_trace_async
            @decorator
            async def do_thing(self, some, **kwargs): #@
                pass
        """
        )
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)

    def test_ignores_non_client_method(self):
        # Static methods on non-client classes are not reported.
        class_node, func_node_a, func_node_b = astroid.extract_node(
            """
        class SomethingElse(): #@
            @staticmethod
            def download_thing(self, some, **kwargs): #@
                pass

            @staticmethod
            async def do_thing(self, some, **kwargs): #@
                pass
        """
        )
        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)

    def test_guidelines_link_active(self):
        # Live network call: verify the guideline URL still resolves.
        url = "https://azure.github.io/azure-sdk/python_implementation.html#method-signatures"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientHasApprovedMethodNamePrefix(pylint.testutils.CheckerTestCase):
    """Tests for checker.ClientHasApprovedMethodNamePrefix.

    Public client-method names must start with an approved verb prefix
    (create/get/list/upsert/set/update/replace/append/add/delete/remove/
    begin, plus the from_ factory prefix and *_exists checks). The checker
    is invoked on the class node, not the individual methods.
    """

    CHECKER_CLASS = checker.ClientHasApprovedMethodNamePrefix

    def test_ignores_constructor(self):
        # __init__ has no verb prefix but is exempt.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_private_method(self):
        # Private (underscore-prefixed) methods are exempt.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def _private_method(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_if_exists_suffix(self):
        # "check_if_exists"-style names are allowed.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def check_if_exists(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_from_prefix(self):
        # "from_" alternate-constructor names are allowed.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def from_connection_string(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_approved_prefix_names(self):
        # One method for each approved verb prefix; none should be flagged.
        class_node, func_node_a, func_node_b, func_node_c, func_node_d, func_node_e, func_node_f, func_node_g, \
            func_node_h, func_node_i, func_node_j, func_node_k, func_node_l = astroid.extract_node("""
        class SomeClient(): #@
            def create_configuration(self): #@
                pass
            def get_thing(self): #@
                pass
            def list_thing(self): #@
                pass
            def upsert_thing(self): #@
                pass
            def set_thing(self): #@
                pass
            def update_thing(self): #@
                pass
            def replace_thing(self): #@
                pass
            def append_thing(self): #@
                pass
            def add_thing(self): #@
                pass
            def delete_thing(self): #@
                pass
            def remove_thing(self): #@
                pass
            def begin_thing(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_non_client_with_unapproved_prefix_names(self):
        # Unapproved names on a non-client class are not reported.
        class_node, function_node = astroid.extract_node(
            """
        class SomethingElse(): #@
            def download_thing(self, some, **kwargs): #@
                pass
        """
        )
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_nested_function_with_unapproved_prefix_names(self):
        # Only top-level client methods are checked, not nested functions.
        class_node, function_node = astroid.extract_node(
            """
        class SomeClient(): #@
            def create_configuration(self, **kwargs): #@
                def nested(hello, world):
                    pass
        """
        )
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_finds_unapproved_prefix_names(self):
        # Sixteen unapproved prefixes (wrong verbs plus plural forms of the
        # approved verbs); one message per method, in definition order.
        class_node, func_node_a, func_node_b, func_node_c, func_node_d, func_node_e, func_node_f, func_node_g, \
            func_node_h, func_node_i, func_node_j, func_node_k, func_node_l, func_node_m, func_node_n, func_node_o, \
            func_node_p = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace
            def build_configuration(self): #@
                pass
            def generate_thing(self): #@
                pass
            def make_thing(self): #@
                pass
            def insert_thing(self): #@
                pass
            def put_thing(self): #@
                pass
            def creates_configuration(self): #@
                pass
            def gets_thing(self): #@
                pass
            def lists_thing(self): #@
                pass
            def upserts_thing(self): #@
                pass
            def sets_thing(self): #@
                pass
            def updates_thing(self): #@
                pass
            def replaces_thing(self): #@
                pass
            def appends_thing(self): #@
                pass
            def adds_thing(self): #@
                pass
            def deletes_thing(self): #@
                pass
            def removes_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_c
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_d
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_e
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_f
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_g
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_h
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_i
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_j
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_k
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_l
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_m
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_n
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_o
            ),
            pylint.testutils.Message(
                msg_id="unapproved-client-method-name-prefix", node=func_node_p
            )
        ):
            self.checker.visit_classdef(class_node)

    def test_guidelines_link_active(self):
        # Live network call: verify the guideline URL still resolves.
        url = "https://azure.github.io/azure-sdk/python_design.html#service-operations"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientConstructorTakesCorrectParameters(pylint.testutils.CheckerTestCase):
    """Tests for checker.ClientConstructorTakesCorrectParameters.

    Client constructors must accept a credential parameter and **kwargs;
    each omission produces its own message.
    """

    CHECKER_CLASS = checker.ClientConstructorTakesCorrectParameters

    def test_finds_correct_params(self):
        # credential + **kwargs present: no messages.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, thing_url, credential, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_non_constructor_methods(self):
        # Only __init__ is checked.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def create_configuration(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_non_client_constructor_methods(self):
        # Constructors of non-client classes are exempt.
        class_node, function_node = astroid.extract_node("""
        class SomethingElse(): #@
            def __init__(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_finds_constructor_without_kwargs(self):
        # credential present but **kwargs missing.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, thing_url, credential=None): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="missing-client-constructor-parameter-kwargs", node=function_node
            )
        ):
            self.checker.visit_functiondef(function_node)

    def test_finds_constructor_without_credentials(self):
        # **kwargs present but credential missing.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, thing_url, **kwargs): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="missing-client-constructor-parameter-credential", node=function_node
            )
        ):
            self.checker.visit_functiondef(function_node)

    def test_finds_constructor_with_no_params(self):
        # Both missing: two messages, credential reported first.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="missing-client-constructor-parameter-credential", node=function_node
            ),
            pylint.testutils.Message(
                msg_id="missing-client-constructor-parameter-kwargs", node=function_node
            )
        ):
            self.checker.visit_functiondef(function_node)

    def test_guidelines_link_active(self):
        # Live network call: verify the guideline URL still resolves.
        url = "https://azure.github.io/azure-sdk/python_design.html#client-configuration"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientMethodsUseKwargsWithMultipleParameters(pylint.testutils.CheckerTestCase):
    """Tests for checker.ClientMethodsUseKwargsWithMultipleParameters.

    Client methods may take at most five positional parameters beyond
    ``self``; additional parameters must be keyword-only or **kwargs.
    """

    CHECKER_CLASS = checker.ClientMethodsUseKwargsWithMultipleParameters

    def test_ignores_method_abiding_to_guidelines(self):
        # Up to five positional params (plus keyword-only / *args / **kwargs)
        # is acceptable.
        class_node, function_node, function_node_a, function_node_b, function_node_c, function_node_d, \
            function_node_e, function_node_f, function_node_g, function_node_h, function_node_i, function_node_j, \
            function_node_k, function_node_l, function_node_m = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace
            def do_thing(): #@
                pass
            def do_thing_a(self): #@
                pass
            def do_thing_b(self, one): #@
                pass
            def do_thing_c(self, one, two): #@
                pass
            def do_thing_d(self, one, two, three): #@
                pass
            def do_thing_e(self, one, two, three, four): #@
                pass
            def do_thing_f(self, one, two, three, four, five): #@
                pass
            def do_thing_g(self, one, two, three, four, five, six=6): #@
                pass
            def do_thing_h(self, one, two, three, four, five, six=6, seven=7): #@
                pass
            def do_thing_i(self, one, two, three, four, five, *, six=6, seven=7): #@
                pass
            def do_thing_j(self, one, two, three, four, five, *, six=6, seven=7): #@
                pass
            def do_thing_k(self, one, two, three, four, five, **kwargs): #@
                pass
            def do_thing_l(self, one, two, three, four, five, *args, **kwargs): #@
                pass
            def do_thing_m(self, one, two, three, four, five, *args, six, seven=7, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)
            self.checker.visit_functiondef(function_node_c)
            self.checker.visit_functiondef(function_node_d)
            self.checker.visit_functiondef(function_node_e)
            self.checker.visit_functiondef(function_node_f)
            self.checker.visit_functiondef(function_node_g)
            self.checker.visit_functiondef(function_node_h)
            self.checker.visit_functiondef(function_node_i)
            self.checker.visit_functiondef(function_node_j)
            self.checker.visit_functiondef(function_node_k)
            self.checker.visit_functiondef(function_node_l)
            self.checker.visit_functiondef(function_node_m)

    def test_ignores_method_abiding_to_guidelines_async(self):
        # Async variant of the compliant signatures above.
        class_node, function_node, function_node_a, function_node_b, function_node_c, function_node_d, \
            function_node_e, function_node_f, function_node_g, function_node_h, function_node_i, function_node_j, \
            function_node_k, function_node_l, function_node_m = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace_async
            async def do_thing(): #@
                pass
            async def do_thing_a(self): #@
                pass
            async def do_thing_b(self, one): #@
                pass
            async def do_thing_c(self, one, two): #@
                pass
            async def do_thing_d(self, one, two, three): #@
                pass
            async def do_thing_e(self, one, two, three, four): #@
                pass
            async def do_thing_f(self, one, two, three, four, five): #@
                pass
            async def do_thing_g(self, one, two, three, four, five, six=6): #@
                pass
            async def do_thing_h(self, one, two, three, four, five, six=6, seven=7): #@
                pass
            async def do_thing_i(self, one, two, three, four, five, *, six=6, seven=7): #@
                pass
            async def do_thing_j(self, one, two, three, four, five, *, six=6, seven=7): #@
                pass
            async def do_thing_k(self, one, two, three, four, five, **kwargs): #@
                pass
            async def do_thing_l(self, one, two, three, four, five, *args, **kwargs): #@
                pass
            async def do_thing_m(self, one, two, three, four, five, *args, six, seven=7, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node)
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)
            self.checker.visit_asyncfunctiondef(function_node_c)
            self.checker.visit_asyncfunctiondef(function_node_d)
            self.checker.visit_asyncfunctiondef(function_node_e)
            self.checker.visit_asyncfunctiondef(function_node_f)
            self.checker.visit_asyncfunctiondef(function_node_g)
            self.checker.visit_asyncfunctiondef(function_node_h)
            self.checker.visit_asyncfunctiondef(function_node_i)
            self.checker.visit_asyncfunctiondef(function_node_j)
            self.checker.visit_asyncfunctiondef(function_node_k)
            self.checker.visit_asyncfunctiondef(function_node_l)
            self.checker.visit_asyncfunctiondef(function_node_m)

    def test_finds_methods_with_too_many_positional_args(self):
        # Six positional parameters (after self) trigger the message
        # regardless of any trailing keyword-only / *args / **kwargs.
        class_node, function_node, function_node_a, function_node_b, function_node_c, function_node_d, \
            function_node_e, function_node_f = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace
            def do_thing(self, one, two, three, four, five, six): #@
                pass
            def do_thing_a(self, one, two, three, four, five, six, seven=7): #@
                pass
            def do_thing_b(self, one, two, three, four, five, six, *, seven): #@
                pass
            def do_thing_c(self, one, two, three, four, five, six, *, seven, eight, nine): #@
                pass
            def do_thing_d(self, one, two, three, four, five, six, **kwargs): #@
                pass
            def do_thing_e(self, one, two, three, four, five, six, *args, seven, eight, nine): #@
                pass
            def do_thing_f(self, one, two, three, four, five, six, *args, seven=7, eight=8, nine=9): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_c
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_d
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_e
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_f
            )
        ):
            self.checker.visit_functiondef(function_node)
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)
            self.checker.visit_functiondef(function_node_c)
            self.checker.visit_functiondef(function_node_d)
            self.checker.visit_functiondef(function_node_e)
            self.checker.visit_functiondef(function_node_f)

    def test_finds_methods_with_too_many_positional_args_async(self):
        # Async variant of the too-many-positional-args cases.
        class_node, function_node, function_node_a, function_node_b, function_node_c, function_node_d, \
            function_node_e, function_node_f = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace_async
            async def do_thing(self, one, two, three, four, five, six): #@
                pass
            async def do_thing_a(self, one, two, three, four, five, six, seven=7): #@
                pass
            async def do_thing_b(self, one, two, three, four, five, six, *, seven): #@
                pass
            async def do_thing_c(self, one, two, three, four, five, six, *, seven, eight, nine): #@
                pass
            async def do_thing_d(self, one, two, three, four, five, six, **kwargs): #@
                pass
            async def do_thing_e(self, one, two, three, four, five, six, *args, seven, eight, nine): #@
                pass
            async def do_thing_f(self, one, two, three, four, five, six, *args, seven=7, eight=8, nine=9): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_c
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_d
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_e
            ),
            pylint.testutils.Message(
                msg_id="client-method-has-more-than-5-positional-arguments", node=function_node_f
            )
        ):
            self.checker.visit_asyncfunctiondef(function_node)
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)
            self.checker.visit_asyncfunctiondef(function_node_c)
            self.checker.visit_asyncfunctiondef(function_node_d)
            self.checker.visit_asyncfunctiondef(function_node_e)
            self.checker.visit_asyncfunctiondef(function_node_f)

    def test_ignores_non_client_methods(self):
        # Non-client classes may have any number of positional params.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomethingElse(): #@
            def do_thing(self, one, two, three, four, five, six): #@
                pass
            @distributed_trace_async
            async def do_thing(self, one, two, three, four, five, six): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_guidelines_link_active(self):
        # Live network call: verify the guideline URL still resolves.
        url = "https://azure.github.io/azure-sdk/python_implementation.html#method-signatures"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientMethodsHaveTypeAnnotations(pylint.testutils.CheckerTestCase):
    """Tests for checker.ClientMethodsHaveTypeAnnotations.

    Public client methods must annotate all parameters AND the return value,
    using either PEP 484 annotations or "# type:" comments. A method missing
    either half gets client-method-missing-type-annotations.
    """

    CHECKER_CLASS = checker.ClientMethodsHaveTypeAnnotations

    def test_ignores_correct_type_annotations(self):
        # Fully annotated sync and async methods produce no messages.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing(self, one: str, two: int, three: bool, four: Union[str, thing], five: dict) -> int: #@
                pass
            async def do_thing(self, one: str, two: int, three: bool, four: Union[str, thing], five: dict) -> int: #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_ignores_correct_type_comments(self):
        # "# type:" comments count as annotations, in all three styles:
        # body comment, same-line comment, and per-parameter comments.
        class_node, function_node_a, function_node_b, function_node_c = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing_a(self, one, two, three, four, five): #@
                # type: (str, str, str, str, str) -> None
                pass

            def do_thing_b(self, one, two): # type: (str, str) -> int #@
                pass

            def do_thing_c(self, #@
                one, # type: str
                two, # type: str
                three, # type: str
                four, # type: str
                five # type: str
            ):
                # type: (...) -> int
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)
            self.checker.visit_functiondef(function_node_c)

    def test_ignores_correct_type_comments_async(self):
        # Async variant of the type-comment styles above.
        class_node, function_node_a, function_node_b, function_node_c = astroid.extract_node("""
        class SomeClient(): #@
            async def do_thing_a(self, one, two, three, four, five): #@
                # type: (str, str, str, str, str) -> None
                pass

            async def do_thing_b(self, one, two): # type: (str, str) -> int #@
                pass

            async def do_thing_c(self, #@
                one, # type: str
                two, # type: str
                three, # type: str
                four, # type: str
                five # type: str
            ):
                # type: (...) -> int
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)
            self.checker.visit_asyncfunctiondef(function_node_c)

    def test_ignores_no_parameter_method_with_annotations(self):
        # A parameterless method with a return annotation (either style) is fine.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing_a(self): #@
                # type: () -> None
                pass

            def do_thing_b(self) -> None: #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_ignores_no_parameter_method_with_annotations_async(self):
        # Async variant of the annotated parameterless methods.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            async def do_thing_a(self): #@
                # type: () -> None
                pass

            async def do_thing_b(self) -> None: #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_finds_no_parameter_method_without_annotations(self):
        # Even a method with only "self" needs a return annotation.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing(self): #@
                pass

            async def do_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_b
            ),
        ):
            self.checker.visit_functiondef(function_node_a)
            # BUGFIX: function_node_b is an "async def" node, so it must be
            # routed through the async visitor, as every sibling async test
            # does (the original called visit_functiondef here).
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_finds_method_missing_annotations(self):
        # No annotations at all on a sync method.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing(self, one, two, three): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node
            )
        ):
            self.checker.visit_functiondef(function_node)

    def test_finds_method_missing_annotations_async(self):
        # No annotations at all on an async method.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            async def do_thing(self, one, two, three): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node
            )
        ):
            self.checker.visit_asyncfunctiondef(function_node)

    def test_finds_constructor_without_annotations(self):
        # __init__ is held to the same annotation requirement.
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, one, two, three, four, five): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node
            )
        ):
            self.checker.visit_functiondef(function_node)

    def test_finds_missing_return_annotation_but_has_type_hints(self):
        # Parameter hints alone are not enough; the return must be annotated.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing_a(self, one: str, two: int, three: bool, four: Union[str, thing], five: dict): #@
                pass

            def do_thing_b(self, one, two, three, four, five): #@
                # type: (str, str, str, str, str)
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_b
            ),
        ):
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_finds_missing_return_annotation_but_has_type_hints_async(self):
        # Async variant: parameter hints without a return annotation.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            async def do_thing_a(self, one: str, two: int, three: bool, four: Union[str, thing], five: dict): #@
                pass

            async def do_thing_b(self, one, two, three, four, five): #@
                # type: (str, str, str, str, str)
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_b
            ),
        ):
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_finds_missing_annotations_but_has_return_hint(self):
        # A return hint alone is not enough; parameters must be annotated too.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing_a(self, one, two, three, four, five) -> None: #@
                pass

            def do_thing_b(self, one, two, three, four, five): #@
                # type: -> None
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_b
            )
        ):
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_finds_missing_annotations_but_has_return_hint_async(self):
        # Async variant: return hint without parameter annotations.
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            async def do_thing_a(self, one, two, three, four, five) -> None: #@
                pass

            async def do_thing_b(self, one, two, three, four, five): #@
                # type: -> None
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-type-annotations", node=function_node_b
            )
        ):
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_ignores_non_client_methods(self):
        # Methods on non-client classes are exempt.
        class_node, function_node = astroid.extract_node("""
        class SomethingElse(): #@
            def do_thing(self, one, two, three, four, five, six): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_private_methods(self):
        # BUGFIX: use a client class here. The original used "SomethingElse",
        # so the method was already skipped for being on a non-client and the
        # private-method exemption was never actually exercised (it duplicated
        # test_ignores_non_client_methods).
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def _do_thing(self, one, two, three, four, five, six): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_guidelines_link_active(self):
        # Live network call: verify the guideline URL still resolves.
        url = "https://azure.github.io/azure-sdk/python_implementation.html#types-or-not"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientHasKwargsInPoliciesForCreateConfigurationMethod(pylint.testutils.CheckerTestCase):
CHECKER_CLASS = checker.ClientHasKwargsInPoliciesForCreateConfigurationMethod
def test_ignores_config_policies_with_kwargs(self):
function_node_a, function_node_b = astroid.extract_node("""
def create_configuration(self, **kwargs): #@
config = Configuration(**kwargs)
config.headers_policy = StorageHeadersPolicy(**kwargs)
config.user_agent_policy = StorageUserAgentPolicy(**kwargs)
config.retry_policy = kwargs.get('retry_policy') or ExponentialRetry(**kwargs)
config.redirect_policy = RedirectPolicy(**kwargs)
config.logging_policy = StorageLoggingPolicy(**kwargs)
config.proxy_policy = ProxyPolicy(**kwargs)
return config
@staticmethod
def create_config(credential, api_version=None, **kwargs): #@
# type: (TokenCredential, Optional[str], Mapping[str, Any]) -> Configuration
if api_version is None:
api_version = KeyVaultClient.DEFAULT_API_VERSION
config = KeyVaultClient.get_configuration_class(api_version, aio=False)(credential, **kwargs)
config.authentication_policy = ChallengeAuthPolicy(credential, **kwargs)
return config
""")
with self.assertNoMessages():
self.checker.visit_functiondef(function_node_a)
self.checker.visit_functiondef(function_node_b)
def test_finds_config_policies_without_kwargs(self):
function_node_a, policy_a, policy_b, policy_c, function_node_b, policy_d = astroid.extract_node("""
def create_configuration(self, **kwargs): #@
config = Configuration(**kwargs)
config.headers_policy = StorageHeadersPolicy(**kwargs)
config.user_agent_policy = StorageUserAgentPolicy() #@
config.retry_policy = kwargs.get('retry_policy') or ExponentialRetry(**kwargs)
config.redirect_policy = RedirectPolicy(**kwargs)
config.logging_policy = StorageLoggingPolicy() #@
config.proxy_policy = ProxyPolicy() #@
return config
@staticmethod
def create_config(credential, api_version=None, **kwargs): #@
# type: (TokenCredential, Optional[str], Mapping[str, Any]) -> Configuration
if api_version is None:
api_version = KeyVaultClient.DEFAULT_API_VERSION
config = KeyVaultClient.get_configuration_class(api_version, aio=False)(credential, **kwargs)
config.authentication_policy = ChallengeAuthPolicy(credential) #@
return config
""")
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id="config-missing-kwargs-in-policy", node=policy_a
),
pylint.testutils.Message(
msg_id="config-missing-kwargs-in-policy", node=policy_b
),
pylint.testutils.Message(
msg_id="config-missing-kwargs-in-policy", node=policy_c
),
pylint.testutils.Message(
msg_id="config-missing-kwargs-in-policy", node=policy_d
)
):
self.checker.visit_functiondef(function_node_a)
self.checker.visit_functiondef(function_node_b)
def test_ignores_policies_outside_create_config(self):
function_node_a, function_node_b = astroid.extract_node("""
def _configuration(self, **kwargs): #@
config = Configuration(**kwargs)
config.headers_policy = StorageHeadersPolicy(**kwargs)
config.user_agent_policy = StorageUserAgentPolicy(**kwargs)
config.retry_policy = kwargs.get('retry_policy') or ExponentialRetry()
config.redirect_policy = RedirectPolicy()
config.logging_policy = StorageLoggingPolicy()
config.proxy_policy = ProxyPolicy()
return config
@staticmethod
def some_other_method(credential, api_version=None, **kwargs): #@
# type: (TokenCredential, Optional[str], Mapping[str, Any]) -> Configuration
if api_version is None:
api_version = KeyVaultClient.DEFAULT_API_VERSION
config = KeyVaultClient.get_configuration_class(api_version, aio=False)(credential)
config.authentication_policy = ChallengeAuthPolicy(credential)
return config
""")
with self.assertNoMessages():
self.checker.visit_functiondef(function_node_a)
self.checker.visit_functiondef(function_node_b)
def test_guidelines_link_active(self):
    """Verify the guidelines URL referenced by this checker is still reachable."""
    url = "https://azure.github.io/azure-sdk/python_design.html#client-configuration"
    config = Configuration()
    client = PipelineClient(url, config=config)
    request = client.get(url)
    # Live network call -- requires internet access when the test runs.
    response = client._pipeline.run(request)
    assert response.http_response.status_code == 200
class TestClientUsesCorrectNamingConventions(pylint.testutils.CheckerTestCase):
    """Tests for the ClientUsesCorrectNamingConventions checker."""

    CHECKER_CLASS = checker.ClientUsesCorrectNamingConventions

    def test_ignores_constructor(self):
        """Dunder constructors are exempt from naming checks."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_internal_client(self):
        """Underscore-prefixed (internal) clients are not checked."""
        class_node, function_node = astroid.extract_node("""
        class _BaseSomeClient(): #@
            def __init__(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_private_method(self):
        """Private methods, sync or async, are not checked."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def _private_method(self, **kwargs): #@
                pass
            async def _another_private_method(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_correct_client(self):
        """A PascalCase client name raises no message."""
        class_node = astroid.extract_node("""
        class SomeClient(): #@
            pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_non_client(self):
        """Classes that are not clients are never checked."""
        class_node, function_node = astroid.extract_node(
            """
            class SomethingElse(): #@
                def download_thing(self, some, **kwargs): #@
                    pass
            """
        )
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_correct_method_names(self):
        """snake_case method names raise no message."""
        class_node, function_node_a, function_node_b, function_node_c = astroid.extract_node("""
        class SomeClient(): #@
            def from_connection_string(self, **kwargs): #@
                pass
            def get_thing(self, **kwargs): #@
                pass
            def delete_thing(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_correct_method_names_async(self):
        """snake_case async method names raise no message.

        Fixed: this test previously declared plain ``def`` methods, making it an
        exact duplicate of the sync test and leaving async defs uncovered.
        """
        class_node, function_node_a, function_node_b, function_node_c = astroid.extract_node("""
        class SomeClient(): #@
            async def from_connection_string(self, **kwargs): #@
                pass
            async def get_thing(self, **kwargs): #@
                pass
            async def delete_thing(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_correct_class_constant(self):
        """UPPER_SNAKE_CASE class constants raise no message."""
        class_node = astroid.extract_node("""
        class SomeClient(): #@
            MAX_SIZE = 14
            MIN_SIZE = 2
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_finds_incorrectly_named_client(self):
        """Each client class not named in PascalCase is reported."""
        class_node_a, class_node_b, class_node_c = astroid.extract_node("""
        class some_client(): #@
            pass
        class Some_Client(): #@
            pass
        class someClient(): #@
            pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=class_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=class_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=class_node_c
            ),
        ):
            self.checker.visit_classdef(class_node_a)
            self.checker.visit_classdef(class_node_b)
            self.checker.visit_classdef(class_node_c)

    def test_finds_incorrectly_named_methods(self):
        """Each non-snake_case method name on a client is reported."""
        class_node, func_node_a, func_node_b, func_node_c, func_node_d, func_node_e, func_node_f \
            = astroid.extract_node("""
        class SomeClient(): #@
            def Create_Config(self): #@
                pass
            def getThing(self): #@
                pass
            def List_thing(self): #@
                pass
            def UpsertThing(self): #@
                pass
            def set_Thing(self): #@
                pass
            def Updatething(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_c
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_d
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_e
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_f
            ),
        ):
            self.checker.visit_classdef(class_node)

    def test_finds_incorrectly_named_methods_async(self):
        """Each non-snake_case async method name on a client is reported."""
        class_node, func_node_a, func_node_b, func_node_c, func_node_d, func_node_e, func_node_f \
            = astroid.extract_node("""
        class SomeClient(): #@
            async def Create_Config(self): #@
                pass
            async def getThing(self): #@
                pass
            async def List_thing(self): #@
                pass
            async def UpsertThing(self): #@
                pass
            async def set_Thing(self): #@
                pass
            async def Updatething(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_c
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_d
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_e
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=func_node_f
            ),
        ):
            self.checker.visit_classdef(class_node)

    def test_finds_incorrectly_named_class_constant(self):
        """Class constants not in UPPER_SNAKE_CASE are each reported."""
        class_node, const_a, const_b = astroid.extract_node("""
        class SomeClient(): #@
            max_size = 14 #@
            min_size = 2 #@
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=const_a
            ),
            pylint.testutils.Message(
                msg_id="client-incorrect-naming-convention", node=const_b
            ),
        ):
            self.checker.visit_classdef(class_node)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_implementation.html#naming-conventions"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientMethodsHaveKwargsParameter(pylint.testutils.CheckerTestCase):
    """Tests for the ClientMethodsHaveKwargsParameter checker."""

    CHECKER_CLASS = checker.ClientMethodsHaveKwargsParameter

    def test_ignores_private_methods(self):
        """Private (underscore-prefixed) client methods are not checked."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def _create_configuration(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_properties(self):
        """Properties are not required to accept **kwargs."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            @property
            def key_id(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_properties_async(self):
        """Async properties are not required to accept **kwargs."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            @property
            async def key_id(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node)

    def test_ignores_non_client_methods(self):
        """Methods on non-client classes are not checked."""
        class_node, function_node = astroid.extract_node("""
        class SomethingElse(): #@
            def create_configuration(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_methods_with_kwargs(self):
        """Client methods that accept **kwargs raise no message."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def get_thing(self, **kwargs): #@
                pass
            @distributed_trace
            def remove_thing(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_finds_missing_kwargs(self):
        """Traced client methods lacking **kwargs are each reported."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        from azure.core.tracing.decorator import distributed_trace
        class SomeClient(): #@
            @distributed_trace
            def get_thing(self): #@
                pass
            @distributed_trace
            def remove_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-kwargs", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-kwargs", node=function_node_b
            ),
        ):
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_ignores_methods_with_kwargs_async(self):
        """Async client methods that accept **kwargs raise no message."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            async def get_thing(self, **kwargs): #@
                pass
            async def remove_thing(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_finds_missing_kwargs_async(self):
        """Traced async client methods lacking **kwargs are each reported."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        from azure.core.tracing.decorator_async import distributed_trace_async
        class SomeClient(): #@
            @distributed_trace_async
            async def get_thing(self): #@
                pass
            @distributed_trace_async
            async def remove_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-missing-kwargs", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-missing-kwargs", node=function_node_b
            ),
        ):
            self.checker.visit_asyncfunctiondef(function_node_a)
            self.checker.visit_asyncfunctiondef(function_node_b)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestAsyncClientCorrectNaming(pylint.testutils.CheckerTestCase):
    """Tests for the AsyncClientCorrectNaming checker."""

    CHECKER_CLASS = checker.AsyncClientCorrectNaming

    def test_ignores_private_client(self):
        """An internal (underscore-prefixed) client may carry 'Async' in its name."""
        node = astroid.extract_node("""
        class _AsyncBaseSomeClient(): #@
            def create_configuration(self):
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(node)

    def test_ignores_correct_client(self):
        """A client without 'Async' in its name is acceptable."""
        node, _ = astroid.extract_node("""
        class SomeClient(): #@
            def create_configuration(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(node)

    def test_ignores_async_base_named_client(self):
        """A base class named Async...Base is not flagged."""
        base_node = astroid.extract_node("""
        class AsyncSomeClientBase(): #@
            def get_thing(self, **kwargs):
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(base_node)

    def test_finds_incorrectly_named_client(self):
        """A public client whose name starts with 'Async' is reported."""
        bad_node = astroid.extract_node("""
        class AsyncSomeClient(): #@
            def get_thing(self, **kwargs):
                pass
        """)
        expected = pylint.testutils.Message(
            msg_id="async-client-bad-name", node=bad_node
        )
        with self.assertAddsMessages(expected):
            self.checker.visit_classdef(bad_node)

    def test_ignores_non_client(self):
        """Classes that are not clients are never checked."""
        node, _ = astroid.extract_node("""
        class SomethingElse(): #@
            def create_configuration(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(node)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#async-support"
        pipeline_client = PipelineClient(url, config=Configuration())
        request = pipeline_client.get(url)
        # Live network call -- requires internet access.
        response = pipeline_client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestFileHasCopyrightHeader(pylint.testutils.CheckerTestCase):
    """Tests for the FileHasCopyrightHeader checker."""

    CHECKER_CLASS = checker.FileHasCopyrightHeader

    def test_copyright_header_acceptable(self):
        """A file that carries the copyright header raises no message."""
        # with-block ensures the handle is closed even if parsing raises
        # (the original open()/close() pair leaked on error).
        with open("./test_files/copyright_header_acceptable.py") as file:
            node = astroid.parse(file.read())
        with self.assertNoMessages():
            self.checker.visit_module(node)

    def test_copyright_header_violation(self):
        """A file missing the copyright header is flagged on its module node."""
        with open("./test_files/copyright_header_violation.py") as file:
            node = astroid.parse(file.read())
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="file-needs-copyright-header", node=node
            )
        ):
            self.checker.visit_module(node)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/policies_opensource.html"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestSpecifyParameterNamesInCall(pylint.testutils.CheckerTestCase):
    """Tests for the SpecifyParameterNamesInCall checker."""

    CHECKER_CLASS = checker.SpecifyParameterNamesInCall

    def test_ignores_call_with_only_two_unnamed_params(self):
        """Up to two positional arguments are allowed without names."""
        class_node, call_node = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing(self):
                self._client.thing(one, two) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_call(call_node)

    def test_ignores_call_with_two_unnamed_params_and_one_named(self):
        """Two positionals plus keyword arguments raise no message."""
        class_node, call_node = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing(self):
                self._client.thing(one, two, three=3) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_call(call_node)

    def test_ignores_call_from_non_client(self):
        """Calls made inside non-client classes are not checked."""
        class_node, call_node = astroid.extract_node("""
        class SomethingElse(): #@
            def do_thing(self):
                self._other.thing(one, two, three) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_call(call_node)

    def test_ignores_call_with_named_params(self):
        """Calls whose extra arguments are all named raise no message."""
        class_node, call_node_a, call_node_b, call_node_c = astroid.extract_node("""
        class SomethingElse(): #@
            def do_thing_a(self):
                self._other.thing(one=one, two=two, three=three) #@
            def do_thing_b(self):
                self._other.thing(zero, number, one=one, two=two, three=three) #@
            def do_thing_c(self):
                self._other.thing(zero, one=one, two=two, three=three) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_call(call_node_a)
            self.checker.visit_call(call_node_b)
            self.checker.visit_call(call_node_c)

    def test_ignores_non_client_function_call(self):
        """Calls in free functions (no enclosing client class) are not checked."""
        call_node = astroid.extract_node("""
        def do_thing():
            self._client.thing(one, two, three) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_call(call_node)

    def test_finds_call_with_more_than_two_unnamed_params(self):
        """Three or more positional arguments in a client call are reported."""
        class_node, call_node = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing(self):
                self._client.thing(one, two, three) #@
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="specify-parameter-names-in-call", node=call_node
            ),
        ):
            self.checker.visit_call(call_node)

    def test_finds_call_with_more_than_two_unnamed_params_and_some_named(self):
        """Extra positionals are reported even when keyword args are also present."""
        class_node, call_node = astroid.extract_node("""
        class SomeClient(): #@
            def do_thing(self):
                self._client.thing(one, two, three, four=4, five=5) #@
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="specify-parameter-names-in-call", node=call_node
            ),
        ):
            self.checker.visit_call(call_node)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_implementation.html#python-codestyle-positional-params"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientListMethodsUseCorePaging(pylint.testutils.CheckerTestCase):
    """Tests for the ClientListMethodsUseCorePaging checker."""

    CHECKER_CLASS = checker.ClientListMethodsUseCorePaging

    def test_ignores_private_methods(self):
        """Private list methods are not checked."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def _list_thing(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_non_client_methods(self):
        """list_* methods on non-client classes are not checked."""
        class_node, function_node = astroid.extract_node("""
        class SomethingElse(): #@
            def list_things(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_methods_return_ItemPaged(self):
        """list methods returning azure.core ItemPaged raise no message."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        from azure.core.paging import ItemPaged
        class SomeClient(): #@
            def list_thing(self): #@
                return ItemPaged()
            @distributed_trace
            def list_thing2(self): #@
                return ItemPaged(
                    command, prefix=name_starts_with, results_per_page=results_per_page,
                    page_iterator_class=BlobPropertiesPaged)
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_ignores_methods_return_AsyncItemPaged(self):
        """list methods returning AsyncItemPaged raise no message."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        from azure.core.async_paging import AsyncItemPaged
        class SomeClient(): #@
            async def list_thing(self): #@
                return AsyncItemPaged()
            @distributed_trace
            def list_thing2(self): #@
                return AsyncItemPaged(
                    command, prefix=name_starts_with, results_per_page=results_per_page,
                    page_iterator_class=BlobPropertiesPaged)
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_finds_method_returning_something_else(self):
        """list methods returning non-paged types are each reported."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        from azure.core.polling import LROPoller
        class SomeClient(): #@
            def list_thing(self): #@
                return list()
            def list_thing2(self): #@
                return LROPoller()
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-list-methods-use-paging", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-list-methods-use-paging", node=function_node_b
            ),
        ):
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_finds_method_returning_something_else_async(self):
        """Async list methods returning non-paged types are each reported."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        from azure.core.polling import LROPoller
        class SomeClient(): #@
            async def list_thing(self, **kwargs): #@
                return list()
            async def list_thing2(self, **kwargs): #@
                from azure.core.polling import LROPoller
                return LROPoller()
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-list-methods-use-paging", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-list-methods-use-paging", node=function_node_b
            ),
        ):
            # NOTE(review): visit_functiondef is invoked on async defs here;
            # presumably the checker aliases visit_asyncfunctiondef to it -- confirm.
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#response-formats"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientLROMethodsUseCorePolling(pylint.testutils.CheckerTestCase):
    """Tests for the ClientLROMethodsUseCorePolling checker."""

    CHECKER_CLASS = checker.ClientLROMethodsUseCorePolling

    def test_ignores_private_methods(self):
        """Private begin_* methods are not checked."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def _begin_thing(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_non_client_methods(self):
        """begin_* methods on non-client classes are not checked."""
        class_node, function_node = astroid.extract_node("""
        class SomethingElse(): #@
            def begin_things(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_methods_return_LROPoller(self):
        """begin_* methods returning azure.core LROPoller raise no message."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        from azure.core.polling import LROPoller
        class SomeClient(): #@
            def begin_thing(self): #@
                return LROPoller()
            @distributed_trace
            def begin_thing2(self): #@
                return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_finds_method_returning_something_else(self):
        """begin_* methods returning non-poller values are each reported."""
        class_node, function_node_a, function_node_b = astroid.extract_node("""
        class SomeClient(): #@
            def begin_thing(self): #@
                return list()
            def begin_thing2(self): #@
                return {}
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-lro-methods-use-polling", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-lro-methods-use-polling", node=function_node_b
            ),
        ):
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#response-formats"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientLROMethodsUseCorrectNaming(pylint.testutils.CheckerTestCase):
    """Tests for the ClientLROMethodsUseCorrectNaming checker."""

    CHECKER_CLASS = checker.ClientLROMethodsUseCorrectNaming

    def test_ignores_private_methods(self):
        """LROPoller returns from private methods are not checked."""
        class_node, return_node = astroid.extract_node("""
        from azure.core.polling import LROPoller
        class SomeClient(): #@
            def _do_thing(self):
                return LROPoller(self._client, raw_result, get_long_running_output, polling_method) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)
            self.checker.visit_return(return_node)

    def test_ignores_non_client_methods(self):
        """LROPoller returns from non-client classes are not checked."""
        class_node, return_node = astroid.extract_node("""
        from azure.core.polling import LROPoller
        class SomethingElse(): #@
            def begin_things(self):
                return LROPoller(self._client, raw_result, get_long_running_output, polling_method) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)
            self.checker.visit_return(return_node)

    def test_ignores_methods_return_LROPoller_and_correctly_named(self):
        """Methods named begin_* that return LROPoller raise no message."""
        class_node, return_node_a, return_node_b = astroid.extract_node("""
        from azure.core.polling import LROPoller
        class SomeClient(): #@
            def begin_thing(self):
                return LROPoller() #@
            @distributed_trace
            def begin_thing2(self):
                return LROPoller(self._client, raw_result, get_long_running_output, polling_method) #@
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)
            self.checker.visit_return(return_node_a)
            self.checker.visit_return(return_node_b)

    def test_finds_incorrectly_named_method_returning_LROPoller(self):
        """Methods returning LROPoller without a begin_ prefix are each reported."""
        class_node, function_node_a, return_node_a, function_node_b, return_node_b = astroid.extract_node("""
        from azure.core.polling import LROPoller
        class SomeClient(): #@
            def poller_thing(self): #@
                return LROPoller() #@
            @distributed_trace
            def start_thing2(self): #@
                return LROPoller(self._client, raw_result, get_long_running_output, polling_method) #@
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="lro-methods-use-correct-naming", node=function_node_a
            ),
            pylint.testutils.Message(
                msg_id="lro-methods-use-correct-naming", node=function_node_b
            ),
        ):
            self.checker.visit_classdef(class_node)
            self.checker.visit_return(return_node_a)
            self.checker.visit_return(return_node_b)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#service-operations"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientConstructorDoesNotHaveConnectionStringParam(pylint.testutils.CheckerTestCase):
    """Tests for the ClientConstructorDoesNotHaveConnectionStringParam checker."""

    CHECKER_CLASS = checker.ClientConstructorDoesNotHaveConnectionStringParam

    def test_ignores_client_with_no_conn_str_in_constructor(self):
        """A constructor without connection-string parameters raises no message."""
        class_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self):
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_ignores_non_client_methods(self):
        """Constructors of non-client classes are not checked."""
        class_node, function_node = astroid.extract_node("""
        class SomethingElse(): #@
            def __init__(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_finds_client_method_using_conn_str_in_constructor_a(self):
        """A constructor taking 'connection_string' is reported on the class."""
        class_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, connection_string):
                return list()
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="connection-string-should-not-be-constructor-param", node=class_node
            ),
        ):
            self.checker.visit_classdef(class_node)

    def test_finds_client_method_using_conn_str_in_constructor_b(self):
        """A constructor taking 'conn_str' is reported on the class."""
        class_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, conn_str):
                return list()
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="connection-string-should-not-be-constructor-param", node=class_node
            ),
        ):
            self.checker.visit_classdef(class_node)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#python-client-connection-string"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestPackageNameDoesNotUseUnderscoreOrPeriod(pylint.testutils.CheckerTestCase):
    """Tests for the PackageNameDoesNotUseUnderscoreOrPeriod checker."""

    CHECKER_CLASS = checker.PackageNameDoesNotUseUnderscoreOrPeriod

    def test_package_name_acceptable(self):
        """A hyphenated PACKAGE_NAME in setup.py raises no message."""
        package_name = astroid.extract_node(
            """
            PACKAGE_NAME = "correct-package-name"
            """
        )
        # Synthetic module pretending to be setup.py, holding the assignment.
        module_node = astroid.Module(name = "node", file="setup.py", doc = """ """)
        module_node.body = [package_name]
        with self.assertNoMessages():
            self.checker.visit_module(module_node)

    def test_package_name_violation(self):
        """A PACKAGE_NAME containing a period is reported on the module."""
        package_name = astroid.extract_node(
            """
            PACKAGE_NAME = "incorrect.package-name"
            """
        )
        module_node = astroid.Module(name = "node", file="setup.py", doc = """ """)
        module_node.body = [package_name]
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="package-name-incorrect", node=module_node
            )
        ):
            self.checker.visit_module(module_node)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#packaging"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestServiceClientUsesNameWithClientSuffix(pylint.testutils.CheckerTestCase):
    """Tests for the ServiceClientUsesNameWithClientSuffix checker."""

    CHECKER_CLASS = checker.ServiceClientUsesNameWithClientSuffix

    @staticmethod
    def _client_module(body_node):
        """Wrap *body_node* in a synthetic module named like a client file."""
        module = astroid.Module(name="node", file="_my_client.py", doc=""" """)
        module.body = [body_node]
        return module

    def test_client_suffix_acceptable(self):
        """A class ending in 'Client' inside a *_client.py module is fine."""
        node = astroid.extract_node(
            """
            class MyClient():
                def __init__(self):
                    pass
            """
        )
        with self.assertNoMessages():
            self.checker.visit_module(self._client_module(node))

    def test_client_suffix_violation(self):
        """A *_client.py module whose class lacks the Client suffix is flagged."""
        node = astroid.extract_node(
            """
            class Violation():
                def __init__(self):
                    pass
            """
        )
        module = self._client_module(node)
        with self.assertAddsMessages(
            pylint.testutils.Message(msg_id="client-suffix-needed", node=module)
        ):
            self.checker.visit_module(module)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_design.html#service-client"
        pipeline_client = PipelineClient(url, config=Configuration())
        request = pipeline_client.get(url)
        # Live network call -- requires internet access.
        response = pipeline_client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestClientMethodNamesDoNotUseDoubleUnderscorePrefix(pylint.testutils.CheckerTestCase):
    """Tests for the ClientMethodNamesDoNotUseDoubleUnderscorePrefix checker."""

    CHECKER_CLASS = checker.ClientMethodNamesDoNotUseDoubleUnderscorePrefix

    def test_ignores_repr(self):
        """__repr__ is a recognized dunder and is not flagged."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __repr__(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_constructor(self):
        """__init__ is a recognized dunder and is not flagged."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            def __init__(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_other_dunder(self):
        """Context-manager dunders (sync and async) are not flagged."""
        class_node, function_node_a, function_node_b, function_node_c, function_node_d = astroid.extract_node("""
        class SomeClient(): #@
            def __enter__(self): #@
                pass
            def __exit__(self): #@
                pass
            def __aenter__(self): #@
                pass
            def __aexit__(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node_a)
            self.checker.visit_functiondef(function_node_b)
            self.checker.visit_functiondef(function_node_c)
            self.checker.visit_functiondef(function_node_d)

    def test_ignores_private_method(self):
        """A single-underscore private method is not flagged."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            def _private_method(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(function_node)

    def test_ignores_private_method_async(self):
        """A single-underscore private async method is not flagged."""
        class_node, function_node = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            async def _private_method(self, **kwargs): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(function_node)

    def test_ignores_methods_with_decorators(self):
        """Public decorated methods without the __ prefix raise no message."""
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace
            def create_configuration(self): #@
                pass
            @distributed_trace
            def get_thing(self): #@
                pass
            @distributed_trace
            def list_thing(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)
            self.checker.visit_functiondef(func_node_c)

    def test_ignores_async_methods_with_decorators(self):
        """Public decorated async methods without the __ prefix raise no message."""
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @distributed_trace_async
            async def create_configuration(self): #@
                pass
            @distributed_trace_async
            async def get_thing(self): #@
                pass
            @distributed_trace_async
            async def list_thing(self): #@
                pass
        """)
        with self.assertNoMessages():
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)
            self.checker.visit_asyncfunctiondef(func_node_c)

    def test_finds_double_underscore_on_async_method(self):
        """Async methods with a __ prefix (non-dunder) are each reported."""
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            async def __create_configuration(self): #@
                pass
            @staticmethod
            async def __get_thing(self): #@
                pass
            @staticmethod
            async def __list_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-name-no-double-underscore", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-name-no-double-underscore", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-name-no-double-underscore", node=func_node_c
            ),
        ):
            self.checker.visit_asyncfunctiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)
            self.checker.visit_asyncfunctiondef(func_node_c)

    def test_finds_double_underscore_on_sync_method(self):
        """Sync methods with a __ prefix (non-dunder) are each reported."""
        class_node, func_node_a, func_node_b, func_node_c = astroid.extract_node("""
        class SomeClient(): #@
            @staticmethod
            def __create_configuration(self): #@
                pass
            @staticmethod
            def __get_thing(self): #@
                pass
            @staticmethod
            def __list_thing(self): #@
                pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-method-name-no-double-underscore", node=func_node_a
            ),
            pylint.testutils.Message(
                msg_id="client-method-name-no-double-underscore", node=func_node_b
            ),
            pylint.testutils.Message(
                msg_id="client-method-name-no-double-underscore", node=func_node_c
            ),
        ):
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_functiondef(func_node_b)
            self.checker.visit_functiondef(func_node_c)

    def test_ignores_non_client_method(self):
        """__ prefixed methods on non-client classes are not checked."""
        class_node, func_node_a, func_node_b = astroid.extract_node(
            """
            class SomethingElse(): #@
                @staticmethod
                def __download_thing(self, some, **kwargs): #@
                    pass
                @staticmethod
                async def __do_thing(self, some, **kwargs): #@
                    pass
            """
        )
        with self.assertNoMessages():
            self.checker.visit_functiondef(func_node_a)
            self.checker.visit_asyncfunctiondef(func_node_b)

    def test_guidelines_link_active(self):
        """Verify the guidelines URL referenced by this checker is still reachable."""
        url = "https://azure.github.io/azure-sdk/python_implementation.html#public-vs-private"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        # Live network call -- requires internet access.
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestCheckDocstringAdmonitionNewline(pylint.testutils.CheckerTestCase):
CHECKER_CLASS = checker.CheckDocstringAdmonitionNewline
def test_ignores_correct_admonition_statement_in_function(self):
    """A properly formatted admonition block raises no message."""
    # NOTE(review): indentation/blank lines inside this fixture were lost in the
    # paste; the blank line before ".. literalinclude" is what makes this the
    # "correct" format -- confirm against the repository version.
    function_node = astroid.extract_node(
        """
        def function_foo(x, y, z):
            '''docstring
            .. admonition:: Example:

                .. literalinclude:: ../samples/sample_detect_language.py
            '''
        """
    )
    with self.assertNoMessages():
        self.checker.visit_functiondef(function_node)
def test_ignores_correct_admonition_statement_in_function_with_comments(self):
    """Multi-line admonition content followed by a blank line passes."""
    # NOTE(review): the blank line before ".. literalinclude" was lost in the
    # paste and reconstructed here -- confirm against the repository version.
    function_node = astroid.extract_node(
        """
        def function_foo(x, y, z):
            '''docstring
            .. admonition:: Example:
                This is Example content.
                Should support multi-line.
                Can also include file:

                .. literalinclude:: ../samples/sample_detect_language.py
            '''
        """
    )
    with self.assertNoMessages():
        self.checker.visit_functiondef(function_node)
def test_bad_admonition_statement_in_function(self):
function_node = astroid.extract_node(
"""
def function_foo(x, y, z):
'''docstring
.. admonition:: Example:
.. literalinclude:: ../samples/sample_detect_language.py
'''
"""
)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id="docstring-admonition-needs-newline", node=function_node
)
):
self.checker.visit_functiondef(function_node)
def test_bad_admonition_statement_in_function_with_comments(self):
function_node = astroid.extract_node(
"""
def function_foo(x, y, z):
'''docstring
.. admonition:: Example:
This is Example content.
Should support multi-line.
Can also include file:
.. literalinclude:: ../samples/sample_detect_language.py
'''
"""
)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id="docstring-admonition-needs-newline", node=function_node
)
):
self.checker.visit_functiondef(function_node)
def test_ignores_correct_admonition_statement_in_function_async(self):
function_node = astroid.extract_node(
"""
async def function_foo(x, y, z):
'''docstring
.. admonition:: Example:
.. literalinclude:: ../samples/sample_detect_language.py
'''
"""
)
with self.assertNoMessages():
self.checker.visit_asyncfunctiondef(function_node)
def test_ignores_correct_admonition_statement_in_function_with_comments_async(self):
function_node = astroid.extract_node(
"""
async def function_foo(x, y, z):
'''docstring
.. admonition:: Example:
This is Example content.
Should support multi-line.
Can also include file:
.. literalinclude:: ../samples/sample_detect_language.py
'''
"""
)
with self.assertNoMessages():
self.checker.visit_asyncfunctiondef(function_node)
def test_bad_admonition_statement_in_function_async(self):
function_node = astroid.extract_node(
"""
async def function_foo(x, y, z):
'''docstring
.. admonition:: Example:
.. literalinclude:: ../samples/sample_detect_language.py
'''
"""
)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id="docstring-admonition-needs-newline", node=function_node
)
):
self.checker.visit_asyncfunctiondef(function_node)
def test_bad_admonition_statement_in_function_with_comments_async(self):
function_node = astroid.extract_node(
"""
async def function_foo(x, y, z):
'''docstring
.. admonition:: Example:
This is Example content.
Should support multi-line.
Can also include file:
.. literalinclude:: ../samples/sample_detect_language.py
'''
"""
)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id="docstring-admonition-needs-newline", node=function_node
)
):
self.checker.visit_asyncfunctiondef(function_node)
def test_ignores_correct_admonition_statement_in_class(self):
class_node = astroid.extract_node(
"""
class SomeClient(object):
'''docstring
.. admonition:: Example:
.. literalinclude:: ../samples/sample_detect_language.py
'''
def __init__(self):
pass
"""
)
with self.assertNoMessages():
self.checker.visit_classdef(class_node)
def test_ignores_correct_admonition_statement_in_class_with_comments(self):
class_node = astroid.extract_node(
"""
class SomeClient(object):
'''docstring
.. admonition:: Example:
This is Example content.
Should support multi-line.
Can also include file:
.. literalinclude:: ../samples/sample_detect_language.py
'''
def __init__(self):
pass
"""
)
with self.assertNoMessages():
self.checker.visit_classdef(class_node)
def test_bad_admonition_statement_in_class(self):
class_node = astroid.extract_node(
"""
class SomeClient(object):
'''docstring
.. admonition:: Example:
.. literalinclude:: ../samples/sample_detect_language.py
'''
def __init__(self):
pass
"""
)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id="docstring-admonition-needs-newline", node=class_node
)
):
self.checker.visit_classdef(class_node)
def test_bad_admonition_statement_in_class_with_comments(self):
class_node = astroid.extract_node(
"""
class SomeClient(object):
'''docstring
.. admonition:: Example:
This is Example content.
Should support multi-line.
Can also include file:
.. literalinclude:: ../samples/sample_detect_language.py
'''
def __init__(self):
pass
"""
)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id="docstring-admonition-needs-newline", node=class_node
)
):
self.checker.visit_classdef(class_node)
class TestCheckNamingMismatchGeneratedCode(pylint.testutils.CheckerTestCase):
    """Tests for checker.CheckNamingMismatchGeneratedCode.

    The checker reports ``naming-mismatch`` when a name exported via a
    module's ``__all__`` only exists because of an ``import ... as`` /
    ``from ... import ... as`` alias, i.e. the public name does not match
    the generated name.
    """

    CHECKER_CLASS = checker.CheckNamingMismatchGeneratedCode

    def test_import_naming_mismatch_violation(self):
        """``import X as Y`` with Y listed in ``__all__`` must be reported."""
        import_one = astroid.extract_node(
            'import Something'
        )
        import_two = astroid.extract_node(
            'import Something2 as SomethingTwo'
        )
        assign_one = astroid.extract_node(
            """
        __all__ =(
            "Something",
            "SomethingTwo",
        )
        """
        )
        module_node = astroid.Module(name="node", file="__init__.py", doc=""" """)
        module_node.body = [import_one, import_two, assign_one]
        # The aliased export is the second element of the __all__ tuple.
        for name in module_node.body[-1].assigned_stmts():
            err_node = name.elts[1]
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="naming-mismatch", node=err_node, confidence=None
            )
        ):
            self.checker.visit_module(module_node)

    def test_import_from_naming_mismatch_violation(self):
        """``from X import Y as Z`` with Z listed in ``__all__`` must be reported."""
        import_one = astroid.extract_node(
            'import Something'
        )
        import_two = astroid.extract_node(
            'from Something2 import SomethingToo as SomethingTwo'
        )
        assign_one = astroid.extract_node(
            """
        __all__ =(
            "Something",
            "SomethingTwo",
        )
        """
        )
        module_node = astroid.Module(name="node", file="__init__.py", doc=""" """)
        module_node.body = [import_one, import_two, assign_one]
        # The aliased export is the second element of the __all__ tuple.
        for name in module_node.body[-1].assigned_stmts():
            err_node = name.elts[1]
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="naming-mismatch", node=err_node, confidence=None
            )
        ):
            self.checker.visit_module(module_node)

    def test_naming_mismatch_acceptable(self):
        """Exporting the original (un-aliased) names produces no message."""
        import_one = astroid.extract_node(
            'import Something'
        )
        import_two = astroid.extract_node(
            'import Something2 as SomethingTwo'
        )
        assign_one = astroid.extract_node(
            """
        __all__ =(
            "Something",
            "Something2",
        )
        """
        )
        module_node = astroid.Module(name="node", file="__init__.py", doc=""" """)
        module_node.body = [import_one, import_two, assign_one]
        with self.assertNoMessages():
            self.checker.visit_module(module_node)

    def test_naming_mismatch_pylint_disable(self):
        """A fixture file with an inline pylint disable raises no messages."""
        # Context manager closes the handle even if astroid.parse raises
        # (the original opened and closed the file manually).
        with open("./test_files/__init__.py") as source:
            node = astroid.parse(source.read())
        with self.assertNoMessages():
            self.checker.visit_module(node)

    def test_guidelines_link_active(self):
        """Verify the autorest-directives URL cited by the checker still resolves."""
        url = "https://github.com/Azure/autorest/blob/main/docs/generate/built-in-directives.md"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
class TestCheckEnum(pylint.testutils.CheckerTestCase):
    """Tests for checker.CheckEnum.

    The checker enforces two rules on SDK enums: members must be uppercase
    (``enum-must-be-uppercase``) and enum classes must use
    ``azure.core.CaseInsensitiveEnumMeta``
    (``enum-must-inherit-case-insensitive-enum-meta``).
    """

    CHECKER_CLASS = checker.CheckEnum

    def test_ignore_normal_class(self):
        """A plain class that is not an enum is not flagged."""
        class_node = astroid.extract_node(
            """
        class SomeClient(object):
            my_list = []
        """
        )
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_enum_capitalized_violation_python_two(self):
        """Python-2 style (six.with_metaclass) enum with a lowercase member is flagged."""
        class_node = astroid.extract_node(
            """
        from enum import Enum
        from six import with_metaclass
        from azure.core import CaseInsensitiveEnumMeta

        class MyBadEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
            One = "one"
        """
        )
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="enum-must-be-uppercase", node=class_node.body[0].targets[0]
            )
        ):
            self.checker.visit_classdef(class_node)

    def test_enum_capitalized_violation_python_three(self):
        """Python-3 style (metaclass kwarg) enum with a lowercase member is flagged."""
        class_node = astroid.extract_node(
            """
        from enum import Enum
        from azure.core import CaseInsensitiveEnumMeta

        class MyBadEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
            One = "one"
        """
        )
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="enum-must-be-uppercase", node=class_node.body[0].targets[0]
            )
        ):
            self.checker.visit_classdef(class_node)

    def test_inheriting_case_insensitive_violation(self):
        """An enum without CaseInsensitiveEnumMeta is flagged even with uppercase members."""
        class_node = astroid.extract_node(
            """
        from enum import Enum

        class MyGoodEnum(str, Enum):
            ONE = "one"
        """
        )
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="enum-must-inherit-case-insensitive-enum-meta", node=class_node
            )
        ):
            self.checker.visit_classdef(class_node)

    def test_acceptable_python_three(self):
        """Uppercase members + CaseInsensitiveEnumMeta: no message."""
        class_node = astroid.extract_node(
            """
        from enum import Enum
        from azure.core import CaseInsensitiveEnumMeta

        class MyGoodEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
            ONE = "one"
        """
        )
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_enum_file_acceptable_python_two(self):
        """A compliant Python-2 style enum fixture file produces no messages."""
        # Context manager closes the handle even if parsing raises
        # (the original opened and closed the file manually).
        with open("./test_files/enum_checker_acceptable.py") as source:
            node = astroid.parse(source.read())
        with self.assertNoMessages():
            self.checker.visit_classdef(node.body[3])

    def test_enum_file_both_violation(self):
        """A fixture file violating both rules produces both messages."""
        with open("./test_files/enum_checker_violation.py") as source:
            node = astroid.parse(source.read())
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="enum-must-inherit-case-insensitive-enum-meta", node=node.body[1]
            ),
            pylint.testutils.Message(
                msg_id="enum-must-be-uppercase", node=node.body[1].body[0].targets[0]
            )
        ):
            self.checker.visit_classdef(node.body[1])

    def test_guidelines_link_active(self):
        """Verify both guidelines URLs cited by the checker still resolve."""
        self._create_url_pipeline("https://azure.github.io/azure-sdk/python_design.html#enumerations")
        self._create_url_pipeline("https://azure.github.io/azure-sdk/python_implementation.html#extensible-enumerations")

    def _create_url_pipeline(self, url):
        """Assert that *url* answers HTTP 200."""
        resp = requests.get(url)
        assert resp.status_code == 200
class TestCheckAPIVersion(pylint.testutils.CheckerTestCase):
    """Tests for checker.CheckAPIVersion.

    The checker emits ``client-accepts-api-version-keyword`` when a client
    class does not document an ``api_version`` keyword argument.
    """

    CHECKER_CLASS = checker.CheckAPIVersion

    def test_api_version_violation(self):
        """A client whose docstring lacks the api_version keyword is flagged."""
        class_node = astroid.extract_node(
            """
        class SomeClient(object):
            '''
            :param str something: something
            '''
            def __init__(self, something, **kwargs):
                pass
        """
        )
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-accepts-api-version-keyword", node=class_node
            )
        ):
            self.checker.visit_classdef(class_node)

    def test_api_version_acceptable(self):
        """A client documenting ``:keyword str api_version:`` is not flagged."""
        class_node = astroid.extract_node(
            """
        class SomeClient(object):
            '''
            :param str something: something
            :keyword str api_version: api_version
            '''
            def __init__(self, something, **kwargs):
                pass
        """
        )
        with self.assertNoMessages():
            self.checker.visit_classdef(class_node)

    def test_api_version_file_class_acceptable(self):
        """Fixture documenting api_version on the class docstring: no message."""
        # Context manager closes the handle even if parsing raises
        # (the original opened and closed the file manually).
        with open("./test_files/api_version_checker_acceptable_class.py") as source:
            node = astroid.parse(source.read())
        with self.assertNoMessages():
            self.checker.visit_classdef(node.body[0])

    def test_api_version_file_init_acceptable(self):
        """Fixture documenting api_version on __init__: no message."""
        with open("./test_files/api_version_checker_acceptable_init.py") as source:
            node = astroid.parse(source.read())
        with self.assertNoMessages():
            self.checker.visit_classdef(node.body[0])

    def test_api_version_file_violation(self):
        """Fixture with no api_version documentation: message expected."""
        with open("./test_files/api_version_checker_violation.py") as source:
            node = astroid.parse(source.read())
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id="client-accepts-api-version-keyword", node=node.body[0]
            )
        ):
            self.checker.visit_classdef(node.body[0])

    def test_guidelines_link_active(self):
        """Verify the guidelines URL cited by the checker still resolves (HTTP 200)."""
        url = "https://azure.github.io/azure-sdk/python_design.html#specifying-the-service-version"
        config = Configuration()
        client = PipelineClient(url, config=config)
        request = client.get(url)
        response = client._pipeline.run(request)
        assert response.http_response.status_code == 200
| 37.323124 | 121 | 0.59162 | 11,226 | 110,887 | 5.539462 | 0.040175 | 0.064645 | 0.066639 | 0.048644 | 0.928907 | 0.919532 | 0.884524 | 0.870341 | 0.85479 | 0.84318 | 0 | 0.001876 | 0.31257 | 110,887 | 2,970 | 122 | 37.33569 | 0.813924 | 0.001281 | 0 | 0.774691 | 0 | 0.016755 | 0.356083 | 0.082456 | 0 | 0 | 0 | 0 | 0.076279 | 1 | 0.07672 | false | 0.091711 | 0.017196 | 0 | 0.125661 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
37da8861fd16686bd3b3fbac34379667d3f13f34 | 148 | py | Python | python/fix_float.py | ruddfawcett/spotify-data | e0c4dd127fbcdefd0fa4b706ec6bd5a39f433942 | [
"MIT"
] | null | null | null | python/fix_float.py | ruddfawcett/spotify-data | e0c4dd127fbcdefd0fa4b706ec6bd5a39f433942 | [
"MIT"
] | null | null | null | python/fix_float.py | ruddfawcett/spotify-data | e0c4dd127fbcdefd0fa4b706ec6bd5a39f433942 | [
"MIT"
] | null | null | null | import pandas as pd
data = pd.read_csv('billboard_spotify_matched_data.csv')
data.to_csv('billboard_spotify_matched_data.csv', float_format='%.6f')
| 37 | 70 | 0.810811 | 24 | 148 | 4.625 | 0.583333 | 0.216216 | 0.342342 | 0.468468 | 0.594595 | 0.594595 | 0 | 0 | 0 | 0 | 0 | 0.007194 | 0.060811 | 148 | 3 | 71 | 49.333333 | 0.791367 | 0 | 0 | 0 | 0 | 0 | 0.486486 | 0.459459 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
5303d1048aa0189ea979b69c127aa74c8d3ce058 | 57,948 | py | Python | db/create_db.py | BloomTech-Labs/airbnb-unit-price-analysis-ds | f9639cfd1dc696c6ef5fb899895147763218b972 | [
"MIT"
] | 1 | 2020-05-30T19:13:11.000Z | 2020-05-30T19:13:11.000Z | db/create_db.py | BloomTech-Labs/airbnb-unit-price-analysis-ds | f9639cfd1dc696c6ef5fb899895147763218b972 | [
"MIT"
] | 2 | 2019-09-27T18:10:14.000Z | 2021-04-30T20:50:41.000Z | db/create_db.py | BloomTech-Labs/airbnb-unit-price-analysis-ds | f9639cfd1dc696c6ef5fb899895147763218b972 | [
"MIT"
] | 1 | 2019-09-17T21:03:01.000Z | 2019-09-17T21:03:01.000Z | '''
dbname – the database name (database is a deprecated alias)
user – user name used to authenticate
password – password used to authenticate
host – database host address (defaults to UNIX socket if not provided)
port – connection port number (defaults to 5432 if not provided)
'''
import psycopg2
from dotenv import load_dotenv, find_dotenv
from settings import DB_NAME, DB_USERNAME, DB_PASSWORD, DB_HOST
import pandas as pd
import numpy as np
from urllib.request import urlretrieve
# Scrape-bookkeeping columns dropped from the raw InsideAirbnb listings data
# before loading; they describe the scrape run, not the listing itself.
REMOVE_COLS = ['scrape_id','last_scraped','smart_location','calendar_last_scraped']

# Columns that describe the host rather than the individual listing.
# NOTE(review): presumably split into a separate host table keyed by
# host_id — confirm against the loader code that consumes HOST_COLS.
HOST_COLS = ['host_id','host_url','host_name','host_since',
'host_location','host_about','host_response_time','host_response_rate',
'host_acceptance_rate','host_is_superhost','host_thumbnail_url',
'host_picture_url','host_neighbourhood','host_listings_count',
'host_total_listings_count','host_verifications',
'host_has_profile_pic','host_identity_verified']
lists = ['http://data.insideairbnb.com/united-states/nc/asheville/2019-07-25/data/listings.csv',
'http://data.insideairbnb.com/united-states/nc/asheville/2019-06-26/data/listings.csv',
'http://data.insideairbnb.com/united-states/nc/asheville/2019-05-26/data/listings.csv',
'http://data.insideairbnb.com/united-states/nc/asheville/2019-04-23/data/listings.csv',
'http://data.insideairbnb.com/united-states/nc/asheville/2019-03-25/data/listings.csv',
'http://data.insideairbnb.com/united-states/nc/asheville/2019-02-17/data/listings.csv',
'http://data.insideairbnb.com/united-states/nc/asheville/2019-01-27/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2018-12-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2018-11-20/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2018-10-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2018-09-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2018-08-21/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2018-07-29/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2018-04-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nc/asheville/2016-04-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/tx/austin/2019-07-12/data/listings.csv',
'http://data.insideairbnb.com/united-states/tx/austin/2019-06-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/tx/austin/2019-05-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/tx/austin/2019-04-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/tx/austin/2019-03-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/tx/austin/2019-02-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/tx/austin/2019-01-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-12-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-11-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-10-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-09-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-08-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-07-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-05-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2018-04-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2017-03-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2015-11-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tx/austin/2015-05-23/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/boston/2019-07-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/boston/2019-06-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/boston/2019-05-19/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/boston/2019-04-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/boston/2019-03-12/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/boston/2019-02-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/boston/2019-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-12-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-11-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-10-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-09-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-08-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-07-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-05-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2018-04-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2017-10-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2016-09-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/boston/2015-10-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/fl/broward-county/2019-08-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/fl/broward-county/2019-07-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/fl/broward-county/2019-06-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/fl/broward-county/2019-05-19/data/listings.csv',
'http://data.insideairbnb.com/united-states/fl/broward-county/2019-04-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/fl/broward-county/2019-03-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-08-28/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-07-20/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-06-24/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-05-23/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-04-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-03-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-02-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/ma/cambridge/2019-01-22/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/cambridge/2018-12-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ma/cambridge/2018-11-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/il/chicago/2019-07-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/il/chicago/2019-06-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/il/chicago/2019-05-19/data/listings.csv',
'http://data.insideairbnb.com/united-states/il/chicago/2019-04-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/il/chicago/2019-03-12/data/listings.csv',
'http://data.insideairbnb.com/united-states/il/chicago/2019-02-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/il/chicago/2019-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-12-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-11-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-10-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-09-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-08-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-07-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-05-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2018-04-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2017-05-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/il/chicago/2015-10-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-08-28/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-07-20/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-06-25/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-05-23/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-04-20/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-03-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-02-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2019-01-22/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2018-12-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2018-11-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2018-10-25/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2018-09-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/nv/clark-county-nv/2018-08-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-08-19/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-07-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-06-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-05-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-04-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-03-12/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-02-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/oh/columbus/2019-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/oh/columbus/2018-12-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/oh/columbus/2018-11-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/oh/columbus/2018-10-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/oh/columbus/2018-09-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/oh/columbus/2018-08-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/oh/columbus/2018-07-17/data/listings.csv',
'http://data.insideairbnb.com/united-states/co/denver/2019-07-28/data/listings.csv',
'http://data.insideairbnb.com/united-states/co/denver/2019-06-29/data/listings.csv',
'http://data.insideairbnb.com/united-states/co/denver/2019-05-29/data/listings.csv',
'http://data.insideairbnb.com/united-states/co/denver/2019-04-27/data/listings.csv',
'http://data.insideairbnb.com/united-states/co/denver/2019-03-28/data/listings.csv',
'http://data.insideairbnb.com/united-states/co/denver/2019-02-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/co/denver/2019-01-29/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2018-12-21/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2018-11-22/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2018-10-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2018-09-21/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2018-08-27/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2018-07-31/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2018-04-21/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2017-11-24/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/co/denver/2016-05-16/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-08-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-07-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-06-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-05-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-04-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-03-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-02-02/data/listings.csv',
'http://data.insideairbnb.com/united-states/hi/hawaii/2019-01-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/hi/hawaii/2018-12-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/hi/hawaii/2018-11-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/hi/hawaii/2018-10-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/nj/jersey-city/2019-08-31/data/listings.csv',
'http://data.insideairbnb.com/united-states/nj/jersey-city/2019-07-29/data/listings.csv',
'http://data.insideairbnb.com/united-states/nj/jersey-city/2019-06-29/data/listings.csv',
'http://data.insideairbnb.com/united-states/nj/jersey-city/2019-05-29/data/listings.csv',
'http://data.insideairbnb.com/united-states/nj/jersey-city/2019-04-27/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/los-angeles/2019-07-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/los-angeles/2019-06-04/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/los-angeles/2019-05-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/los-angeles/2019-04-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/los-angeles/2019-03-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/los-angeles/2019-02-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/los-angeles/2019-01-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-12-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-11-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-10-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-09-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-08-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-07-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-06-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-05-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2018-04-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2017-05-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2017-04-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2017-03-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2016-08-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2016-07-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2016-05-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2016-01-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2015-11-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2015-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2015-07-25/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/los-angeles/2015-05-24/data/listings.csv',
'http://data.insideairbnb.com/united-states/tn/nashville/2019-07-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/tn/nashville/2019-06-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/tn/nashville/2019-05-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/tn/nashville/2019-04-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/tn/nashville/2019-03-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/tn/nashville/2019-02-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/tn/nashville/2019-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-12-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-11-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-10-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-09-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-08-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-07-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-05-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2018-04-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2017-09-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2016-09-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2015-10-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2015-09-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/tn/nashville/2015-06-22/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-08-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-07-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-06-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-05-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-04-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-03-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-02-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/la/new-orleans/2019-01-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-12-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-11-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-10-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-09-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-08-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-07-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-05-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-04-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-03-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-02-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2018-01-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-12-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-11-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-10-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-08-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-07-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-06-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-05-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-04-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-04-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-03-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-02-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2017-01-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-12-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-11-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-10-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-09-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-08-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-07-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-06-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-05-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-04-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2016-02-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2015-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/la/new-orleans/2015-06-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-08-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-07-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-06-02/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-05-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-04-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-03-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-02-01/data/listings.csv',
'http://data.insideairbnb.com/united-states/ny/new-york-city/2019-01-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-12-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-11-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-10-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-09-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-08-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-07-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-06-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-05-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-04-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-03-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-02-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-01-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2018-01-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-12-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-11-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-10-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-08-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-07-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-06-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-05-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-04-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-03-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-02-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2017-01-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-12-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-11-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-10-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-08-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-07-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-06-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-05-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-04-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-02-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2016-01-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-12-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-11-20/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-11-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-10-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-09-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-08-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-06-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-05-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-03-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ny/new-york-city/2015-01-01/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/oakland/2019-07-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/oakland/2019-06-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/oakland/2019-05-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/oakland/2019-04-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/oakland/2019-03-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/oakland/2019-02-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/oakland/2019-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-12-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-11-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-10-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-09-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-08-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-07-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-05-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2018-04-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2016-05-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/oakland/2015-06-22/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/pacific-grove/2019-07-31/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/pacific-grove/2019-07-01/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/pacific-grove/2019-05-31/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/pacific-grove/2019-05-01/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/pacific-grove/2019-03-31/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/pacific-grove/2019-02-26/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/pacific-grove/2019-01-31/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/pacific-grove/2018-12-22/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/pacific-grove/2018-11-27/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/pacific-grove/2018-10-25/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/pacific-grove/2018-09-29/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/pacific-grove/2018-08-30/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/pacific-grove/2018-07-31/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/pacific-grove/2018-04-22/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/portland/2019-07-10/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/portland/2019-06-07/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/portland/2019-05-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/portland/2019-04-10/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/portland/2019-03-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/portland/2019-02-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/portland/2019-01-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-12-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-11-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-10-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-09-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-08-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-07-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-05-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-04-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-02-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2018-01-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-12-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-11-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-10-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-09-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-08-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-07-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-06-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-05-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-04-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-03-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-02-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2017-01-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-12-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-11-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-10-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-09-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-08-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-07-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-06-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-05-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-04-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-02-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2016-01-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2015-12-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2015-11-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2015-10-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2015-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2015-05-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/portland/2015-03-01/data/listings.csv',
'http://data.insideairbnb.com/united-states/ri/rhode-island/2019-07-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ri/rhode-island/2019-06-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ri/rhode-island/2019-05-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ri/rhode-island/2019-04-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ri/rhode-island/2019-03-31/data/listings.csv',
'http://data.insideairbnb.com/united-states/ri/rhode-island/2019-02-19/data/listings.csv',
'http://data.insideairbnb.com/united-states/ri/rhode-island/2019-01-30/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ri/rhode-island/2018-12-22/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ri/rhode-island/2018-11-27/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ri/rhode-island/2018-10-21/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-08-17/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-07-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-06-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-05-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-04-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-03-07/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-02-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/or/salem-or/2019-01-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/salem-or/2018-12-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/salem-or/2018-11-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/salem-or/2018-10-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/salem-or/2018-09-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/or/salem-or/2018-08-10/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-08-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-07-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-06-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-05-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-04-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-03-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-02-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-diego/2019-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-12-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-11-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-10-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-09-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-08-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-07-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-05-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2018-04-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2016-07-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-diego/2015-06-22/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-08-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-07-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-06-02/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-05-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-04-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-03-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-02-01/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-francisco/2019-01-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-12-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-11-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-10-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-09-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-08-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-07-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-05-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-04-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-03-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-02-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2018-01-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-12-07/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-12-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-11-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-11-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-10-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-08-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-07-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-06-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-05-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-04-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-03-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-02-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2017-01-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-12-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-11-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-10-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-08-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-07-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-06-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-05-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-04-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2016-02-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2015-12-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2015-11-01/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2015-09-02/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/san-francisco/2015-05-04/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/san-mateo-county/2019-06-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2019-07-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2019-06-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2019-05-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2019-04-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2019-03-07/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2019-02-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2019-01-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2018-12-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2018-11-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2018-10-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2018-09-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2018-08-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-clara-county/2018-07-07/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2019-07-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2019-06-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2019-05-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2019-04-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2019-03-30/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2019-02-19/data/listings.csv',
'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2019-01-30/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2018-12-22/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2018-11-26/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2018-10-20/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2018-09-28/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2018-08-30/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2018-07-31/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2018-04-22/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/ca/santa-cruz-county/2015-10-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-08-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-07-14/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-06-13/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-05-18/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-04-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-03-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-02-09/data/listings.csv',
'http://data.insideairbnb.com/united-states/wa/seattle/2019-01-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-12-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-11-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-10-11/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-09-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-08-16/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-07-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-05-17/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2018-04-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2016-01-04/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/wa/seattle/2015-06-22/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-08-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-07-08/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-06-02/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-05-05/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-04-03/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-03-06/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-02-01/data/listings.csv',
'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2019-01-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-12-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-11-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-10-03/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-09-08/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-08-06/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-07-05/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-05-09/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/mn/twin-cities-msa/2018-04-07/data/listings.csv',
'http://data.insideairbnb.com/united-states/dc/washington-dc/2019-07-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/dc/washington-dc/2019-06-16/data/listings.csv',
'http://data.insideairbnb.com/united-states/dc/washington-dc/2019-05-20/data/listings.csv',
'http://data.insideairbnb.com/united-states/dc/washington-dc/2019-04-15/data/listings.csv',
'http://data.insideairbnb.com/united-states/dc/washington-dc/2019-03-12/data/listings.csv',
'http://data.insideairbnb.com/united-states/dc/washington-dc/2019-02-11/data/listings.csv',
'http://data.insideairbnb.com/united-states/dc/washington-dc/2019-01-17/data/listings.csv']
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-12-13/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-11-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-10-12/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-09-14/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-08-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-07-20/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-05-18/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2018-04-15/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2017-05-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2017-03-10/data/listings.csv',
# 'http://data.insideairbnb.com/united-states/dc/washington-dc/2015-10-03/data/listings.csv']
def series_to_bool(series):
    """Convert a pandas Series of 't'/'f' flags to Python booleans.

    Values other than 't'/'f' (e.g. NaN) pass through unchanged, matching
    `Series.replace` semantics.

    Parameters
    ----------
    series : pd.Series
        Column of 't'/'f' strings as exported by insideairbnb listings CSVs.

    Returns
    -------
    pd.Series
        Same Series with 't' -> True and 'f' -> False.
    """
    # Single dict-based replace does one pass instead of two chained calls.
    return series.replace({'t': True, 'f': False})
def clean_strs(series):
series = series.str.strip('$').str.replace(',','')
return series.astype(float)
# Download, clean, and bulk-insert each listings snapshot into Postgres.
# NOTE(review): i starts at 191, so this loop only runs if `lists` holds
# more than 191 URLs -- presumably a resume point from an earlier run; confirm.
i = 191
while i < len(lists):
    # Fetch the CSV to a local file, then load it with pandas. The
    # (filename, headers) tuple urlretrieve returns is not needed.
    urlretrieve(lists[i], f'l{i}.csv')
    listing = pd.read_csv(f'l{i}.csv')
    # Drop the column groups that are stored elsewhere or unwanted.
    listing = listing.drop(REMOVE_COLS, axis=1)
    listing = listing.drop(HOST_COLS, axis=1)
    listing.rename(columns={'listing_url': 'url'}, inplace=True)
    # Keep only the leading part of the zipcode: cut at any embedded
    # newline, then drop a ZIP+4 suffix after the dash.
    listing['zipcode'] = (listing['zipcode'].str.split('\n', expand=True)[0]).str.split('-', expand=True)[0]
    # listing['zipcode'] = (listing['zipcode'].str.split('NY ',expand=True)[1])
    # Neighbourhood columns are unused for this dataset; zero them out.
    listing['neighbourhood'] = 0
    listing['neighbourhood_cleansed'] = 0
    listing['neighbourhood_group_cleansed'] = 0
    # Discard rows whose zipcode is clearly junk.
    listing = listing.loc[(listing['zipcode'] != ' ') & (listing['zipcode'] != 'CA')]
    # Normalise known malformed zipcodes (state prefix glued on) in a
    # single replace() pass instead of eight chained calls.
    listing['zipcode'] = listing['zipcode'].replace({
        'MN 55404': '55404',
        'CA 94158': '94158',
        'CA 94103': '94103',
        'CA 94108': '94108',
        'CA 94115': '94115',
        'CA 94109': '94109',
        'CA 94105': '94105',
        'CA 94102': '94102',
        'CA ': ' ',
        '': 0,
    })
    listing['zipcode'] = listing['zipcode'].fillna(0).astype(float)
    # Convert the 't'/'f' flag columns to real booleans.
    for bool_col in ('is_location_exact', 'require_guest_profile_picture',
                     'require_guest_phone_verification', 'is_business_travel_ready',
                     'has_availability', 'requires_license', 'instant_bookable'):
        listing[bool_col] = series_to_bool(listing[bool_col])
    # Convert the currency columns ('$1,234.00') to floats.
    for money_col in ('price', 'weekly_price', 'monthly_price',
                      'security_deposit', 'extra_people', 'cleaning_fee'):
        listing[money_col] = clean_strs(listing[money_col])
    # Safety net in case cleaning_fee still came through as strings.
    if listing['cleaning_fee'].dtype == 'object':
        listing['cleaning_fee'] = listing['cleaning_fee'].str.strip('$').str.strip(',').astype(float)
    listing.drop('license', axis=1, inplace=True)
    listing.drop('jurisdiction_names', axis=1, inplace=True)
    # BUG FIX: fillna() returns a new Series; the results were previously
    # discarded, so these three fills silently did nothing.
    listing['neighbourhood'] = listing['neighbourhood'].fillna(0)
    listing['neighbourhood_group_cleansed'] = listing['neighbourhood_group_cleansed'].fillna(0)
    listing['square_feet'] = listing['square_feet'].fillna(0)
    # URL columns must be plain strings for the insert below.
    for col in listing.columns:
        if 'url' in col:
            listing[col] = listing[col].astype(str)
    # Final NaN sweep: empty string for object columns, 0 for numerics.
    for col in listing.columns:
        if listing[col].dtype == 'object':
            listing[col] = listing[col].fillna('')
        else:
            listing[col] = listing[col].fillna(0)
    # SECURITY(review): database credentials are hard-coded here; move them
    # to environment variables or a secrets manager and rotate this password.
    connection = psycopg2.connect(dbname="postgres",
                                  user="ridley",
                                  password="sAYD6HJETeGHy7a9",
                                  host='airbnb.c38lxrcn63qf.us-west-1.rds.amazonaws.com',
                                  port=5432)
    cursor = connection.cursor()
    # NOTE(review): the insert relies on the DataFrame column order matching
    # this column list exactly -- confirm against the table schema.
    sql = """INSERT INTO listing (id,
                                  url,
                                  name,
                                  summary,
                                  space,
                                  description,
                                  experiences_offered,
                                  neighborhood_overview,
                                  notes,
                                  transit,
                                  access,
                                  interaction,
                                  house_rules,
                                  thumbnail_url,
                                  medium_url,
                                  picture_url,
                                  xl_picture_url,
                                  street,
                                  neighbourhood,
                                  neighbourhood_cleansed,
                                  neighbourhood_group_cleansed,
                                  city,
                                  state,
                                  zipcode,
                                  market,
                                  country_code,
                                  country,
                                  latitude,
                                  longitude,
                                  is_location_exact,
                                  property_type,
                                  room_type,
                                  accommodates,
                                  bathrooms,
                                  bedrooms,
                                  beds,
                                  bed_type,
                                  amenities,
                                  square_feet,
                                  price,
                                  weekly_price,
                                  monthly_price,
                                  security_deposit,
                                  cleaning_fee,
                                  guests_included,
                                  extra_people,
                                  minimum_nights,
                                  maximum_nights,
                                  minimum_minimum_nights,
                                  maximum_minimum_nights,
                                  minimum_maximum_nights,
                                  maximum_maximum_nights,
                                  minimum_nights_avg_ntm,
                                  maximum_nights_avg_ntm,
                                  calendar_updated,
                                  has_availability,
                                  availability_30,
                                  availability_60,
                                  availability_90,
                                  availability_365,
                                  number_of_reviews,
                                  number_of_reviews_ltm,
                                  first_review,
                                  last_review,
                                  review_scores_rating,
                                  review_scores_accuracy,
                                  review_scores_cleanliness,
                                  review_scores_checkin,
                                  review_scores_communication,
                                  review_scores_location,
                                  review_scores_value,
                                  requires_license,
                                  instant_bookable,
                                  is_business_travel_ready,
                                  cancellation_policy,
                                  require_guest_profile_picture,
                                  require_guest_phone_verification,
                                  calculated_host_listings_count,
                                  calculated_host_listings_count_entire_homes,
                                  calculated_host_listings_count_private_rooms,
                                  calculated_host_listings_count_shared_rooms,
                                  reviews_per_month)
    VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
    master_list = list(listing.itertuples(index=False))
    if len(master_list) != 0:
        cursor.executemany(sql, master_list)
    connection.commit()
    cursor.close()
    # BUG FIX: the connection was never closed, leaking one DB connection
    # per processed file.
    connection.close()
    i += 1
5332d8488aa63acfff130c8811d40fb961b681f4 | 2,804 | py | Python | munimap/grid/refs_test.py | MrSnyder/bielefeldGEOCLIENT | 17c78b43fc2055d23a1bc4b5091da164756bf767 | [
"Apache-2.0"
] | 2 | 2022-02-07T13:20:45.000Z | 2022-02-14T21:40:06.000Z | munimap/grid/refs_test.py | MrSnyder/bielefeldGEOCLIENT | 17c78b43fc2055d23a1bc4b5091da164756bf767 | [
"Apache-2.0"
] | 4 | 2021-06-17T07:53:53.000Z | 2021-12-17T10:55:48.000Z | munimap/grid/refs_test.py | MrSnyder/bielefeldGEOCLIENT | 17c78b43fc2055d23a1bc4b5091da164756bf767 | [
"Apache-2.0"
] | 2 | 2021-06-01T09:41:55.000Z | 2022-02-14T17:33:33.000Z | from nose.tools import eq_
from .refs import is_adjacent, group_refs, reduce_refs
def test_is_adjecent():
    """Cells sharing an edge (or identical) are adjacent; diagonal or
    farther apart cells are not. (Function name typo is historical.)"""
    origin = (0, 0)
    edge_neighbours = [(0, 0), (-1, 0), (0, -1), (1, 0), (0, 1)]
    diagonals = [(1, 1), (-1, -1)]
    too_far = [(-2, 0), (0, -2), (2, 0), (0, 2), (2, 2), (-2, -2)]

    # Adjacency holds in both argument orders for edge neighbours.
    for cell in edge_neighbours:
        assert is_adjacent(origin, cell)
        assert is_adjacent(cell, origin)
    # Diagonal cells are not adjacent, in either argument order.
    for cell in diagonals:
        assert not is_adjacent(origin, cell)
        assert not is_adjacent(cell, origin)
    # Cells two or more steps away are never adjacent.
    for cell in too_far:
        assert not is_adjacent(origin, cell)
def test_grouped():
    """group_refs clusters refs into connected, edge-adjacent groups."""
    cases = [
        ([(0, 0), (0, 1)],
         [[(0, 0), (0, 1)]]),
        ([(0, 0), (0, 2)],
         [[(0, 0)], [(0, 2)]]),
        ([(0, 0), (0, 2), (0, 1)],
         [[(0, 0), (0, 1), (0, 2)]]),
        ([(0, 0), (1, 2), (0, 1), (-1, 0), (2, 2)],
         [[(0, 0), (0, 1), (-1, 0)], [(1, 2), (2, 2)]]),
    ]
    for refs, expected in cases:
        eq_(group_refs(refs), expected)
def test_reduce_refs():
    """reduce_refs collapses a connected group to its two extreme refs."""
    # Each case: (shape sketch / direction, input refs, expected extremes).
    cases = [
        ('X single cell',
         [(0, 0)], [(0, 0)]),
        ('XX left to right',
         [(0, 0), (1, 0)], [(0, 0), (1, 0)]),
        ('XX left to right, reversed input',
         [(1, 0), (0, 0)], [(0, 0), (1, 0)]),
        ('XXX over ..X, left-top to right-low',
         [(0, 1), (1, 1), (2, 1), (2, 0)], [(0, 1), (2, 0)]),
        ('XXX over X.., left-low to right-top',
         [(0, 1), (1, 1), (2, 1), (0, 0)], [(0, 0), (2, 1)]),
        ('XXX over X.X, left-low to right-top',
         [(0, 1), (1, 1), (2, 1), (0, 0), (2, 0)], [(0, 0), (2, 1)]),
        ('XXX over ..X, left-top to right-low (repeat)',
         [(0, 1), (1, 1), (2, 1), (2, 0)], [(0, 1), (2, 0)]),
        ('XX. over .XX, left-top to right-low',
         [(0, 1), (1, 1), (1, 0), (2, 0)], [(0, 1), (2, 0)]),
        ('.XX over XX., left-low to right-top',
         [(0, 0), (1, 0), (1, 1), (2, 1)], [(0, 0), (2, 1)]),
        ('staircase, left-low to right-top',
         [(0, 0), (0, 1), (1, 1), (1, 2), (2, 2)], [(0, 0), (2, 2)]),
        ('vertical bump, left-low to left-top',
         [(0, 0), (0, 1), (1, 1), (0, 2)], [(0, 0), (0, 2)]),
        ('T-shape, left-middle to right-low',
         [(0, 1), (1, 0), (1, 1), (1, 2), (2, 0)], [(0, 1), (2, 0)]),
    ]
    for _sketch, refs, expected in cases:
        eq_(reduce_refs(refs), expected)
| 24.382609 | 62 | 0.42582 | 481 | 2,804 | 2.355509 | 0.058212 | 0.130627 | 0.076787 | 0.148279 | 0.865843 | 0.830538 | 0.799647 | 0.741395 | 0.667255 | 0.61165 | 0 | 0.134912 | 0.307418 | 2,804 | 114 | 63 | 24.596491 | 0.448507 | 0.115193 | 0 | 0.22807 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.350877 | 1 | 0.052632 | true | 0 | 0.035088 | 0 | 0.087719 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
53465a4ffb03be375bc2a391eaf87863b121cf7c | 6,230 | py | Python | tests/components/devolo_home_control/test_config_flow.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 1 | 2021-07-08T20:09:55.000Z | 2021-07-08T20:09:55.000Z | tests/components/devolo_home_control/test_config_flow.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 47 | 2021-02-21T23:43:07.000Z | 2022-03-31T06:07:10.000Z | tests/components/devolo_home_control/test_config_flow.py | OpenPeerPower/core | f673dfac9f2d0c48fa30af37b0a99df9dd6640ee | [
"Apache-2.0"
] | null | null | null | """Test the devolo_home_control config flow."""
from unittest.mock import patch
import pytest
from openpeerpower import config_entries, data_entry_flow, setup
from openpeerpower.components.devolo_home_control.const import DEFAULT_MYDEVOLO, DOMAIN
from openpeerpower.config_entries import SOURCE_USER
from .const import (
DISCOVERY_INFO,
DISCOVERY_INFO_WRONG_DEVICE,
DISCOVERY_INFO_WRONG_DEVOLO_DEVICE,
)
from tests.common import MockConfigEntry
async def test_form(opp):
    """The initial user step should render an empty form."""
    await setup.async_setup_component(opp, "persistent_notification", {})
    flow = await opp.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow["step_id"] == "user"
    assert flow["errors"] == {}

    await _setup(opp, flow)
@pytest.mark.credentials_invalid
async def test_form_invalid_credentials_user(opp):
    """Invalid credentials in the user step must surface an auth error."""
    await setup.async_setup_component(opp, "persistent_notification", {})
    flow = await opp.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow["step_id"] == "user"
    assert flow["errors"] == {}

    flow = await opp.config_entries.flow.async_configure(
        flow["flow_id"],
        {"username": "test-username", "password": "test-password"},
    )
    assert flow["errors"] == {"base": "invalid_auth"}
async def test_form_already_configured(opp):
    """Starting a flow for an already configured gateway must abort."""
    uuid_patch = patch(
        "openpeerpower.components.devolo_home_control.Mydevolo.uuid",
        return_value="123456",
    )
    with uuid_patch:
        MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}).add_to_opp(opp)
        flow = await opp.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_USER},
            data={"username": "test-username", "password": "test-password"},
        )
        assert flow["reason"] == "already_configured"
        assert flow["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_form_advanced_options(opp):
    """With advanced options enabled, the mydevolo URL can be overridden."""
    flow = await opp.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_USER, "show_advanced_options": True},
    )
    assert flow["type"] == "form"
    assert flow["errors"] == {}

    with patch(
        "openpeerpower.components.devolo_home_control.async_setup_entry",
        return_value=True,
    ) as setup_entry_mock, patch(
        "openpeerpower.components.devolo_home_control.Mydevolo.uuid",
        return_value="123456",
    ):
        entry = await opp.config_entries.flow.async_configure(
            flow["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
                "mydevolo_url": "https://test_mydevolo_url.test",
            },
        )
        await opp.async_block_till_done()
        assert entry["type"] == "create_entry"
        assert entry["title"] == "devolo Home Control"
        assert entry["data"] == {
            "username": "test-username",
            "password": "test-password",
            "mydevolo_url": "https://test_mydevolo_url.test",
        }
        assert len(setup_entry_mock.mock_calls) == 1
async def test_form_zeroconf(opp):
    """Zeroconf discovery should serve the confirmation form."""
    flow = await opp.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_ZEROCONF},
        data=DISCOVERY_INFO,
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow["step_id"] == "zeroconf_confirm"

    await _setup(opp, flow)
@pytest.mark.credentials_invalid
async def test_form_invalid_credentials_zeroconf(opp):
    """Invalid credentials in the zeroconf step must surface an auth error."""
    await setup.async_setup_component(opp, "persistent_notification", {})
    flow = await opp.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_ZEROCONF},
        data=DISCOVERY_INFO,
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow["step_id"] == "zeroconf_confirm"

    flow = await opp.config_entries.flow.async_configure(
        flow["flow_id"],
        {"username": "test-username", "password": "test-password"},
    )
    assert flow["errors"] == {"base": "invalid_auth"}
async def test_zeroconf_wrong_device(opp):
    """Zeroconf must abort for devices that are not Home Control gateways."""
    for discovery_info in (DISCOVERY_INFO_WRONG_DEVOLO_DEVICE,
                           DISCOVERY_INFO_WRONG_DEVICE):
        flow = await opp.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_ZEROCONF},
            data=discovery_info,
        )
        assert flow["reason"] == "Not a devolo Home Control gateway."
        assert flow["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def _setup(opp, result):
    """Complete the credentials step and verify the created config entry."""
    with patch(
        "openpeerpower.components.devolo_home_control.async_setup_entry",
        return_value=True,
    ) as setup_entry_mock, patch(
        "openpeerpower.components.devolo_home_control.Mydevolo.uuid",
        return_value="123456",
    ):
        entry = await opp.config_entries.flow.async_configure(
            result["flow_id"],
            {"username": "test-username", "password": "test-password"},
        )
        await opp.async_block_till_done()
        assert entry["type"] == "create_entry"
        assert entry["title"] == "devolo Home Control"
        assert entry["data"] == {
            "username": "test-username",
            "password": "test-password",
            "mydevolo_url": DEFAULT_MYDEVOLO,
        }
        assert len(setup_entry_mock.mock_calls) == 1
| 33.858696 | 87 | 0.673515 | 730 | 6,230 | 5.486301 | 0.146575 | 0.068165 | 0.041948 | 0.062921 | 0.819476 | 0.789513 | 0.777278 | 0.775281 | 0.748814 | 0.730087 | 0 | 0.006866 | 0.205136 | 6,230 | 183 | 88 | 34.043716 | 0.801898 | 0.006581 | 0 | 0.681159 | 0 | 0 | 0.214397 | 0.067303 | 0 | 0 | 0 | 0 | 0.202899 | 1 | 0 | false | 0.050725 | 0.050725 | 0 | 0.050725 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
53482600f0b84ad37a695f5dddb794a8ec495808 | 88 | py | Python | policy.py | aayn/policy-iteration-dp | 074634473db397039612367c3b25d765b1dc264a | [
"MIT"
] | null | null | null | policy.py | aayn/policy-iteration-dp | 074634473db397039612367c3b25d765b1dc264a | [
"MIT"
] | null | null | null | policy.py | aayn/policy-iteration-dp | 074634473db397039612367c3b25d765b1dc264a | [
"MIT"
] | null | null | null | import numpy as np
def make_policy(all_states):
return {s: 0 for s in all_states}
| 14.666667 | 37 | 0.715909 | 17 | 88 | 3.529412 | 0.823529 | 0.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014493 | 0.215909 | 88 | 5 | 38 | 17.6 | 0.855072 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
7291abe08781c57a5de7523d403f98c45b79fbbf | 7,131 | py | Python | tests/test_cardano_blocks.py | mathiasfrohlich/blockfrost-python | 4b391a711683732c47fa9183532f14c189b75124 | [
"Apache-2.0"
] | 1 | 2022-01-17T14:26:05.000Z | 2022-01-17T14:26:05.000Z | tests/test_cardano_blocks.py | mathiasfrohlich/blockfrost-python | 4b391a711683732c47fa9183532f14c189b75124 | [
"Apache-2.0"
] | null | null | null | tests/test_cardano_blocks.py | mathiasfrohlich/blockfrost-python | 4b391a711683732c47fa9183532f14c189b75124 | [
"Apache-2.0"
] | null | null | null | from blockfrost import BlockFrostApi, ApiError
from blockfrost.api.cardano.blocks import \
BlockResponse
# Shared fixture values used by every test below.
# NOTE(review): `hash` shadows the builtin of the same name within this
# module; consider renaming (e.g. BLOCK_HASH) together with its usages.
hash = '4ea1ba291e8eef538635a53e59fddba7810d1679631cc3aed7c8e6c4091a516a'
slot_number = 412162133
epoch_number = 425
def test_block_latest(requests_mock):
    """GET /blocks/latest should deserialize into a BlockResponse."""
    api = BlockFrostApi()
    payload = dict(
        time=1641338934,
        height=15243593,
        hash=hash,
        slot=slot_number,
        epoch=epoch_number,
        epoch_slot=12,
        slot_leader="pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2qnikdy",
        size=3,
        tx_count=1,
        output="128314491794",
        fees="592661",
        block_vrf="vrf_vk1wf2k6lhujezqcfe00l6zetxpnmh9n6mwhpmhm0dvfh3fxgmdnrfqkms8ty",
        previous_block="43ebccb3ac72c7cebd0d9b755a4b08412c9f5dcb81b8a0ad1e3c197d29d47b05",
        next_block="8367f026cf4b03e116ff8ee5daf149b55ba5a6ec6dec04803b8dc317721d15fa",
        confirmations=4698,
    )
    requests_mock.get(f"{api.url}/blocks/latest", json=payload)
    assert api.block_latest() == BlockResponse(**payload)
def test_block_latest_transactions(requests_mock):
    """GET /blocks/latest/txs should return the raw list of tx hashes."""
    api = BlockFrostApi()
    tx_hashes = [
        "8788591983aa73981fc92d6cddbbe643959f5a784e84b8bee0db15823f575a5b",
        "4eef6bb7755d8afbeac526b799f3e32a624691d166657e9d862aaeb66682c036",
        "52e748c4dec58b687b90b0b40d383b9fe1f24c1a833b7395cdf07dd67859f46f",
        "e8073fd5318ff43eca18a852527166aa8008bee9ee9e891f585612b7e4ba700b",
    ]
    requests_mock.get(f"{api.url}/blocks/latest/txs", json=tx_hashes)
    assert api.block_latest_transactions() == tx_hashes
def test_block(requests_mock):
    """GET /blocks/{hash} should deserialize into a BlockResponse."""
    api = BlockFrostApi()
    payload = dict(
        time=1641338934,
        height=15243593,
        hash=hash,
        slot=slot_number,
        epoch=epoch_number,
        epoch_slot=12,
        slot_leader="pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2qnikdy",
        size=3,
        tx_count=1,
        output="128314491794",
        fees="592661",
        block_vrf="vrf_vk1wf2k6lhujezqcfe00l6zetxpnmh9n6mwhpmhm0dvfh3fxgmdnrfqkms8ty",
        previous_block="43ebccb3ac72c7cebd0d9b755a4b08412c9f5dcb81b8a0ad1e3c197d29d47b05",
        next_block="8367f026cf4b03e116ff8ee5daf149b55ba5a6ec6dec04803b8dc317721d15fa",
        confirmations=4698,
    )
    requests_mock.get(f"{api.url}/blocks/{hash}", json=payload)
    assert api.block(hash_or_number=hash) == BlockResponse(**payload)
def test_block_slot(requests_mock):
    """GET /blocks/slot/{slot} should deserialize into a BlockResponse."""
    api = BlockFrostApi()
    payload = dict(
        time=1641338934,
        height=15243593,
        hash=hash,
        slot=slot_number,
        epoch=epoch_number,
        epoch_slot=12,
        slot_leader="pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2qnikdy",
        size=3,
        tx_count=1,
        output="128314491794",
        fees="592661",
        block_vrf="vrf_vk1wf2k6lhujezqcfe00l6zetxpnmh9n6mwhpmhm0dvfh3fxgmdnrfqkms8ty",
        previous_block="43ebccb3ac72c7cebd0d9b755a4b08412c9f5dcb81b8a0ad1e3c197d29d47b05",
        next_block="8367f026cf4b03e116ff8ee5daf149b55ba5a6ec6dec04803b8dc317721d15fa",
        confirmations=4698,
    )
    requests_mock.get(f"{api.url}/blocks/slot/{slot_number}", json=payload)
    assert api.block_slot(slot_number=slot_number) == BlockResponse(**payload)
def test_block_epoch_slot(requests_mock):
    """GET /blocks/epoch/{epoch}/slot/{slot} should deserialize correctly."""
    api = BlockFrostApi()
    payload = dict(
        time=1641338934,
        height=15243593,
        hash=hash,
        slot=slot_number,
        epoch=epoch_number,
        epoch_slot=12,
        slot_leader="pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2qnikdy",
        size=3,
        tx_count=1,
        output="128314491794",
        fees="592661",
        block_vrf="vrf_vk1wf2k6lhujezqcfe00l6zetxpnmh9n6mwhpmhm0dvfh3fxgmdnrfqkms8ty",
        previous_block="43ebccb3ac72c7cebd0d9b755a4b08412c9f5dcb81b8a0ad1e3c197d29d47b05",
        next_block="8367f026cf4b03e116ff8ee5daf149b55ba5a6ec6dec04803b8dc317721d15fa",
        confirmations=4698,
    )
    requests_mock.get(f"{api.url}/blocks/epoch/{epoch_number}/slot/{slot_number}", json=payload)
    assert api.block_epoch_slot(epoch_number=epoch_number, slot_number=slot_number) == BlockResponse(**payload)
def test_blocks_next(requests_mock):
    """GET /blocks/{hash}/next should return a list of BlockResponse."""
    api = BlockFrostApi()
    payload = [
        dict(
            time=1641338934,
            height=15243593,
            hash=hash,
            slot=slot_number,
            epoch=epoch_number,
            epoch_slot=12,
            slot_leader="pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2qnikdy",
            size=3,
            tx_count=1,
            output="128314491794",
            fees="592661",
            block_vrf="vrf_vk1wf2k6lhujezqcfe00l6zetxpnmh9n6mwhpmhm0dvfh3fxgmdnrfqkms8ty",
            previous_block="43ebccb3ac72c7cebd0d9b755a4b08412c9f5dcb81b8a0ad1e3c197d29d47b05",
            next_block="8367f026cf4b03e116ff8ee5daf149b55ba5a6ec6dec04803b8dc317721d15fa",
            confirmations=4698,
        )
    ]
    requests_mock.get(f"{api.url}/blocks/{hash}/next", json=payload)
    expected = [BlockResponse(**item) for item in payload]
    assert api.blocks_next(hash_or_number=hash) == expected
def test_blocks_previous(requests_mock):
    """GET /blocks/{hash}/previous should return a list of BlockResponse."""
    api = BlockFrostApi()
    payload = [
        dict(
            time=1641338934,
            height=15243593,
            hash=hash,
            slot=slot_number,
            epoch=epoch_number,
            epoch_slot=12,
            slot_leader="pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2qnikdy",
            size=3,
            tx_count=1,
            output="128314491794",
            fees="592661",
            block_vrf="vrf_vk1wf2k6lhujezqcfe00l6zetxpnmh9n6mwhpmhm0dvfh3fxgmdnrfqkms8ty",
            previous_block="43ebccb3ac72c7cebd0d9b755a4b08412c9f5dcb81b8a0ad1e3c197d29d47b05",
            next_block="8367f026cf4b03e116ff8ee5daf149b55ba5a6ec6dec04803b8dc317721d15fa",
            confirmations=4698,
        )
    ]
    requests_mock.get(f"{api.url}/blocks/{hash}/previous", json=payload)
    expected = [BlockResponse(**item) for item in payload]
    assert api.blocks_previous(hash_or_number=hash) == expected
def test_block_transactions(requests_mock):
    """GET /blocks/{hash}/txs should return the raw list of tx hashes."""
    api = BlockFrostApi()
    tx_hashes = [
        "8788591983aa73981fc92d6cddbbe643959f5a784e84b8bee0db15823f575a5b",
        "4eef6bb7755d8afbeac526b799f3e32a624691d166657e9d862aaeb66682c036",
        "52e748c4dec58b687b90b0b40d383b9fe1f24c1a833b7395cdf07dd67859f46f",
        "e8073fd5318ff43eca18a852527166aa8008bee9ee9e891f585612b7e4ba700b",
    ]
    requests_mock.get(f"{api.url}/blocks/{hash}/txs", json=tx_hashes)
    assert api.block_transactions(hash_or_number=hash) == tx_hashes
| 39.181319 | 98 | 0.684616 | 565 | 7,131 | 8.366372 | 0.109735 | 0.040618 | 0.026655 | 0.047387 | 0.929977 | 0.917072 | 0.909033 | 0.909033 | 0.883012 | 0.866935 | 0 | 0.219295 | 0.212172 | 7,131 | 181 | 99 | 39.39779 | 0.622108 | 0 | 0 | 0.743902 | 0 | 0 | 0.434862 | 0.32548 | 0 | 0 | 0 | 0 | 0.04878 | 1 | 0.04878 | false | 0 | 0.012195 | 0 | 0.060976 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f42d626bf0bbd0b195d6198c71e14ba1b041906d | 11,696 | py | Python | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | [
"MIT"
] | null | null | null | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | [
"MIT"
] | null | null | null | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Dell EMC OpenManage Ansible Modules
# Version 2.1.2
# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_server_config_profile
from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock
from pytest import importorskip
# Dotted prefix used to build mocker.patch() targets for the module under test.
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'

# Skip this whole test module when the Dell OMSDK dependency is absent.
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
class TestServerConfigProfile(FakeAnsibleModule):
module = idrac_server_config_profile
@pytest.fixture
def idrac_server_configure_profile_mock(self, mocker):
omsdk_mock = MagicMock()
idrac_obj = MagicMock()
omsdk_mock.file_share_manager = idrac_obj
omsdk_mock.config_mgr = idrac_obj
return idrac_obj
@pytest.fixture
def idrac_file_manager_server_config_profile_mock(self, mocker):
try:
file_manager_obj = mocker.patch(
MODULE_PATH + 'idrac_server_config_profile.file_share_manager')
except AttributeError:
file_manager_obj = MagicMock()
obj = MagicMock()
file_manager_obj.create_share_obj.return_value = obj
return file_manager_obj
@pytest.fixture
def idrac_connection_server_configure_profile_mock(self, mocker, idrac_server_configure_profile_mock):
idrac_conn_class_mock = mocker.patch(MODULE_PATH + 'idrac_server_config_profile.iDRACConnection',
return_value=idrac_server_configure_profile_mock)
idrac_conn_class_mock.return_value.__enter__.return_value = idrac_server_configure_profile_mock
return idrac_server_configure_profile_mock
def test_main_idrac_server_config_profile_import_success_Case01(
self, idrac_connection_server_configure_profile_mock, idrac_default_args, mocker,
idrac_file_manager_server_config_profile_mock):
idrac_default_args.update({"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
"command": "import", "job_wait": True, "scp_components": "IDRAC",
"scp_file": "scp_file.xml"})
message = {"Status": "Success"}
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_import_server_config_profile',
return_value=message)
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_server_config_profile',
return_value=("export_status"))
result = self._run_module(idrac_default_args)
assert result == {'msg': 'Successfully imported the Server Configuration Profile.',
'scp_status': {'Status': 'Success'},
'changed': True}
def test_main_idrac_server_config_profile_import_success_Case02(
self, idrac_connection_server_configure_profile_mock, idrac_default_args, mocker,
idrac_file_manager_server_config_profile_mock):
idrac_default_args.update({"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
"command": "import", "job_wait": False, "scp_components": "IDRAC",
"scp_file": "scp_file.xml"})
message = {"Status": "Success"}
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_import_server_config_profile',
return_value=message)
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_server_config_profile',
return_value=("export_status"))
result = self._run_module(idrac_default_args)
assert result == {'changed': True,
'msg': 'Successfully triggered the job to import the Server Configuration Profile.',
'scp_status': {'Status': 'Success'}}
def test_main_idrac_server_config_profile_export_success_Case01(
self, idrac_connection_server_configure_profile_mock, idrac_default_args, mocker,
idrac_file_manager_server_config_profile_mock):
idrac_default_args.update({"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
"command": "export", "job_wait": True, "scp_components": "IDRAC",
"scp_file": "scp_file.xml"})
message = {"Status": "Success"}
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_import_server_config_profile',
return_value=("import_status"))
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_server_config_profile',
return_value=message)
result = self._run_module(idrac_default_args)
assert result == {'msg': 'Successfully exported the Server Configuration Profile.',
'scp_status': {'Status': 'Success'},
'changed': False}
def test_main_idrac_server_config_profile_export_success_Case02(
self, idrac_connection_server_configure_profile_mock, idrac_default_args, mocker,
idrac_file_manager_server_config_profile_mock):
idrac_default_args.update({"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
"command": "export", "job_wait": False, "scp_components": "IDRAC",
"scp_file": "scp_file.xml"})
message = {"Status": "Success"}
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_import_server_config_profile',
return_value=("import_status"))
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_server_config_profile',
return_value=message)
result = self._run_module(idrac_default_args)
assert result == {'changed': False,
'msg': 'Successfully triggered the job to export the Server Configuration Profile.',
'scp_status': {'Status': 'Success'}}
@pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
def test_main_idrac_server_config_profile_exception_handling_case(
self, exc_type, mocker, idrac_default_args, idrac_connection_server_configure_profile_mock,
idrac_file_manager_server_config_profile_mock):
idrac_default_args.update({"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
"command": "export", "job_wait": True, "scp_components": "IDRAC",
"scp_file": "scp_file.xml"})
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_import_server_config_profile',
side_effect=exc_type('test'))
mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_server_config_profile',
side_effect=exc_type('test'))
result = self._run_module_with_fail_json(idrac_default_args)
assert 'msg' in result
assert result['failed'] is True
def test_run_import_server_config_profile_success_case(
self, idrac_connection_server_configure_profile_mock, idrac_default_args,
idrac_file_manager_server_config_profile_mock):
idrac_default_args.update({"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
"command": "export", "job_wait": True, "scp_components": "IDRAC",
"scp_file": "scp_file.xml", "end_host_power_state": "On",
"shutdown_type": "Graceful"})
message = {"Status": "Success"}
f_module = self.get_module_mock(params=idrac_default_args)
idrac_connection_server_configure_profile_mock.config_mgr.scp_import.return_value = message
result = self.module.run_import_server_config_profile(idrac_connection_server_configure_profile_mock, f_module)
assert result == {"Status": "Success"}
def test_run_import_server_config_profile_runtimeerror_case(
self, idrac_connection_server_configure_profile_mock, idrac_default_args,
idrac_file_manager_server_config_profile_mock):
idrac_default_args.update({"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
"command": "export", "job_wait": True, "scp_components": "IDRAC",
"scp_file": "scp_file.xml", "end_host_power_state": "On",
"shutdown_type": "Graceful"})
message = {"Status": "Failed"}
f_module = self.get_module_mock(params=idrac_default_args)
obj = MagicMock()
idrac_connection_server_configure_profile_mock.config_mgr = obj
obj.scp_import = Mock(return_value=message)
with pytest.raises(Exception) as ex:
self.module.run_import_server_config_profile(idrac_connection_server_configure_profile_mock, f_module)
assert "Failed to import scp." == str(ex.value)
def test_run_export_server_config_profile_success_case(
        self, idrac_connection_server_configure_profile_mock, idrac_default_args,
        idrac_file_manager_server_config_profile_mock):
    """run_export_server_config_profile should return whatever scp_export reports."""
    idrac_default_args.update(
        {"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
         "command": "export", "job_wait": True, "scp_components": "IDRAC",
         "scp_file": "scp_file.xml", "end_host_power_state": "On",
         "shutdown_type": "Graceful", "export_format": "XML", "export_use": "Default"})
    module_mock = self.get_module_mock(params=idrac_default_args)
    scp_status = {"Status": "Success"}
    # Make the mocked config manager report a successful export.
    idrac_connection_server_configure_profile_mock.config_mgr.scp_export.return_value = scp_status
    outcome = self.module.run_export_server_config_profile(
        idrac_connection_server_configure_profile_mock, module_mock)
    assert outcome == {"Status": "Success"}
def test_run_export_server_config_profile_runtimeerror_case(
        self, idrac_connection_server_configure_profile_mock, idrac_default_args,
        idrac_file_manager_server_config_profile_mock):
    """A "Failed" scp_export status must surface as an exception to the caller."""
    idrac_default_args.update(
        {"share_name": "sharename", "share_user": "sharename", "share_password": "sharepswd",
         "command": "export", "job_wait": True, "scp_components": "IDRAC",
         "scp_file": "scp_file.xml", "end_host_power_state": "On",
         "shutdown_type": "Graceful", "export_format": "XML", "export_use": "Default"})
    module_mock = self.get_module_mock(params=idrac_default_args)
    # Replace config_mgr wholesale so scp_export yields a failed status.
    config_mgr_mock = MagicMock()
    config_mgr_mock.scp_export = Mock(return_value={"Status": "Failed"})
    idrac_connection_server_configure_profile_mock.config_mgr = config_mgr_mock
    with pytest.raises(Exception) as ex:
        self.module.run_export_server_config_profile(
            idrac_connection_server_configure_profile_mock, module_mock)
    assert "Failed to export scp." == str(ex.value)
| 61.235602 | 119 | 0.668947 | 1,292 | 11,696 | 5.592105 | 0.123065 | 0.078062 | 0.123599 | 0.082768 | 0.842491 | 0.831696 | 0.797232 | 0.753633 | 0.721246 | 0.661038 | 0 | 0.002239 | 0.236149 | 11,696 | 190 | 120 | 61.557895 | 0.80647 | 0.019921 | 0 | 0.595092 | 0 | 0 | 0.23876 | 0.06495 | 0 | 0 | 0 | 0 | 0.06135 | 1 | 0.07362 | false | 0.055215 | 0.177914 | 0 | 0.282209 | 0.006135 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
f439bf2e502da66586b9f9e8c59fabced760f9e2 | 47,849 | py | Python | Utils/Data/Features/Generated/EngagerFeature/NumberOfPreviousEngagementBetweenCreatorAndEngager.py | MaurizioFD/recsys-challenge-2020-twitter | 95dc024fb4f8777aa62e1304536daece640428de | [
"Apache-2.0"
] | 44 | 2020-07-09T11:31:17.000Z | 2022-03-04T05:50:48.000Z | Utils/Data/Features/Generated/EngagerFeature/NumberOfPreviousEngagementBetweenCreatorAndEngager.py | kiminh/recsys-challenge-2020-twitter | 567f0db40be7db3d21c360f2ca6cdf2addc7c698 | [
"Apache-2.0"
] | 3 | 2020-10-02T18:55:21.000Z | 2020-10-13T22:13:58.000Z | Utils/Data/Features/Generated/EngagerFeature/NumberOfPreviousEngagementBetweenCreatorAndEngager.py | kiminh/recsys-challenge-2020-twitter | 567f0db40be7db3d21c360f2ca6cdf2addc7c698 | [
"Apache-2.0"
] | 9 | 2020-08-08T14:55:59.000Z | 2021-09-06T09:17:03.000Z | import numpy as np
from Utils.Data.DatasetUtils import is_test_or_val_set, get_train_set_id_from_test_or_val_set, \
get_test_or_val_set_id_from_train
from Utils.Data.Features.Generated.TweetFeature.IsEngagementType import *
from Utils.Data.Features.MappedFeatures import MappedFeatureEngagerId, MappedFeatureCreatorId
import time
def find_and_increase_engager(eng_id, cre_id, dictionary):
    """Return how many times eng_id has engaged cre_id's tweets so far, then bump it.

    The counter lives in ``dictionary`` under the key ``(cre_id, eng_id)``;
    a missing key means zero previous engagements. The current engagement is
    recorded under the same key, so the returned value never includes it.
    """
    key = (cre_id, eng_id)
    previous = dictionary.setdefault(key, 0)
    dictionary[key] = previous + 1
    return previous
def find_and_increase_creator(eng_id, cre_id, dictionary):
    """Return how many times cre_id has engaged eng_id's tweets so far, and
    record the current engagement.

    Note the deliberate asymmetry: the value is looked up under the reversed
    key ``(eng_id, cre_id)`` (the creator acting on the engager's content in
    earlier, role-swapped rows), while the increment is applied to
    ``(cre_id, eng_id)`` — the engagement actually happening in this row —
    so that future role-swapped rows can see it.
    """
    reverse_key = (eng_id, cre_id)
    forward_key = (cre_id, eng_id)
    result = dictionary.get(reverse_key, 0)
    dictionary[forward_key] = dictionary.get(forward_key, 0) + 1
    return result
class EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByCreator(GeneratedFeaturePickle):
    # How many times has the creator previously like-engaged with tweets
    # authored by the engager (creator -> engager direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__("engager_feature_number_of_previous_like_engagement_betweet_creator_and_engager_by_creator",
                         dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsLike(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        # The feature value of a row is read under the REVERSED key
        # (engager, creator): how often the current creator engaged the current
        # engager's tweets in earlier, role-swapped rows.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_creator(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, cre_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((eng_id, cre_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByCreator(test_dataset_id).save_feature(
                result)
class EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByEngager(GeneratedFeaturePickle):
    # How many times has the engager previously like-engaged with tweets
    # authored by the creator (engager -> creator direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__(
            "engager_feature_number_of_previous_like_engagement_betweet_creator_and_engager_by_engager",
            dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsLike(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        engagement_dict = {}
        # Engaged rows return the prior count and then record the engagement;
        # non-engaged rows only read the dictionary.
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((cre_id, eng_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((cre_id, eng_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousLikeEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByCreator(GeneratedFeaturePickle):
    # How many times has the creator previously reply-engaged with tweets
    # authored by the engager (creator -> engager direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__("engager_feature_number_of_previous_reply_engagement_betweet_creator_and_engager_by_creator",
                         dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsReply(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        # The feature value of a row is read under the REVERSED key
        # (engager, creator): how often the current creator engaged the current
        # engager's tweets in earlier, role-swapped rows.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_creator(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, cre_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((eng_id, cre_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).save_feature(
                result)
class EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByEngager(GeneratedFeaturePickle):
    # How many times has the engager previously reply-engaged with tweets
    # authored by the creator (engager -> creator direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__(
            "engager_feature_number_of_previous_reply_engagement_betweet_creator_and_engager_by_engager",
            dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsReply(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        engagement_dict = {}
        # Engaged rows return the prior count and then record the engagement;
        # non-engaged rows only read the dictionary.
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((cre_id, eng_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((cre_id, eng_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousReplyEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByCreator(GeneratedFeaturePickle):
    # How many times has the creator previously retweet-engaged with tweets
    # authored by the engager (creator -> engager direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__("engager_feature_number_of_previous_retweet_engagement_betweet_creator_and_engager_by_creator",
                         dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsRetweet(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        # The feature value of a row is read under the REVERSED key
        # (engager, creator): how often the current creator engaged the current
        # engager's tweets in earlier, role-swapped rows.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_creator(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, cre_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((eng_id, cre_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).save_feature(
                result)
class EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByEngager(GeneratedFeaturePickle):
    # How many times has the engager previously retweet-engaged with tweets
    # authored by the creator (engager -> creator direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__(
            "engager_feature_number_of_previous_retweet_engagement_betweet_creator_and_engager_by_engager",
            dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsRetweet(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        engagement_dict = {}
        # Engaged rows return the prior count and then record the engagement;
        # non-engaged rows only read the dictionary.
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((cre_id, eng_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((cre_id, eng_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousRetweetEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByCreator(GeneratedFeaturePickle):
    # How many times has the creator previously comment-engaged with tweets
    # authored by the engager (creator -> engager direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__("engager_feature_number_of_previous_comment_engagement_betweet_creator_and_engager_by_creator",
                         dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsComment(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        # The feature value of a row is read under the REVERSED key
        # (engager, creator): how often the current creator engaged the current
        # engager's tweets in earlier, role-swapped rows.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_creator(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, cre_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((eng_id, cre_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).save_feature(
                result)
class EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByEngager(GeneratedFeaturePickle):
    # How many times has the engager previously comment-engaged with tweets
    # authored by the creator (engager -> creator direction)?

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__(
            "engager_feature_number_of_previous_comment_engagement_betweet_creator_and_engager_by_engager",
            dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsComment(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        engagement_dict = {}
        # Engaged rows return the prior count and then record the engagement;
        # non-engaged rows only read the dictionary.
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((cre_id, eng_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByEngager(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((cre_id, eng_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousCommentEngagementBetweenCreatorAndEngagerByEngager(
                test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByCreator(GeneratedFeaturePickle):
    # How many times has the creator previously positively engaged with tweets
    # authored by the engager (creator -> engager direction)?
    # "Positive" is whatever TweetFeatureEngagementIsPositive flags -
    # presumably any of like/reply/retweet/comment; confirm in that class.

    def __init__(self, dataset_id: str):
        """Register the feature name and its pickle/csv cache paths for dataset_id."""
        # NOTE: the "betweet" typo is kept - it is part of the persisted feature name.
        super().__init__("engager_feature_number_of_previous_positive_engagement_betweet_creator_and_engager_by_creator",
                         dataset_id)
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")

    def create_feature(self):
        """Compute and persist the feature for the train set and its paired test/val set.

        The train set is scanned in chronological order while a running
        count dictionary is updated; the test/val feature is then computed
        from the final (frozen) dictionary, so no future information leaks.
        """
        # Resolve the train/test pair regardless of which id this instance holds.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load features
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsPositive(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create()
        ], axis=1)
        # Chronological order is required so each row only sees past engagements.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a tuple (creator, engager)
        # VALUE: the number of times the engager has engaged with the creator
        # If the key does not exist -> 0 times.
        # The feature value of a row is read under the REVERSED key
        # (engager, creator): how often the current creator engaged the current
        # engager's tweets in earlier, role-swapped rows.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_creator(eng_id, cre_id, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, cre_id), 0)
             for eng_id, cre_id, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).has_feature():
            # Restore the original row order before persisting.
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByCreator(
                train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).has_feature():
            # Load features
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create()
            ], axis=1)
            # Test/val rows only LOOK UP counts accumulated on the train set.
            result = pd.DataFrame(
                [engagement_dict.get((eng_id, cre_id), 0)
                 for eng_id, cre_id
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name])],
                index=dataframe.index
            )
            EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByCreator(
                test_dataset_id).save_feature(
                result)
class EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByEngager(GeneratedFeaturePickle):
# Has the engager ever liked a tweet of the creator? If yes, how many times?
def __init__(self, dataset_id: str):
super().__init__(
"engager_feature_number_of_previous_positive_engagement_betweet_creator_and_engager_by_engager",
dataset_id)
self.pck_path = pl.Path(
f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
self.csv_path = pl.Path(
f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")
def create_feature(self):
# Check if the dataset id is train or test
if is_test_or_val_set(self.dataset_id):
train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
test_dataset_id = self.dataset_id
else:
train_dataset_id = self.dataset_id
test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
# Load features
creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
engagers_feature = MappedFeatureEngagerId(train_dataset_id)
creators_feature = MappedFeatureCreatorId(train_dataset_id)
engagement_feature = TweetFeatureEngagementIsPositive(train_dataset_id)
dataframe = pd.concat([
creation_timestamps_feature.load_or_create(),
engagers_feature.load_or_create(),
engagement_feature.load_or_create(),
creators_feature.load_or_create()
], axis=1)
dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
# KEY: a tuple (creator, engager)
# VALUE: the number of time the engager has engaged with the creator
# If key does not exists -> 0 times.
engagement_dict = {}
result = pd.DataFrame(
[find_and_increase_engager(eng_id, cre_id, engagement_dict)
if engagement
else engagement_dict.get((cre_id, eng_id), 0)
for eng_id, cre_id, engagement
in zip(dataframe[engagers_feature.feature_name],
dataframe[creators_feature.feature_name],
dataframe[engagement_feature.feature_name])],
index=dataframe.index
)
if not EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByEngager(
train_dataset_id).has_feature():
result.sort_index(inplace=True)
EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByEngager(
train_dataset_id).save_feature(result)
if not EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByEngager(
test_dataset_id).has_feature():
# Load features
creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
engagers_feature = MappedFeatureEngagerId(test_dataset_id)
creators_feature = MappedFeatureCreatorId(test_dataset_id)
dataframe = pd.concat([
creation_timestamps_feature.load_or_create(),
engagers_feature.load_or_create(),
creators_feature.load_or_create()
], axis=1)
result = pd.DataFrame(
[engagement_dict.get((cre_id, eng_id), 0)
for eng_id, cre_id
in zip(dataframe[engagers_feature.feature_name],
dataframe[creators_feature.feature_name])],
index=dataframe.index
)
EngagerFeatureNumberOfPreviousPositiveEngagementBetweenCreatorAndEngagerByEngager(
test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByCreator(GeneratedFeaturePickle):
# Has the creator ever liked a tweet of the engager? If yes, how many times?
def __init__(self, dataset_id: str):
super().__init__("engager_feature_number_of_previous_negative_engagement_betweet_creator_and_engager_by_creator",
dataset_id)
self.pck_path = pl.Path(
f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
self.csv_path = pl.Path(
f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")
def create_feature(self):
# Check if the dataset id is train or test
if is_test_or_val_set(self.dataset_id):
train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
test_dataset_id = self.dataset_id
else:
train_dataset_id = self.dataset_id
test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
# Load features
creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
engagers_feature = MappedFeatureEngagerId(train_dataset_id)
creators_feature = MappedFeatureCreatorId(train_dataset_id)
engagement_feature = TweetFeatureEngagementIsNegative(train_dataset_id)
dataframe = pd.concat([
creation_timestamps_feature.load_or_create(),
engagers_feature.load_or_create(),
engagement_feature.load_or_create(),
creators_feature.load_or_create()
], axis=1)
dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
# KEY: a tuple (creator, engager)
# VALUE: the number of time the engager has engaged with the creator
# If key does not exists -> 0 times.
engagement_dict = {}
result = pd.DataFrame(
[find_and_increase_creator(eng_id, cre_id, engagement_dict)
if engagement
else engagement_dict.get((eng_id, cre_id), 0)
for eng_id, cre_id, engagement
in zip(dataframe[engagers_feature.feature_name],
dataframe[creators_feature.feature_name],
dataframe[engagement_feature.feature_name])],
index=dataframe.index
)
if not EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByCreator(
train_dataset_id).has_feature():
result.sort_index(inplace=True)
EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByCreator(
train_dataset_id).save_feature(result)
if not EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByCreator(
test_dataset_id).has_feature():
# Load features
creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
engagers_feature = MappedFeatureEngagerId(test_dataset_id)
creators_feature = MappedFeatureCreatorId(test_dataset_id)
dataframe = pd.concat([
creation_timestamps_feature.load_or_create(),
engagers_feature.load_or_create(),
creators_feature.load_or_create()
], axis=1)
result = pd.DataFrame(
[engagement_dict.get((eng_id, cre_id), 0)
for eng_id, cre_id
in zip(dataframe[engagers_feature.feature_name],
dataframe[creators_feature.feature_name])],
index=dataframe.index
)
EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByCreator(
test_dataset_id).save_feature(
result)
class EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByEngager(GeneratedFeaturePickle):
# Has the engager ever liked a tweet of the creator? If yes, how many times?
def __init__(self, dataset_id: str):
super().__init__(
"engager_feature_number_of_previous_negative_engagement_betweet_creator_and_engager_by_engager",
dataset_id)
self.pck_path = pl.Path(
f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.pck.gz")
self.csv_path = pl.Path(
f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_between_engager_and_creator/{self.feature_name}.csv.gz")
def create_feature(self):
# Check if the dataset id is train or test
if is_test_or_val_set(self.dataset_id):
train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
test_dataset_id = self.dataset_id
else:
train_dataset_id = self.dataset_id
test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
# Load features
creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
engagers_feature = MappedFeatureEngagerId(train_dataset_id)
creators_feature = MappedFeatureCreatorId(train_dataset_id)
engagement_feature = TweetFeatureEngagementIsNegative(train_dataset_id)
dataframe = pd.concat([
creation_timestamps_feature.load_or_create(),
engagers_feature.load_or_create(),
engagement_feature.load_or_create(),
creators_feature.load_or_create()
], axis=1)
dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
# KEY: a tuple (creator, engager)
# VALUE: the number of time the engager has engaged with the creator
# If key does not exists -> 0 times.
engagement_dict = {}
result = pd.DataFrame(
[find_and_increase_engager(eng_id, cre_id, engagement_dict)
if engagement
else engagement_dict.get((cre_id, eng_id), 0)
for eng_id, cre_id, engagement
in zip(dataframe[engagers_feature.feature_name],
dataframe[creators_feature.feature_name],
dataframe[engagement_feature.feature_name])],
index=dataframe.index
)
if not EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByEngager(
train_dataset_id).has_feature():
result.sort_index(inplace=True)
EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByEngager(
train_dataset_id).save_feature(result)
if not EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByEngager(
test_dataset_id).has_feature():
# Load features
creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
engagers_feature = MappedFeatureEngagerId(test_dataset_id)
creators_feature = MappedFeatureCreatorId(test_dataset_id)
dataframe = pd.concat([
creation_timestamps_feature.load_or_create(),
engagers_feature.load_or_create(),
creators_feature.load_or_create()
], axis=1)
result = pd.DataFrame(
[engagement_dict.get((cre_id, eng_id), 0)
for eng_id, cre_id
in zip(dataframe[engagers_feature.feature_name],
dataframe[creators_feature.feature_name])],
index=dataframe.index
)
EngagerFeatureNumberOfPreviousNegativeEngagementBetweenCreatorAndEngagerByEngager(
test_dataset_id).save_feature(result) | 48.332323 | 148 | 0.684487 | 4,984 | 47,849 | 6.157303 | 0.026284 | 0.087982 | 0.04927 | 0.052007 | 0.835897 | 0.835441 | 0.834202 | 0.819115 | 0.802757 | 0.801812 | 0 | 0.001923 | 0.249974 | 47,849 | 990 | 149 | 48.332323 | 0.853182 | 0.071914 | 0 | 0.918098 | 1 | 0 | 0.09625 | 0.09625 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034346 | false | 0 | 0.006605 | 0 | 0.059445 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f4867670d1248c2e26eb852a4a12005ebc98cc31 | 1,712 | py | Python | src/pgpy/pg_common.py | luk036/pgpy | 988a9cc7a76ad50acd4a561f42600ec7c713c6f0 | [
"MIT"
] | null | null | null | src/pgpy/pg_common.py | luk036/pgpy | 988a9cc7a76ad50acd4a561f42600ec7c713c6f0 | [
"MIT"
] | 2 | 2018-06-03T08:11:16.000Z | 2020-01-18T14:46:48.000Z | src/pgpy/pg_common.py | luk036/pgpy | 988a9cc7a76ad50acd4a561f42600ec7c713c6f0 | [
"MIT"
] | 2 | 2018-01-17T04:17:27.000Z | 2018-06-03T08:09:41.000Z | # -*- coding: utf-8 -*-
"""
Common Functions for Projective Geometry
"""
def cross0(v, w):
"""[summary]
Arguments:
v (type): [description]
w (type): [description]
Returns:
[type]: [description]
"""
return cross2(v[1:], w[1:])
def cross1(v, w):
"""[summary]
Arguments:
v (type): [description]
w (type): [description]
Returns:
[type]: [description]
"""
return cross2(v[0:3:2], w[0:3:2])
def cross2(v, w):
"""[summary]
Arguments:
v (type): [description]
w (type): [description]
Returns:
[type]: [description]
"""
return v[0] * w[1] - w[0] * v[1]
def cross(v, w):
"""[summary]
Arguments:
v (type): [description]
w (type): [description]
Returns:
[type]: [description]
"""
return (cross0(v, w), -cross1(v, w), cross2(v, w))
def dot_c(v, w):
"""[summary]
Arguments:
v (type): [description]
w (type): [description]
Returns:
[type]: [description]
"""
x1, y1, z1 = v
x2, y2, z2 = w
return x1 * x2 + y1 * y2 + z1 * z2
def plucker_c(ld, v, mu, w):
"""[summary]
Arguments:
ld (type): [description]
v (type): [description]
mu (type): [description]
w (type): [description]
Returns:
[type]: [description]
"""
x1, y1, z1 = v
x2, y2, z2 = w
return (ld * x1 + mu * x2, ld * y1 + mu * y2, ld * z1 + mu * z2)
def dot1(v, w):
"""[summary]
Arguments:
v (type): [description]
w (type): [description]
Returns:
[type]: [description]
"""
return v[0] * w[0] + v[1] * w[1]
| 16.784314 | 68 | 0.478972 | 203 | 1,712 | 4.029557 | 0.182266 | 0.42176 | 0.145477 | 0.171149 | 0.702934 | 0.702934 | 0.702934 | 0.702934 | 0.702934 | 0.702934 | 0 | 0.044014 | 0.336449 | 1,712 | 101 | 69 | 16.950495 | 0.676056 | 0.536799 | 0 | 0.222222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.388889 | false | 0 | 0 | 0 | 0.777778 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
be2faddc44e91a47e905651bd47e9b8b6d04678a | 7,101 | py | Python | sandbox/mack/policies.py | wsjeon/multiagent-gail | e7dd75f0dee17e33e55d7f4e24d40649fd648cf3 | [
"MIT"
] | 9 | 2019-10-11T09:11:28.000Z | 2021-12-26T00:10:24.000Z | sandbox/mack/policies.py | wsjeon/multiagent-gail | e7dd75f0dee17e33e55d7f4e24d40649fd648cf3 | [
"MIT"
] | null | null | null | sandbox/mack/policies.py | wsjeon/multiagent-gail | e7dd75f0dee17e33e55d7f4e24d40649fd648cf3 | [
"MIT"
] | 3 | 2019-11-12T12:36:59.000Z | 2021-12-24T19:43:49.000Z | import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import rl.common.tf_util as U
from rl.acktr.utils import conv, fc, dense, conv_to_fc, sample, kl_div
class CategoricalPolicy(object):
def __init__(self, sess, ob_space, ac_space, ob_spaces, ac_spaces,
nenv, nsteps, nstack, reuse=False, name='model'):
nbatch = nenv * nsteps
ob_shape = (nbatch, ob_space.shape[0] * nstack)
all_ob_shape = (nbatch, sum([obs.shape[0] for obs in ob_spaces]) * nstack)
nact = ac_space.n
all_ac_shape = (nbatch, (sum([ac.n for ac in ac_spaces]) - nact) * nstack)
X = tf.placeholder(tf.float32, ob_shape, name='X') # obs
X_v = tf.placeholder(tf.float32, all_ob_shape, name='X_v')
A_v = tf.placeholder(tf.float32, all_ac_shape, name='A_v')
with tf.variable_scope('policy_{}'.format(name), reuse=reuse):
h1 = fc(X, 'fc1', nh=128, init_scale=np.sqrt(2))
h2 = fc(h1, 'fc2', nh=128, init_scale=np.sqrt(2))
pi = fc(h2, 'pi', nact, act=lambda x: x)
with tf.variable_scope('value_{}'.format(name), reuse=reuse):
if len(ob_spaces) > 1:
Y = tf.concat([X_v, A_v], axis=1)
else:
Y = X_v
h3 = fc(Y, 'fc3', nh=256, init_scale=np.sqrt(2))
h4 = fc(h3, 'fc4', nh=256, init_scale=np.sqrt(2))
vf = fc(h4, 'v', 1, act=lambda x: x)
v0 = vf[:, 0]
a0 = sample(pi)
self.initial_state = [] # not stateful
def step(ob, obs, a_v, *_args, **_kwargs):
if a_v is not None:
a, v = sess.run([a0, v0], {X: ob, X_v: obs, A_v: a_v})
else:
a, v = sess.run([a0, v0], {X: ob, X_v: obs})
return a, v, [] # dummy state
def value(ob, a_v, *_args, **_kwargs):
if a_v is not None:
return sess.run(v0, {X_v: ob, A_v: a_v})
else:
return sess.run(v0, {X_v: ob})
self.X = X
self.X_v = X_v
self.A_v = A_v
self.pi = pi
self.vf = vf
self.step = step
self.value = value
class GaussianPolicy(object):
def __init__(self, sess, ob_space, ac_space, ob_spaces, ac_spaces,
nenv, nsteps, nstack, reuse=False, name='model'):
nbatch = nenv * nsteps
ob_shape = (nbatch, ob_space.shape[0] * nstack)
all_ob_shape = (nbatch, sum([obs.shape[0] for obs in ob_spaces]) * nstack)
nact = ac_space.shape[0]
all_ac_shape = (nbatch, (sum([ac.shape[0] for ac in ac_spaces]) - nact) * nstack)
X = tf.placeholder(tf.float32, ob_shape) # obs
X_v = tf.placeholder(tf.float32, all_ob_shape)
A_v = tf.placeholder(tf.float32, all_ac_shape)
with tf.variable_scope('policy_{}'.format(name), reuse=reuse):
h1 = fc(X, 'fc1', nh=64, init_scale=np.sqrt(2), act=tf.nn.tanh)
h2 = fc(h1, 'fc2', nh=64, init_scale=np.sqrt(2), act=tf.nn.tanh)
pi = fc(h2, 'pi', nact, act=lambda x: x, init_scale=0.01)
with tf.variable_scope('policy_{}'.format(name), reuse=reuse):
logstd = tf.get_variable('sigma', shape=[nact], dtype=tf.float32,
initializer=tf.constant_initializer(0.0))
logstd = tf.expand_dims(logstd, 0)
std = tf.exp(logstd)
std = tf.tile(std, [nbatch, 1])
with tf.variable_scope('value_{}'.format(name), reuse=reuse):
if len(ob_spaces) > 1:
Y = tf.concat([X_v, A_v], axis=1)
else:
Y = X_v
h3 = fc(Y, 'fc3', nh=64, init_scale=np.sqrt(2), act=tf.nn.tanh)
h4 = fc(h3, 'fc4', nh=64, init_scale=np.sqrt(2), act=tf.nn.tanh)
vf = fc(h4, 'v', 1, act=lambda x: x)
v0 = vf[:, 0]
a0 = pi + tf.random_normal(tf.shape(std), 0.0, 1.0) * std
self.initial_state = [] # not stateful
def step(ob, obs, a_v, *_args, **_kwargs):
if a_v is not None:
a, v = sess.run([a0, v0], {X: ob, X_v: obs, A_v: a_v})
else:
a, v = sess.run([a0, v0], {X: ob, X_v: obs})
return a, v, [] # dummy state
def value(ob, a_v, *_args, **_kwargs):
if a_v is not None:
return sess.run(v0, {X_v: ob, A_v: a_v})
else:
return sess.run(v0, {X_v: ob})
self.X = X
self.X_v = X_v
self.A_v = A_v
self.pi = pi
self.vf = vf
self.std = std
self.logstd = logstd
self.step = step
self.value = value
self.mean_std = tf.concat([pi, std], axis=1)
class MultiCategoricalPolicy(object):
def __init__(self, sess, ob_space, ac_space, ob_spaces, ac_spaces,
nenv, nsteps, nstack, reuse=False, name='model'):
nbins = 11
nbatch = nenv * nsteps
ob_shape = (nbatch, ob_space.shape[0] * nstack)
all_ob_shape = (nbatch, sum([obs.shape[0] for obs in ob_spaces]) * nstack)
nact = ac_space.shape[0]
all_ac_shape = (nbatch, (sum([ac.shape[0] for ac in ac_spaces]) - nact) * nstack)
X = tf.placeholder(tf.float32, ob_shape) # obs
X_v = tf.placeholder(tf.float32, all_ob_shape)
A_v = tf.placeholder(tf.float32, all_ac_shape)
with tf.variable_scope('policy_{}'.format(name), reuse=reuse):
h1 = fc(X, 'fc1', nh=128, init_scale=np.sqrt(2))
h2 = fc(h1, 'fc2', nh=128, init_scale=np.sqrt(2))
pi = fc(h2, 'pi', nact * nbins, act=lambda x: x)
with tf.variable_scope('value_{}'.format(name), reuse=reuse):
if len(ob_spaces) > 1:
Y = tf.concat([X_v, A_v], axis=1)
else:
Y = X_v
h3 = fc(Y, 'fc3', nh=256, init_scale=np.sqrt(2))
h4 = fc(h3, 'fc4', nh=256, init_scale=np.sqrt(2))
vf = fc(h4, 'v', 1, act=lambda x: x)
v0 = vf[:, 0]
pi = tf.reshape(pi, [nbatch, nact, nbins])
a0 = sample(pi, axis=2)
self.initial_state = [] # not stateful
def step(ob, obs, a_v, *_args, **_kwargs):
# output continuous actions within [-1, 1]
if a_v is not None:
a, v = sess.run([a0, v0], {X: ob, X_v: obs, A_v: a_v})
else:
a, v = sess.run([a0, v0], {X: ob, X_v: obs})
a = transform(a)
return a, v, [] # dummy state
def value(ob, a_v, *_args, **_kwargs):
if a_v is not None:
return sess.run(v0, {X_v: ob, A_v: a_v})
else:
return sess.run(v0, {X_v: ob})
def transform(a):
# transform from [0, 9] to [-0.8, 0.8]
a = np.array(a, dtype=np.float32)
a = (a - (nbins - 1) / 2) / (nbins - 1) * 2.0
return a
self.X = X
self.X_v = X_v
self.A_v = A_v
self.pi = pi
self.vf = vf
self.step = step
self.value = value | 39.45 | 89 | 0.518941 | 1,093 | 7,101 | 3.200366 | 0.122598 | 0.026301 | 0.011149 | 0.051458 | 0.815895 | 0.810749 | 0.797313 | 0.797313 | 0.797313 | 0.768153 | 0 | 0.037584 | 0.333052 | 7,101 | 180 | 90 | 39.45 | 0.701014 | 0.023095 | 0 | 0.751634 | 0 | 0 | 0.019059 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065359 | false | 0 | 0.026144 | 0 | 0.176471 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be41615f287bead64092dfd372d26a6dd1cc5287 | 4,294 | py | Python | game/migrations/0010_auto_20200117_0017.py | smrsan/django-backgammon-server | 02eee8fea2c4aa0e40b333a35b0bb09d7b444230 | [
"MIT"
] | null | null | null | game/migrations/0010_auto_20200117_0017.py | smrsan/django-backgammon-server | 02eee8fea2c4aa0e40b333a35b0bb09d7b444230 | [
"MIT"
] | 6 | 2021-03-18T22:43:08.000Z | 2021-09-22T18:31:02.000Z | game/migrations/0010_auto_20200117_0017.py | smrsan/django-backgammon-server | 02eee8fea2c4aa0e40b333a35b0bb09d7b444230 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.2 on 2020-01-17 00:17
from django.db import migrations
import game.fields
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('game', '0009_auto_20200116_1946'),
]
operations = [
migrations.RemoveField(
model_name='board',
name='black_bead_01_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_02_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_03_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_04_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_05_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_06_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_07_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_08_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_09_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_10_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_11_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_12_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_13_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_14_position',
),
migrations.RemoveField(
model_name='board',
name='black_bead_15_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_01_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_02_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_03_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_04_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_05_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_06_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_07_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_08_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_09_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_10_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_11_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_12_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_13_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_14_position',
),
migrations.RemoveField(
model_name='board',
name='white_bead_15_position',
),
migrations.AddField(
model_name='board',
name='beads',
field=jsonfield.fields.JSONField(default={'black': [6, 6, 6, 6, 6, 8, 8, 8, 13, 13, 13, 13, 13, 24, 24], 'white': [19, 19, 19, 19, 19, 17, 17, 17, 12, 12, 12, 12, 12, 1, 1]}),
),
migrations.AlterField(
model_name='game',
name='invite_token',
field=game.fields.RandomStrField(default='i913ZesCfMcvzR289BTJSxj3', max_length=24),
),
]
| 29.410959 | 187 | 0.535864 | 395 | 4,294 | 5.503797 | 0.162025 | 0.132475 | 0.199632 | 0.25667 | 0.794848 | 0.794848 | 0.794848 | 0.794848 | 0.774609 | 0 | 0 | 0.054132 | 0.354681 | 4,294 | 145 | 188 | 29.613793 | 0.730422 | 0.01048 | 0 | 0.669065 | 1 | 0 | 0.211208 | 0.16647 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.021583 | 0 | 0.043165 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be671624a5dbb579100020cc161c8a76929277b7 | 89 | py | Python | tests/test_cafa4_mapping.py | LucaCappelletti94/hpo_downloader | 14497478626c234539659b9f55b16496e6b6b86b | [
"MIT"
] | null | null | null | tests/test_cafa4_mapping.py | LucaCappelletti94/hpo_downloader | 14497478626c234539659b9f55b16496e6b6b86b | [
"MIT"
] | null | null | null | tests/test_cafa4_mapping.py | LucaCappelletti94/hpo_downloader | 14497478626c234539659b9f55b16496e6b6b86b | [
"MIT"
] | null | null | null | from hpo_downloader import cafa4_mapping
def test_cafa4_mapping():
cafa4_mapping()
| 14.833333 | 40 | 0.797753 | 12 | 89 | 5.5 | 0.666667 | 0.545455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.039474 | 0.146067 | 89 | 5 | 41 | 17.8 | 0.828947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bea3996136cc113df8d77d5cbf17a39aefb98460 | 127 | py | Python | python/testData/inspections/ReplaceNotEqOperator.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/inspections/ReplaceNotEqOperator.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | null | null | null | python/testData/inspections/ReplaceNotEqOperator.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | null | null | null | print(<warning descr="Python versions 3.5, 3.6, 3.7, 3.8, 3.9, 3.10 do not support <>, use != instead">a<caret> <> b</warning>) | 127 | 127 | 0.629921 | 26 | 127 | 3.076923 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.118182 | 0.133858 | 127 | 1 | 127 | 127 | 0.609091 | 0 | 0 | 0 | 0 | 1 | 0.617188 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
beb4ddca962e9c1af00a6cc4e6f74c3070f5e677 | 2,874 | py | Python | Solutions/008.py | exynil/project-euler | 1fdae19b09c461dfef4a7e35c7d33259aac9db51 | [
"Apache-2.0"
] | null | null | null | Solutions/008.py | exynil/project-euler | 1fdae19b09c461dfef4a7e35c7d33259aac9db51 | [
"Apache-2.0"
] | 1 | 2019-04-23T19:35:01.000Z | 2019-04-23T19:35:01.000Z | Solutions/008.py | exynil/project-euler | 1fdae19b09c461dfef4a7e35c7d33259aac9db51 | [
"Apache-2.0"
] | null | null | null | # Наибольшее произведение четырех последовательных цифр в нижеприведенном 1000-значном числе равно 9 × 9 × 8 × 9 = 5832.
#
# 73167176531330624919225119674426574742355349194934
# 96983520312774506326239578318016984801869478851843
# 85861560789112949495459501737958331952853208805511
# 12540698747158523863050715693290963295227443043557
# 66896648950445244523161731856403098711121722383113
# 62229893423380308135336276614282806444486645238749
# 30358907296290491560440772390713810515859307960866
# 70172427121883998797908792274921901699720888093776
# 65727333001053367881220235421809751254540594752243
# 52584907711670556013604839586446706324415722155397
# 53697817977846174064955149290862569321978468622482
# 83972241375657056057490261407972968652414535100474
# 82166370484403199890008895243450658541227588666881
# 16427171479924442928230863465674813919123162824586
# 17866458359124566529476545682848912883142607690042
# 24219022671055626321111109370544217506941658960408
# 07198403850962455444362981230987879927244284909188
# 84580156166097919133875499200524063689912560717606
# 05886116467109405077541002256983155200055935729725
# 71636269561882670428252483600823257530420752963450
#
# Найдите наибольшее произведение тринадцати последовательных цифр в данном числе.
# python 3.7.3
digits = ('73167176531330624919225119674426574742355349194934'
'96983520312774506326239578318016984801869478851843'
'85861560789112949495459501737958331952853208805511'
'12540698747158523863050715693290963295227443043557'
'66896648950445244523161731856403098711121722383113'
'62229893423380308135336276614282806444486645238749'
'30358907296290491560440772390713810515859307960866'
'70172427121883998797908792274921901699720888093776'
'65727333001053367881220235421809751254540594752243'
'52584907711670556013604839586446706324415722155397'
'53697817977846174064955149290862569321978468622482'
'83972241375657056057490261407972968652414535100474'
'82166370484403199890008895243450658541227588666881'
'16427171479924442928230863465674813919123162824586'
'17866458359124566529476545682848912883142607690042'
'24219022671055626321111109370544217506941658960408'
'07198403850962455444362981230987879927244284909188'
'84580156166097919133875499200524063689912560717606'
'05886116467109405077541002256983155200055935729725'
'71636269561882670428252483600823257530420752963450')
position = 0
greatest_product = 0
for i in range(0, len(digits) - 4):
product = 1
for j in range(0, 4):
product *= int(digits[i + j])
if greatest_product < product:
greatest_product = product
position = i
print(f'Позиция: {position}')
print(f'Произведение равно: {greatest_product}')
| 42.895522 | 120 | 0.829506 | 118 | 2,874 | 20.194915 | 0.5 | 0.025178 | 0.017625 | 0.167856 | 0.839278 | 0.839278 | 0.839278 | 0.839278 | 0.839278 | 0.839278 | 0 | 0.8088 | 0.130132 | 2,874 | 66 | 121 | 43.545455 | 0.1432 | 0.428671 | 0 | 0 | 0 | 0 | 0.654895 | 0.619579 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.064516 | 0 | 0 | 1 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
22cfa6d8558637a0dc6e0dfe5df1f7fdc68a2587 | 9,452 | py | Python | tests/integration/widgets/test_datepicker.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | 1 | 2020-01-19T03:17:18.000Z | 2020-01-19T03:17:18.000Z | tests/integration/widgets/test_datepicker.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | null | null | null | tests/integration/widgets/test_datepicker.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | null | null | null | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from datetime import date
# Bokeh imports
from bokeh._testing.util.selenium import RECORD
from bokeh.layouts import column
from bokeh.models import (
Circle,
ColumnDataSource,
CustomAction,
CustomJS,
DatePicker,
Plot,
Range1d,
)
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
pytest_plugins = (
"bokeh._testing.plugins.project",
)
@pytest.mark.integration
@pytest.mark.selenium
class Test_DatePicker(object):
    """Browser-based (Selenium) integration tests for the ``DatePicker``
    widget, which is rendered with the flatpickr JS library.
    """

    def test_basic(self, bokeh_model_page):
        """A default picker renders its title and is not in inline mode."""
        dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30", css_classes=["foo"])

        page = bokeh_model_page(dp)

        el = page.driver.find_element_by_css_selector('.foo label')
        assert el.text == "Select date"

        # the popup calendar should not carry flatpickr's "inline" class
        el = page.driver.find_element_by_css_selector('.flatpickr-calendar')
        assert "inline" not in el.get_attribute("class")

        assert page.has_no_console_errors()

    def test_inline(self, bokeh_model_page):
        """With inline=True the calendar is rendered in inline mode."""
        dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30",
                        inline=True, css_classes=["foo"])

        page = bokeh_model_page(dp)

        el = page.driver.find_element_by_css_selector('.foo label')
        assert el.text == "Select date"

        el = page.driver.find_element_by_css_selector('.flatpickr-calendar')
        assert "inline" in el.get_attribute("class")

        assert page.has_no_console_errors()

    def test_widget_disabled(self, bokeh_model_page):
        """disabled=True propagates to the underlying <input> element."""
        dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30",
                        disabled=True, css_classes=["foo"])

        page = bokeh_model_page(dp)

        el = page.driver.find_element_by_css_selector('.flatpickr-input')
        assert el.get_attribute("disabled") == "true"

        assert page.has_no_console_errors()

    def test_disabled_dates(self, bokeh_model_page):
        """Dates listed in disabled_dates (single dates and (start, end)
        ranges, mixing string and ``date`` forms) are greyed out, while
        neighboring dates remain selectable."""
        dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30",
                        disabled_dates=["2019-09-14", ("2019-09-16", date(2019, 9, 18))], css_classes=["foo"])

        page = bokeh_model_page(dp)

        # open the calendar popup
        el = page.driver.find_element_by_css_selector('.foo label')
        el.click()

        # not disabled
        el = page.driver.find_element_by_css_selector('span[aria-label="September 13, 2019"]')
        assert "flatpickr-disabled" not in el.get_attribute("class")

        # the single disabled date
        el = page.driver.find_element_by_css_selector('span[aria-label="September 14, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        # not disabled
        el = page.driver.find_element_by_css_selector('span[aria-label="September 15, 2019"]')
        assert "flatpickr-disabled" not in el.get_attribute("class")

        # the disabled range 16-18 (inclusive on both ends)
        el = page.driver.find_element_by_css_selector('span[aria-label="September 16, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        el = page.driver.find_element_by_css_selector('span[aria-label="September 17, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        el = page.driver.find_element_by_css_selector('span[aria-label="September 18, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        # not disabled
        el = page.driver.find_element_by_css_selector('span[aria-label="September 19, 2019"]')
        assert "flatpickr-disabled" not in el.get_attribute("class")

        assert page.has_no_console_errors()

    def test_enabled_dates(self, bokeh_model_page):
        """Mirror of test_disabled_dates: only the dates listed in
        enabled_dates are selectable, everything else is disabled."""
        dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30",
                        enabled_dates=["2019-09-14", ("2019-09-16", date(2019, 9, 18))], css_classes=["foo"])

        page = bokeh_model_page(dp)

        # open the calendar popup
        el = page.driver.find_element_by_css_selector('.foo label')
        el.click()

        # not enabled
        el = page.driver.find_element_by_css_selector('span[aria-label="September 13, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        # the single enabled date
        el = page.driver.find_element_by_css_selector('span[aria-label="September 14, 2019"]')
        assert "flatpickr-disabled" not in el.get_attribute("class")

        # not enabled
        el = page.driver.find_element_by_css_selector('span[aria-label="September 15, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        # the enabled range 16-18 (inclusive on both ends)
        el = page.driver.find_element_by_css_selector('span[aria-label="September 16, 2019"]')
        assert "flatpickr-disabled" not in el.get_attribute("class")

        el = page.driver.find_element_by_css_selector('span[aria-label="September 17, 2019"]')
        assert "flatpickr-disabled" not in el.get_attribute("class")

        el = page.driver.find_element_by_css_selector('span[aria-label="September 18, 2019"]')
        assert "flatpickr-disabled" not in el.get_attribute("class")

        # not enabled
        el = page.driver.find_element_by_css_selector('span[aria-label="September 19, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        assert page.has_no_console_errors()

    def test_js_on_change_executes(self, bokeh_model_page):
        """Picking a date fires a 'value' change event on the JS side and
        updates the visible input field."""
        dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30", css_classes=["foo"])
        dp.js_on_change('value', CustomJS(code=RECORD("value", "cb_obj.value")))

        page = bokeh_model_page(dp)

        # open the calendar and click a specific day
        el = page.driver.find_element_by_css_selector('.foo input')
        el.click()

        el = page.driver.find_element_by_css_selector('span[aria-label="September 16, 2019"]')
        el.click()

        # the CustomJS callback recorded the new value
        results = page.results
        assert results['value'] == '2019-09-16'

        # the input element reflects the selection too
        el = page.driver.find_element_by_css_selector('.bk-input')
        assert el.get_attribute('value') == '2019-09-16'

        assert page.has_no_console_errors()

    def test_server_on_change_round_trip(self, bokeh_server_page):
        """A Python-side on_change callback receives (old, new) values when
        the user picks a date in the browser (server round trip)."""
        def modify_doc(doc):
            source = ColumnDataSource(dict(x=[1, 2], y=[1, 1], val=["a", "b"]))
            plot = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=0)
            # custom action records the data source so the test can inspect it
            plot.add_tools(CustomAction(callback=CustomJS(args=dict(s=source), code=RECORD("data", "s.data"))))
            plot.add_glyph(source, Circle(x='x', y='y', size=20))
            dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30", css_classes=["foo"])
            def cb(attr, old, new):
                # stash old/new into the data source so RECORD can capture them
                source.data['val'] = [old, new]
            dp.on_change('value', cb)
            doc.add_root(column(dp, plot))

        page = bokeh_server_page(modify_doc)

        # open the calendar and pick a new date
        el = page.driver.find_element_by_css_selector('.foo input')
        el.click()

        el = page.driver.find_element_by_css_selector('span[aria-label="September 16, 2019"]')
        el.click()

        page.click_custom_action()

        results = page.results
        assert results['data']['val'] == ['2019-09-20', '2019-09-16']

    def test_server_update_disabled(self, bokeh_server_page):
        """Setting disabled_dates from Python inside a callback updates the
        calendar rendered in the browser."""
        def modify_doc(doc):
            source = ColumnDataSource(dict(x=[1, 2], y=[1, 1], val=["a", "b"]))
            plot = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=0)
            plot.add_tools(CustomAction(callback=CustomJS(args=dict(s=source), code=RECORD("data", "s.data"))))
            plot.add_glyph(source, Circle(x='x', y='y', size=20))
            dp = DatePicker(title='Select date', value=date(2019, 9, 20), min_date=date(2019, 9, 1), max_date="2019-09-30", css_classes=["foo"])
            def cb(attr, old, new):
                # also mutate the widget from the server side
                source.data['val'] = [old, new]
                dp.disabled_dates = ["2019-09-15"]
            dp.on_change('value', cb)
            doc.add_root(column(dp, plot))

        page = bokeh_server_page(modify_doc)

        # open the calendar and pick a date, triggering the server callback
        el = page.driver.find_element_by_css_selector('.foo input')
        el.click()

        el = page.driver.find_element_by_css_selector('span[aria-label="September 16, 2019"]')
        el.click()

        page.click_custom_action()

        # the server-side assignment disabled September 15 in the browser
        el = page.driver.find_element_by_css_selector('span[aria-label="September 15, 2019"]')
        assert "flatpickr-disabled" in el.get_attribute("class")

        results = page.results
        assert results['data']['val'] == ['2019-09-20', '2019-09-16']
| 41.638767 | 144 | 0.60675 | 1,233 | 9,452 | 4.436334 | 0.124899 | 0.03181 | 0.06362 | 0.084826 | 0.866362 | 0.851554 | 0.851554 | 0.851554 | 0.832176 | 0.83053 | 0 | 0.055833 | 0.194668 | 9,452 | 226 | 145 | 41.823009 | 0.662769 | 0.098921 | 0 | 0.71223 | 0 | 0 | 0.190981 | 0.058637 | 0 | 0 | 0 | 0 | 0.215827 | 1 | 0.086331 | false | 0 | 0.035971 | 0 | 0.129496 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fe0adc0be1335a3e37afd327666745d5080764e5 | 520 | py | Python | python_snippets/merge_dictionaries_in_python_39.py | charles545587/python_snippets | 8b884c0587b89b5a725f5cbaf369af8e94ca1fdb | [
"Apache-2.0"
] | 2 | 2021-02-01T11:27:15.000Z | 2021-03-02T17:58:35.000Z | python_snippets/merge_dictionaries_in_python_39.py | charles545587/python_snippets | 8b884c0587b89b5a725f5cbaf369af8e94ca1fdb | [
"Apache-2.0"
] | null | null | null | python_snippets/merge_dictionaries_in_python_39.py | charles545587/python_snippets | 8b884c0587b89b5a725f5cbaf369af8e94ca1fdb | [
"Apache-2.0"
] | null | null | null | # Merge dictionaries in python 3.9 using
# merge (|) and (|=)
# Merging in earlier versions
first_dict = {'orange': 1, 'apples': 2}
second_dict = {'bananas': 3, 'pears': 4}
combined_dict = {**first_dict, **second_dict}
print(combined_dict)
# {'orange': 1, 'apples': 2, 'bananas': 3, 'pears': 4}
# Merging in python 3.9
first_dict = {'orange': 1, 'apples': 2}
second_dict = {'bananas': 3, 'pears': 4}
combined_dict = first_dict | second_dict
print(combined_dict)
# {'orange': 1, 'apples': 2, 'bananas': 3, 'pears': 4}
| 28.888889 | 54 | 0.651923 | 76 | 520 | 4.302632 | 0.302632 | 0.110092 | 0.134557 | 0.207951 | 0.752294 | 0.752294 | 0.752294 | 0.752294 | 0.752294 | 0.752294 | 0 | 0.045249 | 0.15 | 520 | 17 | 55 | 30.588235 | 0.69457 | 0.409615 | 0 | 0.75 | 0 | 0 | 0.16 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4310b845db202503ae9238aa985321efa35a9507 | 13,816 | py | Python | src/prefect/tasks/docker/images.py | skyline-ai/prefect | 92430f2f91215d6c27d92ad67df67ccd639e587c | [
"Apache-2.0"
] | null | null | null | src/prefect/tasks/docker/images.py | skyline-ai/prefect | 92430f2f91215d6c27d92ad67df67ccd639e587c | [
"Apache-2.0"
] | null | null | null | src/prefect/tasks/docker/images.py | skyline-ai/prefect | 92430f2f91215d6c27d92ad67df67ccd639e587c | [
"Apache-2.0"
] | null | null | null | from typing import Any
import docker
from prefect import Task
from prefect.utilities.tasks import defaults_from_attrs
class ListImages(Task):
    """
    Task that lists the Docker images known to a Docker server.

    Every initialization argument below may also be supplied (or overridden)
    at runtime.

    Args:
        - repository_name (str, optional): restrict the listing to images from
            this repository; when omitted, every image on the local Docker
            server is returned
        - all_layers (bool, optional): include intermediate image layers
        - docker_server_url (str, optional): URL of the Docker server. The
            default is `unix:///var/run/docker.sock`, but remote hosts such as
            `tcp://0.0.0.0:2375` may be used as well
        - **kwargs (dict, optional): extra keyword arguments forwarded to the
            `Task` constructor
    """

    def __init__(
        self,
        repository_name: str = None,
        all_layers: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
        **kwargs: Any
    ):
        self.docker_server_url = docker_server_url
        self.repository_name = repository_name
        self.all_layers = all_layers
        super().__init__(**kwargs)

    @defaults_from_attrs("repository_name", "all_layers", "docker_server_url")
    def run(
        self,
        repository_name: str = None,
        all_layers: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
    ) -> list:
        """
        Task run method.

        Args:
            - repository_name (str, optional): restrict the listing to images
                from this repository; when omitted, every image on the local
                Docker server is returned
            - all_layers (bool, optional): include intermediate image layers
            - docker_server_url (str, optional): URL of the Docker server.
                Defaults to `unix:///var/run/docker.sock`; remote hosts such
                as `tcp://0.0.0.0:2375` may be used as well

        Returns:
            - list: one dictionary of image metadata per image found
        """
        api = docker.APIClient(base_url=docker_server_url, version="auto")
        return api.images(name=repository_name, all=all_layers)
class PullImage(Task):
    """
    Task that pulls a Docker image from a repository.

    Every initialization argument below may also be supplied (or overridden)
    at runtime.

    Args:
        - repository (str, optional): the repository to pull the image from
        - tag (str, optional): tag of the image to pull; defaults to the
            `latest` tag when omitted
        - docker_server_url (str, optional): URL of the Docker server. The
            default is `unix:///var/run/docker.sock`, but remote hosts such as
            `tcp://0.0.0.0:2375` may be used as well
        - **kwargs (dict, optional): extra keyword arguments forwarded to the
            `Task` constructor
    """

    def __init__(
        self,
        repository: str = None,
        tag: str = None,
        docker_server_url: str = "unix:///var/run/docker.sock",
        **kwargs: Any
    ):
        self.docker_server_url = docker_server_url
        self.repository = repository
        self.tag = tag
        super().__init__(**kwargs)

    @defaults_from_attrs("repository", "tag", "docker_server_url")
    def run(
        self,
        repository: str = None,
        tag: str = None,
        docker_server_url: str = "unix:///var/run/docker.sock",
    ) -> str:
        """
        Task run method.

        Args:
            - repository (str, optional): the repository to pull the image from
            - tag (str, optional): tag of the image to pull; defaults to the
                `latest` tag when omitted
            - docker_server_url (str, optional): URL of the Docker server.
                Defaults to `unix:///var/run/docker.sock`; remote hosts such
                as `tcp://0.0.0.0:2375` may be used as well

        Returns:
            - str: Docker's output for the pull operation

        Raises:
            - ValueError: if no `repository` was given
        """
        if not repository:
            raise ValueError("A repository to pull the image from must be specified.")

        api = docker.APIClient(base_url=docker_server_url, version="auto")
        return api.pull(repository=repository, tag=tag)
class PushImage(Task):
    """
    Task that pushes a Docker image to a repository.

    Every initialization argument below may also be supplied (or overridden)
    at runtime.

    Args:
        - repository (str, optional): the repository to push the image to
        - tag (str, optional): tag of the image to push; defaults to the
            `latest` tag when omitted
        - docker_server_url (str, optional): URL of the Docker server. The
            default is `unix:///var/run/docker.sock`, but remote hosts such as
            `tcp://0.0.0.0:2375` may be used as well
        - **kwargs (dict, optional): extra keyword arguments forwarded to the
            `Task` constructor
    """

    def __init__(
        self,
        repository: str = None,
        tag: str = None,
        docker_server_url: str = "unix:///var/run/docker.sock",
        **kwargs: Any
    ):
        self.docker_server_url = docker_server_url
        self.repository = repository
        self.tag = tag
        super().__init__(**kwargs)

    @defaults_from_attrs("repository", "tag", "docker_server_url")
    def run(
        self,
        repository: str = None,
        tag: str = None,
        docker_server_url: str = "unix:///var/run/docker.sock",
    ) -> str:
        """
        Task run method.

        Args:
            - repository (str, optional): the repository to push the image to
            - tag (str, optional): tag of the image to push; defaults to the
                `latest` tag when omitted
            - docker_server_url (str, optional): URL of the Docker server.
                Defaults to `unix:///var/run/docker.sock`; remote hosts such
                as `tcp://0.0.0.0:2375` may be used as well

        Returns:
            - str: Docker's output for the push operation

        Raises:
            - ValueError: if no `repository` was given
        """
        if not repository:
            raise ValueError("A repository to push the image to must be specified.")

        api = docker.APIClient(base_url=docker_server_url, version="auto")
        return api.push(repository=repository, tag=tag)
class RemoveImage(Task):
    """
    Task that removes a Docker image from the server.

    Every initialization argument below may also be supplied (or overridden)
    at runtime.

    Args:
        - image (str, optional): the image to remove
        - force (bool, optional): force removal of the image
        - docker_server_url (str, optional): URL of the Docker server. The
            default is `unix:///var/run/docker.sock`, but remote hosts such as
            `tcp://0.0.0.0:2375` may be used as well
        - **kwargs (dict, optional): extra keyword arguments forwarded to the
            `Task` constructor
    """

    def __init__(
        self,
        image: str = None,
        force: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
        **kwargs: Any
    ):
        self.docker_server_url = docker_server_url
        self.image = image
        self.force = force
        super().__init__(**kwargs)

    @defaults_from_attrs("image", "force", "docker_server_url")
    def run(
        self,
        image: str = None,
        force: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
    ) -> None:
        """
        Task run method.

        Args:
            - image (str, optional): the image to remove
            - force (bool, optional): force removal of the image
            - docker_server_url (str, optional): URL of the Docker server.
                Defaults to `unix:///var/run/docker.sock`; remote hosts such
                as `tcp://0.0.0.0:2375` may be used as well

        Raises:
            - ValueError: if no `image` was given
        """
        if not image:
            raise ValueError("The name of an image to remove must be provided.")

        api = docker.APIClient(base_url=docker_server_url, version="auto")
        api.remove_image(image=image, force=force)
class TagImage(Task):
    """
    Task that applies a tag to a Docker image.

    Every initialization argument below may also be supplied (or overridden)
    at runtime.

    Args:
        - image (str, optional): the image to tag
        - repository (str, optional): the repository to set for the tag
        - tag (str, optional): the tag name for the image
        - force (bool, optional): force tagging of the image
        - docker_server_url (str, optional): URL of the Docker server. The
            default is `unix:///var/run/docker.sock`, but remote hosts such as
            `tcp://0.0.0.0:2375` may be used as well
        - **kwargs (dict, optional): extra keyword arguments forwarded to the
            `Task` constructor
    """

    def __init__(
        self,
        image: str = None,
        repository: str = None,
        tag: str = None,
        force: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
        **kwargs: Any
    ):
        self.docker_server_url = docker_server_url
        self.image = image
        self.repository = repository
        self.tag = tag
        self.force = force
        super().__init__(**kwargs)

    @defaults_from_attrs("image", "repository", "tag", "force", "docker_server_url")
    def run(
        self,
        image: str = None,
        repository: str = None,
        tag: str = None,
        force: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
    ) -> bool:
        """
        Task run method.

        Args:
            - image (str, optional): the image to tag
            - repository (str, optional): the repository to set for the tag
            - tag (str, optional): the tag name for the image
            - force (bool, optional): force tagging of the image
            - docker_server_url (str, optional): URL of the Docker server.
                Defaults to `unix:///var/run/docker.sock`; remote hosts such
                as `tcp://0.0.0.0:2375` may be used as well

        Returns:
            - bool: whether the tagging succeeded

        Raises:
            - ValueError: if either `image` or `repository` was not given
        """
        if not (image and repository):
            raise ValueError("Both image and repository must be provided.")

        api = docker.APIClient(base_url=docker_server_url, version="auto")
        return api.tag(image=image, repository=repository, tag=tag, force=force)
class BuildImage(Task):
    """
    Task for building a Docker image.
    Note that all initialization arguments can optionally be provided or overwritten at runtime.

    Args:
        - path (str, optional): The path to the directory containing the Dockerfile
        - tag (str, optional): The tag to give the final image
        - nocache (bool, optional): Don't use cache when set to `True`
        - rm (bool, optional): Remove intermediate containers; defaults to `True`
        - forcerm (bool, optional): Always remove intermediate containers, even after
            unsuccessful builds; defaults to `False`
        - docker_server_url (str, optional): URL for the Docker server. Defaults to
            `unix:///var/run/docker.sock` however other hosts such as `tcp://0.0.0.0:2375`
            can be provided
        - **kwargs (dict, optional): additional keyword arguments to pass to the Task
            constructor
    """

    def __init__(
        self,
        path: str = None,
        tag: str = None,
        nocache: bool = False,
        rm: bool = True,
        forcerm: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
        **kwargs: Any
    ):
        self.path = path
        self.tag = tag
        self.nocache = nocache
        self.rm = rm
        self.forcerm = forcerm
        self.docker_server_url = docker_server_url
        super().__init__(**kwargs)

    @defaults_from_attrs("path", "tag", "nocache", "rm", "forcerm", "docker_server_url")
    def run(
        self,
        path: str = None,
        tag: str = None,
        nocache: bool = False,
        rm: bool = True,
        forcerm: bool = False,
        docker_server_url: str = "unix:///var/run/docker.sock",
    ) -> Any:
        """
        Task run method.

        Args:
            - path (str, optional): The path to the directory containing the Dockerfile
            - tag (str, optional): The tag to give the final image
            - nocache (bool, optional): Don't use cache when set to `True`
            - rm (bool, optional): Remove intermediate containers; defaults to `True`
            - forcerm (bool, optional): Always remove intermediate containers, even after
                unsuccessful builds; defaults to `False`
            - docker_server_url (str, optional): URL for the Docker server. Defaults to
                `unix:///var/run/docker.sock` however other hosts such as `tcp://0.0.0.0:2375`
                can be provided

        Returns:
            - The build output stream from the Docker server (the low-level
              `APIClient.build` call yields the raw build output).
              NOTE: the return annotation was previously `-> None` even though
              the output was returned; it has been corrected to `Any`.

        Raises:
            - ValueError: if `path` is `None`
        """
        if not path:
            raise ValueError(
                "A path to a directory containing a Dockerfile must be provided."
            )

        client = docker.APIClient(base_url=docker_server_url, version="auto")
        return client.build(path=path, tag=tag, nocache=nocache, rm=rm, forcerm=forcerm)
| 35.979167 | 96 | 0.601766 | 1,720 | 13,816 | 4.728488 | 0.086047 | 0.091479 | 0.088528 | 0.053117 | 0.876675 | 0.867454 | 0.861183 | 0.854912 | 0.854912 | 0.854912 | 0 | 0.009956 | 0.302113 | 13,816 | 383 | 97 | 36.073107 | 0.833541 | 0.51643 | 0 | 0.757962 | 0 | 0 | 0.143434 | 0.056882 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076433 | false | 0 | 0.025478 | 0 | 0.171975 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4334fd0f3af4850ddf9cbb705f91bce2d1c103b8 | 10,555 | py | Python | pattern/3.py | itspuneet/itspuneet | d44f78afcff275aa56f03bba738ac3e4f2c30843 | [
"bzip2-1.0.6"
] | null | null | null | pattern/3.py | itspuneet/itspuneet | d44f78afcff275aa56f03bba738ac3e4f2c30843 | [
"bzip2-1.0.6"
] | null | null | null | pattern/3.py | itspuneet/itspuneet | d44f78afcff275aa56f03bba738ac3e4f2c30843 | [
"bzip2-1.0.6"
] | null | null | null | Python 3.8.1 (tags/v3.8.1:1b293b6, Dec 18 2019, 22:39:24) [MSC v.1916 32 bit (Intel)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 9, in <module>
a[i+3]='4'
IndexError: list assignment index out of range
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 15, in <module>
for k in i:
TypeError: 'int' object is not iterable
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 9, in <module>
if a[i]==' ':
IndexError: list index out of range
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
what is your name
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 11, in <module>
for i in j:
TypeError: 'int' object is not iterable
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 13, in <module>
b[z]=a[z]
NameError: name 'b' is not defined
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 14, in <module>
b[z]=a[z]
IndexError: list assignment index out of range
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 14, in <module>
b[z]=a[z]
IndexError: list assignment index out of range
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py
>>>
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/5.py", line 14, in <module>
b[z]=a[z]
IndexError: list assignment index out of range
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/sample.py
Traceback (most recent call last):
File "C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/sample.py", line 3, in <module>
a[i+1]='2'
TypeError: can only concatenate str (not "int") to str
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/sample.py
*
*
*
*
*
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/sample.py
* * * * *
* * * *
* * *
* *
*
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/string program/sample.py
*
* *
* * *
* * * *
* * * * *
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/1.py
*
* *
* * *
* * * *
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/1.py
* * * * *
* * * *
* * *
* *
*
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/1.py
*
* *
* * *
* * * *
* * * * *
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/1.py
*
*
*
*
*
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/1.py
*
*
*
*
*
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x x x x x
x x x x
x x x
x x
x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x
x x
x x x
x x x x
x x x x x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x x x x x
x x x x
x x x
x x
x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x
x x
x x x
x x x x
x x x x x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x
x x
x x x
x x x x
x x x x x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x x x x x
x x x x x
x x x x x
x x x x x
x x x x x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x x x x
x x x
x x
x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x x x x x
x x x x
x x x
x x
x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
0 0 0 1 0 2 0 3 0 4
1 1 1 2 1 3 1 4
2 2 2 3 2 4
3 3 3 4
4 4
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
0 0 0 1 0 2 0 3 0 4
1 0 1 1 1 2 1 3 1 4
2 0 2 1 2 2 2 3 2 4
3 0 3 1 3 2 3 3 3 4
4 0 4 1 4 2 4 3 4 4
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 0 ) ( 0 1 ) ( 0 2 ) ( 0 3 ) ( 0 4 )
( 1 0 ) ( 1 1 ) ( 1 2 ) ( 1 3 ) ( 1 4 )
( 2 0 ) ( 2 1 ) ( 2 2 ) ( 2 3 ) ( 2 4 )
( 3 0 ) ( 3 1 ) ( 3 2 ) ( 3 3 ) ( 3 4 )
( 4 0 ) ( 4 1 ) ( 4 2 ) ( 4 3 ) ( 4 4 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( -1 ) ( -2 ) ( -3 ) ( -4 )
( 1 ) ( 0 ) ( -1 ) ( -2 ) ( -3 )
( 2 ) ( 1 ) ( 0 ) ( -1 ) ( -2 )
( 3 ) ( 2 ) ( 1 ) ( 0 ) ( -1 )
( 4 ) ( 3 ) ( 2 ) ( 1 ) ( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( -1 ) ( -2 ) ( -3 ) ( -4 )
( 0 ) ( -1 ) ( -2 ) ( -3 )
( 0 ) ( -1 ) ( -2 )
( 0 ) ( -1 )
( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 )
( 0 )
( 0 )
( 0 )
( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 1 )
( 2 ) ( 1 )
( 3 ) ( 2 ) ( 1 )
( 4 ) ( 3 ) ( 2 ) ( 1 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( -1 ) ( -2 ) ( -3 ) ( -4 )
( -1 ) ( -2 ) ( -3 )
( -1 ) ( -2 )
( -1 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( 1 ) ( 2 ) ( 3 ) ( 4 )
( 1 ) ( 2 ) ( 3 ) ( 4 ) ( 5 )
( 2 ) ( 3 ) ( 4 ) ( 5 ) ( 6 )
( 3 ) ( 4 ) ( 5 ) ( 6 ) ( 7 )
( 4 ) ( 5 ) ( 6 ) ( 7 ) ( 8 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( 1 ) ( 2 ) ( 3 ) ( 4 )
( -1 ) ( 0 ) ( 1 ) ( 2 ) ( 3 )
( -2 ) ( -1 ) ( 0 ) ( 1 ) ( 2 )
( -3 ) ( -2 ) ( -1 ) ( 0 ) ( 1 )
( -4 ) ( -3 ) ( -2 ) ( -1 ) ( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( 1 ) ( 2 ) ( 3 ) ( 4 )
( 0 ) ( 1 ) ( 2 ) ( 3 )
( 0 ) ( 1 ) ( 2 )
( 0 ) ( 1 )
( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x x x x x
x x x x x
x x x x x
x x x x x
x x x x x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
x
x x
x x x
x x x x
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( 1 ) ( 2 ) ( 3 ) ( 4 )
( -1 ) ( 0 ) ( 1 ) ( 2 ) ( 3 )
( -2 ) ( -1 ) ( 0 ) ( 1 ) ( 2 )
( -3 ) ( -2 ) ( -1 ) ( 0 ) ( 1 )
( -4 ) ( -3 ) ( -2 ) ( -1 ) ( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( 1 ) ( 2 ) ( 3 ) ( 4 )
( 1 ) ( 2 ) ( 3 ) ( 4 ) ( 5 )
( 2 ) ( 3 ) ( 4 ) ( 5 ) ( 6 )
( 3 ) ( 4 ) ( 5 ) ( 6 ) ( 7 )
( 4 ) ( 5 ) ( 6 ) ( 7 ) ( 8 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( 0 ) ( 0 ) ( 0 ) ( 0 )
( 0 ) ( 1 ) ( 2 ) ( 3 ) ( 4 )
( 0 ) ( 2 ) ( 4 ) ( 6 ) ( 8 )
( 0 ) ( 3 ) ( 6 ) ( 9 ) ( 12 )
( 0 ) ( 4 ) ( 8 ) ( 12 ) ( 16 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 1 ) ( 2 ) ( 3 ) ( 4 )
( 2 ) ( 4 ) ( 6 ) ( 8 )
( 3 ) ( 6 ) ( 9 ) ( 12 )
( 4 ) ( 8 ) ( 12 ) ( 16 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 1 ) ( 2 ) ( 3 ) ( 4 )
( 2 ) ( 4 ) ( 6 ) ( 8 )
( 3 ) ( 6 ) ( 9 ) ( 12 )
( 4 ) ( 8 ) ( 12 ) ( 16 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( -1 ) ( -2 ) ( -3 ) ( -4 )
( 1 ) ( 0 ) ( -1 ) ( -2 ) ( -3 )
( 2 ) ( 1 ) ( 0 ) ( -1 ) ( -2 )
( 3 ) ( 2 ) ( 1 ) ( 0 ) ( -1 )
( 4 ) ( 3 ) ( 2 ) ( 1 ) ( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( -1 ) ( -2 ) ( -3 ) ( -4 )
( 0 ) ( -1 ) ( -2 ) ( -3 )
( 0 ) ( -1 ) ( -2 )
( 0 ) ( -1 )
( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( -1 ) ( -2 ) ( -3 ) ( -4 )
( 0 ) ( -1 ) ( -2 ) ( -3 )
( 0 ) ( -1 ) ( -2 )
( 0 ) ( -1 )
( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
( 0 ) ( -1 ) ( -2 ) ( -3 ) ( -4 )
( 0 ) ( -1 ) ( -2 ) ( -3 )
( 0 ) ( -1 ) ( -2 )
( 0 ) ( -1 )
( 0 )
>>>
= RESTART: C:/Users/Mr Puneet Tiwari/AppData/Local/Programs/Python/Python38-32/pattern/2.py
* * * * *
* * * *
* * *
* *
*
>>> | 30.862573 | 123 | 0.558408 | 1,717 | 10,555 | 3.432732 | 0.057076 | 0.050899 | 0.071259 | 0.088225 | 0.951476 | 0.950458 | 0.947404 | 0.945877 | 0.945877 | 0.94452 | 0 | 0.099708 | 0.25315 | 10,555 | 342 | 124 | 30.862573 | 0.647977 | 0 | 0 | 0.781155 | 0 | 0.027356 | 0.081253 | 0.048458 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
433aebf6cf4cf18d7c487467e3940cb9926986cb | 9,052 | py | Python | tests/v2/test_0222-count-with-axis0.py | colesbury/awkward-1.0 | d036ab18eb54de8a2571d9f179d315ac8ee22119 | [
"BSD-3-Clause"
] | null | null | null | tests/v2/test_0222-count-with-axis0.py | colesbury/awkward-1.0 | d036ab18eb54de8a2571d9f179d315ac8ee22119 | [
"BSD-3-Clause"
] | null | null | null | tests/v2/test_0222-count-with-axis0.py | colesbury/awkward-1.0 | d036ab18eb54de8a2571d9f179d315ac8ee22119 | [
"BSD-3-Clause"
] | null | null | null | # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
import pytest # noqa: F401
import numpy as np # noqa: F401
import awkward as ak # noqa: F401
def test():
    """Regression test: ak.count with axis=0 on a ragged doubly-nested list.

    Builds a jagged structure whose inner-list lengths come from ``nums``
    (each row is 17, 11, a run of 8s, then 36 zeros) and checks that the
    axis=0 reduction produces a valid layout.
    """
    # Number of consecutive 8s in each row of the original hand-written
    # literal; every row starts with [17, 11] and ends with 36 zeros.
    eights_per_row = [18, 17, 18, 18, 18, 18, 18, 18, 16, 18]
    nums = [[17, 11] + [8] * n_eights + [0] * 36 for n_eights in eights_per_row]

    # Each count in ``nums`` becomes a list of that many zeros, giving a
    # list-of-lists-of-lists whose inner lengths are ragged.
    sample = [[[0] * length for length in row] for row in nums]

    assert ak._v2.operations.describe.is_valid(
        ak._v2.operations.reducers.count(sample, axis=0)
    )
| 15.137124 | 87 | 0.095559 | 623 | 9,052 | 1.383628 | 0.070626 | 0.812065 | 1.183295 | 1.531323 | 0.669374 | 0.669374 | 0.669374 | 0.669374 | 0.669374 | 0.669374 | 0 | 0.392857 | 0.832965 | 9,052 | 597 | 88 | 15.162479 | 0.177249 | 0.013036 | 0 | 0.959391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001692 | 1 | 0.001692 | false | 0 | 0.005076 | 0 | 0.006768 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
4a2c7d848ea13476644f984e77ddf2e404d63b9c | 208 | py | Python | Arya/admin.py | yezimai/oldboyProject | 889eebc2e6158b07ac0964b25eb01df743ad0117 | [
"Apache-2.0"
] | null | null | null | Arya/admin.py | yezimai/oldboyProject | 889eebc2e6158b07ac0964b25eb01df743ad0117 | [
"Apache-2.0"
] | null | null | null | Arya/admin.py | yezimai/oldboyProject | 889eebc2e6158b07ac0964b25eb01df743ad0117 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
import models
# Register your models here.
# Expose the Host and Group models in the Django admin interface with the
# default ModelAdmin options (no custom admin classes are defined here).
admin.site.register(models.Host)
admin.site.register(models.Group)
| 23.111111 | 39 | 0.783654 | 29 | 208 | 5.448276 | 0.62069 | 0.113924 | 0.21519 | 0.291139 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005405 | 0.110577 | 208 | 8 | 40 | 26 | 0.848649 | 0.230769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4a4a3a5b70219fd36f03fce63f90bc7d7b29bcc5 | 2,864 | py | Python | test/expressions/expr19.py | abjugard/MagicPython | 2802ded681e0ab1a1057821c1da287147d639505 | [
"MIT"
] | 1,482 | 2015-10-16T21:59:32.000Z | 2022-03-30T11:44:40.000Z | test/expressions/expr19.py | abjugard/MagicPython | 2802ded681e0ab1a1057821c1da287147d639505 | [
"MIT"
] | 226 | 2015-10-15T15:53:44.000Z | 2022-03-25T03:08:27.000Z | test/expressions/expr19.py | abjugard/MagicPython | 2802ded681e0ab1a1057821c1da287147d639505 | [
"MIT"
] | 129 | 2015-10-20T02:41:49.000Z | 2022-03-22T01:44:36.000Z | a. #foo
a.
#foo
a. \
#foo
a. 'bar'
a.
'bar'
a. \
'bar'
a : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
# : comment.line.number-sign.python, punctuation.definition.comment.python, source.python
foo : comment.line.number-sign.python, source.python
a : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
# : comment.line.number-sign.python, punctuation.definition.comment.python, source.python
foo : comment.line.number-sign.python, source.python
a : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
\ : meta.member.access.python, punctuation.separator.continuation.line.python, source.python
: meta.member.access.python, source.python
# : comment.line.number-sign.python, punctuation.definition.comment.python, source.python
foo : comment.line.number-sign.python, source.python
a : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
bar : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
a : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.single.python
bar : source.python, string.quoted.docstring.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.docstring.single.python
a : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
\ : meta.member.access.python, punctuation.separator.continuation.line.python, source.python
: meta.member.access.python, source.python
' : meta.member.access.python, punctuation.definition.string.begin.python, source.python, string.quoted.single.python
bar : meta.member.access.python, source.python, string.quoted.single.python
' : meta.member.access.python, punctuation.definition.string.end.python, source.python, string.quoted.single.python
| 55.076923 | 129 | 0.668645 | 319 | 2,864 | 6.003135 | 0.0721 | 0.231854 | 0.272585 | 0.218277 | 1 | 0.987467 | 0.979112 | 0.928982 | 0.901828 | 0.901828 | 0 | 0 | 0.21648 | 2,864 | 51 | 130 | 56.156863 | 0.853387 | 0.107891 | 0 | 0.761905 | 0 | 0.095238 | 0.003531 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
4a529eb343726a1bfb10754b7f329968a851d6b5 | 28,829 | py | Python | image.py | dgarrett/Inkplate-micropython | a9f5786810090244730a0cdcae1c9ce65c78d11b | [
"MIT"
] | 22 | 2020-09-17T14:23:32.000Z | 2021-05-03T13:24:09.000Z | image.py | dgarrett/Inkplate-micropython | a9f5786810090244730a0cdcae1c9ce65c78d11b | [
"MIT"
] | 6 | 2021-07-09T13:58:19.000Z | 2022-02-10T11:05:59.000Z | image.py | dgarrett/Inkplate-micropython | a9f5786810090244730a0cdcae1c9ce65c78d11b | [
"MIT"
] | 7 | 2020-12-22T00:02:25.000Z | 2021-04-01T11:21:09.000Z | image = bytearray(
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\xff\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xff\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\xff\xff\xff\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xff\xff\xff\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xff\xff\xff\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x7f\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\xff\xff\xff\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\xff\xff\xff\xff\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\xff\x00\x00\x3f\xff\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xff\xf8\x00\x00\x0f\xff\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xff\x80\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x7f\xff\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\xf8\x00\x00\x00\x00\x0f\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\xf0\x00\x00\x00\x00\x07\xff\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xff\xc0\x00\x00\x00\x00\x01\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\x80\x00\x00\x00\x00\x00\xff\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xfe\x00\x00\x00\x00\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xfc\x00\x00\x00\x00\x00\x00\x1f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xf8\x00\x00\x00\x00\x00\x00\x0f\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xf0\x00\x00\x00\x00\x00\x00\x07\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xe0\x00\x00\x00\x00\x00\x00\x03\xff\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xc0\x00\x00\x00\x00\x00\x00\x01\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\x80\x00\x00\x00\x00\x00\x00\x00\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\x80\x00\x00\x00\x00\x00\x00\x00\x7f\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xfc\x00\x00\x7f\xff\xff\xf8\x00\x00\x0f\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf8\x00\x07\xff\xff\xff\xfc\x00\x00\x0f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc3\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x01\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf8\x00\x0f\xff\xff\xff\xfc\x00\x00\x0f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc3\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x01\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xf0\x00\x3f\xff\xff\xff\xfc\x00\x00\x07\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc3\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x03\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xf0\x00\x7f\xff\xff\xff\xfc\x00\x00\x03\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc3\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x03\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xe0\x01\xff\xff\xff\xff\xff\xff\x80\x03\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc3\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x01\xf8\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xe0\x03\xff\xfc\x00\x7d\xff\xff\x80\x03\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc3\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xc0\x07\xfe\x30\x00\x30\xff\xff\x80\x01\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xc0\x0f\xfc\x30\x00\x30\xff\xff\x80\x01\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xc0\x1f\xf0\x38\x70\x20\xfe\x1f\x00\x00\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xc0\x1f\xe0\x3f\xf8\x78\xfc\x00\x00\x00\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x1f\xff\x80\x01\xff\xfc\x00\x00\x1f\xff\xff\xc1\xc0\x00\xff\xfe\x00\x0f\xff\xfe\x00\x00\xe0\x00\x7f\xff\x00\x00\x07\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x80\x3f\xc0\x3f\xf8\xf8\xfc\x00\x00\x00\xff\x00\x00\x00\x0f\xff\xff\xf0\x00\x00\x00\x03\xff\xff\xc0\x1f\xff\xff\xc0\x01\xff\xff\xff\xc3\xe0\x0f\xff\xff\xe0\x1f\xff\xff\xf0\x00\xf0\x07\xff\xff\xf0\x00\x7f\xff\xff\x00\x00\x00\x7f\xff\xfe\x00\x07\xff\xff\xe0\x0f\xff\xff\x07\xff\xfc\x00\xff\x80\x3f\x80\x3f\xf8\xf8\xfc\x00\x00\x00\xff\x80\x00\x00\x1f\xff\xff\xf8\x00\x00\x00\x0f\xff\xff\xc0\x3f\xff\xff\xe0\x03\xff\xff\xff\xc3\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xfc\x01\xf0\x0f\x
ff\xff\xf8\x00\xff\xff\xff\x80\x00\x00\xff\xff\xff\x00\x0f\xff\xff\xf0\x0f\xff\xff\x8f\xff\xff\x00\xff\x00\x7f\x00\x00\xf0\x78\xfc\x00\x00\x00\xff\x80\x00\x00\x3f\xff\xff\xfc\x00\x00\x00\x1f\xff\xff\xc0\xff\xff\xff\xf0\x0f\xff\xff\xff\xc3\xf0\x3f\xff\xff\xf8\x1f\xff\xff\xfe\x01\xf0\x3f\xff\xff\xfc\x03\xff\xff\xff\xc0\x00\x03\xff\xff\xff\xc0\x1f\xff\xff\xfc\x0f\xff\xff\xff\xff\xff\xc0\xff\x00\xff\x00\x00\x00\x30\xfc\x00\x00\x00\x7f\x80\x00\x00\x3f\xff\xff\xfe\x00\x00\x00\x3f\xff\xff\xc0\xff\xff\xff\xf8\x0f\xff\xff\xff\xc3\xf0\x7f\xff\xff\xfc\x1f\xff\xff\xff\x01\xf0\x3f\xff\xff\xfe\x03\xff\xff\xff\xe0\x00\x03\xff\xff\xff\xc0\x3f\xff\xff\xfc\x0f\xff\xff\xff\xff\xff\xc0\xff\x00\xfe\x00\x00\x00\x00\xfc\x00\x00\x00\x7f\xc0\x00\x00\x7f\x80\x01\xff\x00\x00\x00\x7f\xe0\x00\x01\xfe\x00\x07\xf8\x1f\xe0\x00\x0f\xc3\xf0\xff\x00\x03\xfc\x1f\x80\x01\xff\x81\xf0\x7f\x80\x01\xff\x07\xf8\x00\x1f\xf0\x00\x03\xf0\x00\x0f\xe0\x3f\x00\x00\xfe\x0f\xc0\x07\xff\x00\x1f\xf0\xff\x80\xfc\x00\x00\x00\x00\xfc\x00\x00\x00\x7f\xc0\x00\x00\xfe\x00\x00\x7f\x00\x00\x00\xff\x80\x00\x01\xfc\x00\x01\xfc\x1f\xc0\x00\x07\xc3\xf0\xfe\x00\x00\xfe\x1f\x00\x00\x7f\xc1\xf0\x7f\x00\x00\x7f\x07\xf0\x00\x07\xf0\x00\x07\xe0\x00\x07\xf0\x7e\x00\x00\x7f\x0f\x80\x03\xff\x00\x07\xf0\xff\xfc\xfc\xfc\x00\x00\x00\xfc\x00\x00\x00\x7f\xc0\x00\x00\xfc\x00\x00\x3f\x00\x00\x00\xfe\x00\x00\x03\xf0\x00\x00\xfc\x3f\x00\x00\x07\xc3\xf0\xfc\x00\x00\x7e\x1f\x00\x00\x1f\xc1\xf0\xfe\x00\x00\x3f\x0f\xe0\x00\x03\xf0\x00\x07\xc0\x00\x03\xf0\x7c\x00\x00\x3f\x0f\x80\x00\xff\x00\x03\xf0\xff\xff\xfd\xfe\x00\x00\x00\xfe\x00\x00\x00\x3f\xc0\x00\x00\xfc\x00\x00\x3f\x00\x00\x00\xfc\x00\x00\x03\xf0\x00\x00\xfc\x3f\x00\x00\x07\xc3\xf1\xf8\x00\x00\x3e\x1f\x00\x00\x0f\xc1\xf0\xfc\x00\x00\x1f\x0f\xc0\x00\x01\xf0\x00\x0f\xc0\x00\x01\xf0\xfc\x00\x00\x1f\x0f\x80\x00\x7f\x00\x01\xf8\xff\xff\xff\xff\x01\xc0\x00\xff\xfc\x00\x00\x3f\xc0\x00\x00\xf8\x00\x00\x1f\x00\x00\x01\xf8\x00\x00\x03\xf0\x00\x00\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x
07\xe1\xf0\xfc\x00\x00\x1f\x0f\xc0\x00\x01\xf0\x00\x0f\x80\x00\x01\xf0\xf8\x00\x00\x1f\x0f\x80\x00\x3f\x00\x00\xfc\xff\xff\xff\xff\x83\xe0\x00\xff\xfc\x00\x00\x3f\xc0\x00\x00\xf8\x00\x00\x1f\x00\x00\x01\xf0\x00\x00\x03\xe0\x00\x00\xfc\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xe1\xf0\xfc\x00\x00\x0f\x0f\xc0\x00\x03\xf0\x00\x0f\x80\x00\x00\xf0\xf8\x00\x00\x0f\x0f\x80\x00\x3f\x00\x00\xfc\xff\xff\xff\xff\xff\xe0\x00\xff\xfc\x00\x00\x3f\xc0\x00\x00\xf8\x00\x00\x3f\x00\x00\x01\xf0\x00\x00\x00\x00\x00\x3f\xfc\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xe1\xf0\xfc\x00\x00\x00\x00\x00\x00\x7f\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x3f\x00\x00\x7e\xff\xff\xff\xff\xff\xe0\x00\xff\xfc\x00\x00\x3f\xc0\x00\x00\xf8\x00\x00\x7f\x00\x00\x03\xf0\x00\x00\x00\x00\x00\xff\xfc\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x00\x00\x03\xff\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x3f\x00\x00\x7e\x3f\xfd\xfd\xfe\x03\xc0\x00\xfe\x00\x00\x00\x3f\xc0\x00\x00\xf8\x00\x0f\xff\x00\x00\x03\xf0\x00\x00\x00\x00\x3f\xff\xfc\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x00\x00\xff\xff\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x3f\xfc\xfc\xfc\x00\x80\x00\xfc\x00\x00\x00\x3f\xc0\x00\x00\xf8\x00\x7f\xfe\x00\x00\x03\xf0\x00\x00\x00\x01\xff\xff\xfc\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x00\x07\xff\xff\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\xfc\x00\x00\x00\x00\xfc\x00\x00\x00\x7f\xc0\x00\x00\xf8\x1f\xff\xfc\x07\xff\xc3\xf0\x00\x00\x00\x3f\xff\xff\xfc\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x00\xff\xff\xff\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\xfe\x00\x00\x00\x00\xfc\x00\x00\x00\x7f\xc0\x00\x00\xfc\xff\xff\xfc\x0f\xff\xc3\xf0\x00\x00\x00\xff\xff\xfc\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x
3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x01\xff\xff\xf1\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\xff\x00\x70\x00\x00\xfc\x00\x00\x00\x7f\xc0\x00\x00\xff\xff\xff\xc0\x0f\xff\xc3\xf0\x00\x00\x00\xff\xff\x00\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x03\xff\xfc\x00\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\x7f\x00\xf8\x02\x00\xfc\x00\x00\x00\x7f\x80\x00\x00\xff\xff\xfe\x00\x0f\xff\xc3\xf0\x00\x00\x01\xff\xf8\x00\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x07\xff\xe0\x00\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\x3f\x80\xf8\x0f\x00\xfc\x00\x00\x00\xff\x80\x00\x00\xff\xff\xc0\x00\x07\xff\xc3\xf0\x00\x00\x03\xff\x00\x00\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x07\xfc\x00\x00\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\x3f\xc0\xf8\x0f\x80\xfc\x00\x00\x00\xff\x80\x00\x00\xff\xfe\x00\x00\x00\x00\x03\xf0\x00\x00\x03\xf8\x00\x00\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x00\x0f\xe0\x00\x00\xf0\x00\x0f\x80\x00\x00\x00\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\x1f\xe0\xf0\x0f\x80\xfc\x00\x00\x00\xff\x80\x00\x00\xff\x80\x00\x0f\x00\x00\x03\xf0\x00\x00\x03\xf0\x00\x00\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x0f\x0f\xc0\x00\x00\xf0\x00\x0f\x80\x00\x00\xe0\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\x1f\xf0\x60\x0f\x80\xfc\x00\x00\x00\xff\x00\x00\x00\xfc\x00\x00\x1f\x00\x00\x03\xf0\x00\x00\x03\xe0\x00\x00\x7c\x3f\x00\x00\x07\xc3\xf1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x0f\x0f\xc0\x00\x01\xf0\x00\x0f\x80\x00\x00\xf0\xf8\x00\x00\x0f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\x0f\xfc\x20\x06\x00\xff\xff\x80\x00\xff\x00\x00\x00\xfc\x00\x00\x3f\x00\x00\x03\xf0\x00\x00\x03\xf0\x00\x00\x7c\x3f\x00\x00\x07\xc3\x
f1\xf0\x00\x00\x3f\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x1f\x0f\xc0\x00\x01\xf0\x00\x0f\x80\x00\x01\xf0\xf8\x00\x00\x1f\x0f\x80\x00\x1f\x00\x00\x3f\x00\x00\x07\xfe\x70\x0e\x00\xff\xff\x80\x00\xff\x00\x00\x00\xfc\x00\x00\x3f\x00\x00\x03\xf0\x00\x00\x03\xf0\x00\x00\xfc\x3f\x00\x00\x0f\xc3\xf0\xf8\x00\x00\x3e\x1f\x00\x00\x03\xf1\xf0\xfc\x00\x00\x1f\x0f\xc0\x00\x01\xf0\x00\x0f\xc0\x00\x01\xf0\xfc\x00\x00\x1f\x0f\x80\x00\x1f\x00\x00\x3f\x07\x80\x03\xff\xf8\x3e\x01\xff\xff\x80\x01\xff\x00\x00\x00\xfc\x00\x00\x3f\x00\x00\x03\xf0\x00\x00\x03\xf8\x00\x00\xfc\x3f\x80\x00\x0f\xc3\xf0\xfc\x00\x00\x7e\x1f\x00\x00\x03\xf1\xf0\x7e\x00\x00\x3f\x0f\xe0\x00\x03\xf0\x00\x07\xc0\x00\x03\xf0\x7c\x00\x00\x3f\x0f\x80\x00\x1f\x00\x00\x3f\x0f\xc0\x01\xff\xff\xff\xff\xff\xff\x80\x03\xff\x00\x00\x00\xfe\x00\x00\xfe\x00\x00\x03\xf0\x00\x00\x03\xfc\x00\x01\xfc\x1f\xc0\x00\x1f\xc3\xf0\xfe\x00\x00\xfe\x1f\x00\x00\x03\xf1\xf0\x7f\x00\x00\x7f\x07\xf0\x00\x07\xf0\x0e\x07\xe0\x00\x07\xe0\x7e\x00\x00\x7e\x0f\x80\x00\x1f\x00\x00\x3f\x0f\xe0\x00\xff\xff\xff\xff\xfe\xff\x00\x03\xfe\x00\x00\x00\x7f\xff\xff\xfe\x00\x00\x03\xf0\x00\x00\x01\xff\xff\xff\xf8\x1f\xff\xff\xff\x83\xf0\xff\xff\xff\xfc\x1f\x00\x00\x03\xf1\xf0\x3f\xff\xff\xfe\x07\xff\xff\xff\xe0\x1f\x03\xff\xff\xff\xe0\x3f\xff\xff\xfe\x0f\x80\x00\x1f\x00\x00\x3f\x1f\xf0\x00\x7f\xff\xff\xff\xfc\x00\x00\x03\xfe\x00\x00\x00\x3f\xff\xff\xfc\x00\x00\x03\xf0\x00\x00\x00\xff\xff\xff\xf0\x0f\xff\xff\xff\x83\xf0\x7f\xff\xff\xfc\x1f\x00\x00\x03\xf1\xf0\x3f\xff\xff\xfe\x03\xff\xff\xff\xe0\x3f\x03\xff\xff\xff\xc0\x3f\xff\xff\xfc\x0f\x80\x00\x1f\x00\x00\x3f\x1f\xf0\x00\x1f\xff\xff\xff\xfc\x00\x00\x03\xfc\x00\x00\x00\x3f\xff\xff\xf8\x00\x00\x03\xf0\x00\x00\x00\xff\xff\xff\xf0\x07\xff\xff\xff\x03\xf0\x3f\xff\xff\xf8\x1f\x00\x00\x03\xf1\xf0\x1f\xff\xff\xfc\x01\xff\xff\xff\xc0\x3f\x01\xff\xff\xff\x80\x1f\xff\xff\xf8\x0f\x80\x00\x1f\x00\x00\x3f\x0f\xf0\x00\x07\xff\xff\xff\xfc\x00\x00\x07\xfc\x00\x00\x00\x0f\xff\xff\xf0\x00\x00\x03\xf0\x00\x00\x00\x7f\xff\xff\xe0\x03\x
ff\xff\xfe\x03\xf0\x1f\xff\xff\xf0\x1f\x00\x00\x03\xf1\xf0\x0f\xff\xff\xf8\x00\xff\xff\xff\x80\x3f\x00\xff\xff\xff\x00\x0f\xff\xff\xf0\x0f\x80\x00\x1f\x00\x00\x3f\x0f\xf8\x00\x00\x7f\xff\xff\xf8\x00\x00\x0f\xfc\x00\x00\x00\x03\xff\xff\xc0\x00\x00\x01\xe0\x00\x00\x00\x0f\xff\xff\x00\x00\xff\xff\xf0\x03\xe0\x07\xff\xff\xc0\x0f\x00\x00\x03\xe0\xf0\x03\xff\xff\xe0\x00\x3f\xff\xfe\x00\x0e\x00\x1f\xff\xf8\x00\x01\xff\xff\x80\x07\x00\x00\x0e\x00\x00\x3c\x0f\xfc\x00\x00\x0f\xf8\x00\x00\x00\x00\x0f\xf8\x00\x00\x00\x00\xff\xfe\x00\x00\x00\x00\xc0\x00\x00\x00\x01\xff\xfc\x00\x00\x1f\xff\xc0\x00\xc0\x00\xff\xfe\x00\x06\x00\x00\x00\xc0\x60\x00\x7f\xff\x00\x00\x07\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\x80\x00\x00\x00\x00\x00\x00\x00\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xc0\x00\x00\x00\x00\x00\x00\x00\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xe0\x00\x00\x00\x00\x00\x00\x03\xff\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xf0\x00\x00\x00\x00\x00\x00\x03\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xf8\x00\x00\x00\x00\x00\x00\x0f\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x0f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xfe\x00\x00\x00\x00\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\x00\x00\x00\x00\x00\x00\x7f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xff\xc0\x00\x00\x00\x00\x00\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\xe0\x00\x00\x00\x00\x03\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\xf8\x00\x00\x00\x00\x0f\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfc\x00\x00\x00\x00\x1f\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xff\xc0\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xff\xe0\x00\x00\x03\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\xff\x00\x00\x3f\xff\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xff\xff\xe0\x03\xff\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\xff\xff\xff\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xff\xff\xff\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\xff\xff\xff\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\xff\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xff\xff\xff\x80\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
)
| 7,207.25 | 28,807 | 0.749766 | 7,203 | 28,829 | 3.000833 | 0.005692 | 1.310479 | 1.779181 | 2.257876 | 0.955309 | 0.938099 | 0.910201 | 0.893269 | 0.86787 | 0.843164 | 0 | 0.422484 | 0.000312 | 28,829 | 3 | 28,808 | 9,609.666667 | 0.327516 | 0 | 0 | 0 | 0 | 0.333333 | 0.998994 | 0.998994 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 |
4a6090c0a5b2c196308aadb5099ad951c86e95dc | 35,279 | py | Python | tests/unit_test/framework/lock.py | Chisanan232/multirunnable | 7223e49750dc3d3ccf7ebcd3d292138916b582f2 | [
"Apache-2.0"
] | 1 | 2022-03-18T15:20:53.000Z | 2022-03-18T15:20:53.000Z | tests/unit_test/framework/lock.py | Chisanan232/multirunnable | 7223e49750dc3d3ccf7ebcd3d292138916b582f2 | [
"Apache-2.0"
] | null | null | null | tests/unit_test/framework/lock.py | Chisanan232/multirunnable | 7223e49750dc3d3ccf7ebcd3d292138916b582f2 | [
"Apache-2.0"
] | null | null | null | from gevent.threading import get_ident as get_gevent_ident
from gevent import sleep as gevent_sleep
from typing import Union
from abc import ABCMeta, abstractmethod, ABC
import multiprocessing
import threading
import asyncio
import pytest
import random
import time
from multirunnable.adapter import Lock, RLock, Semaphore, BoundedSemaphore
from multirunnable.mode import FeatureMode
from multirunnable import PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION
from ...test_config import Worker_Size, Worker_Pool_Size, Task_Size, Semaphore_Value
# Module-local aliases of the shared test configuration so the specs below
# can reference short names.
_Worker_Size = Worker_Size
_Worker_Pool_Size = Worker_Pool_Size
_Task_Size = Task_Size
# NOTE: spawns the manager process at import time (module-level side effect);
# needed so Parallel-mode workers can share dict/list collections.
_Process_Manager = multiprocessing.Manager()
_Semaphore_Value = Semaphore_Value
# Seconds each worker sleeps while holding the synchronization primitive.
_Sleep_Time: int = 1
# Inclusive-ish bounds for the random index values the producer workers emit.
_Random_Start_Time: int = 60
_Random_End_Time: int = 80
class FeatureTestSpec(metaclass=ABCMeta):
    """Base specification every synchronization-feature test spec derives from.

    Declares the abstract test hooks concrete specs must implement and provides
    mode-aware helpers: building result collections, identifying the current
    worker and sleeping in whatever style the running mode requires.
    """

    @abstractmethod
    def test_get_feature_instance(self, **kwargs):
        pass

    def _feature_instance_testing(self, _lock_inst: Union[Lock, RLock, Semaphore, BoundedSemaphore]):
        """Assert the adapter's operator caches the exact feature instance it resolves globally."""
        _operator = _lock_inst._feature_operator
        _resolved = _operator._get_feature_instance()
        _cached = _operator._feature_instance
        assert _cached is _resolved, f"The feature property should be the '{_lock_inst.__class__}' instance we set."

    @abstractmethod
    def test_feature_in_parallel(self, **kwargs):
        pass

    @abstractmethod
    def test_feature_by_pykeyword_with_in_parallel(self, **kwargs):
        pass

    @abstractmethod
    def test_feature_in_concurrent(self, **kwargs):
        pass

    @abstractmethod
    def test_feature_by_pykeyword_with_in_concurrent(self, **kwargs):
        pass

    @staticmethod
    def _get_collection_dict(mode: FeatureMode) -> dict:
        # Parallel workers run in separate processes, so cross-process results
        # need a manager-backed dict; every other mode shares plain memory.
        return _Process_Manager.dict() if mode is FeatureMode.Parallel else {}

    @staticmethod
    def _get_collection_list(mode: FeatureMode) -> list:
        # Same reasoning as ``_get_collection_dict`` but for list collections.
        return _Process_Manager.list() if mode is FeatureMode.Parallel else []

    @staticmethod
    def _get_worker_id(mode: FeatureMode) -> str:
        """Return a unique identifier (as ``str``) for the current worker of *mode*."""
        if mode is FeatureMode.Parallel:
            return str(multiprocessing.current_process().pid)
        if mode is FeatureMode.Concurrent:
            return str(threading.get_ident())
        if mode is FeatureMode.GreenThread:
            return str(get_gevent_ident())
        if mode is FeatureMode.Asynchronous:
            if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                _current_task = asyncio.current_task(loop=asyncio.get_event_loop())
            else:
                # ``asyncio.Task.current_task`` is the pre-3.7 spelling.
                _current_task = asyncio.Task.current_task()
            return str(id(_current_task))
        raise ValueError

    @staticmethod
    def _sleep(mode: FeatureMode) -> None:
        """Pause the current worker for ``_Sleep_Time`` seconds in the mode's own style."""
        if mode is FeatureMode.Parallel or mode is FeatureMode.Concurrent:
            time.sleep(_Sleep_Time)
        elif mode is FeatureMode.GreenThread:
            gevent_sleep(_Sleep_Time)
        elif mode is FeatureMode.Asynchronous:
            # NOTE(review): this coroutine is never awaited (the method is
            # synchronous), so no sleep actually happens here; the async test
            # helpers below await their own ``asyncio.sleep`` instead.
            asyncio.sleep(_Sleep_Time)
        else:
            raise ValueError
class LockTestSpec(FeatureTestSpec, ABC):
    """Test specification for the ``Lock`` adapter.

    The static helpers run multiple workers; each one takes the lock, sleeps
    ``_Sleep_Time`` second(s) inside the critical section and records a
    completion timestamp keyed by its worker ID.  ``_chk_done_timestamp``
    then verifies the timestamps prove strictly serialized execution.
    """
    @abstractmethod
    def test_feature_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_in_asynchronous_tasks(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_asynchronous_tasks(self, **kwargs):
        pass
    @staticmethod
    def _feature_testing(mode: FeatureMode, _lock, running_function):
        """Exercise the lock via explicit ``acquire``/``release`` calls."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a timestamp into list
            _lock.acquire()
            _worker_id = FeatureTestSpec._get_worker_id(mode)
            FeatureTestSpec._sleep(mode)
            _time = float(time.time())
            _done_timestamp[_worker_id] = _time
            _lock.release()
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        LockTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _feature_testing_by_pykeyword_with(mode: FeatureMode, _lock, running_function):
        """Exercise the lock via the ``with`` statement (context-manager protocol)."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a time stamp into list
            try:
                with _lock:
                    _worker_id = FeatureTestSpec._get_worker_id(mode)
                    FeatureTestSpec._sleep(mode)
                    _time = float(time.time())
                    _done_timestamp[_worker_id] = _time
            except Exception as e:
                assert False, f"Occur something unexpected issue. Please check it. Exception: {e}"
            else:
                assert True, "Testing code successfully."
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        LockTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _async_feature_testing(mode: FeatureMode, _lock, running_function, event_loop=None, factory=None):
        """Async variant: ``await acquire`` then ``release`` around an awaited sleep."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        async def _target_testing():
            # Save a timestamp into list
            await _lock.acquire()
            await asyncio.sleep(_Sleep_Time)
            _worker_id = FeatureTestSpec._get_worker_id(mode)
            _time = float(time.time())
            _done_timestamp[_worker_id] = _time
            _lock.release()
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing, event_loop=event_loop, _feature=factory)
        LockTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _async_feature_testing_by_pykeyword_with(mode: FeatureMode, _lock, running_function, factory=None):
        """Async variant exercising the ``async with`` protocol."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        async def _target_testing():
            # Save a time stamp into list
            try:
                async with _lock:
                    await asyncio.sleep(_Sleep_Time)
                    _worker_id = FeatureTestSpec._get_worker_id(mode)
                    _time = float(time.time())
                    _done_timestamp[_worker_id] = _time
            except Exception as e:
                assert False, f"Occur something unexpected issue. Please check it. Exception: {e}"
            else:
                assert True, "Testing code successfully."
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing, _feature=factory)
        LockTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _chk_done_timestamp(_done_timestamp: dict):
        """Verify one unique timestamp per worker and that consecutive distinct
        timestamps differ by exactly ``_Sleep_Time`` (i.e. workers ran one at a time)."""
        assert len(_done_timestamp.keys()) == _Worker_Size, f"The amount of thread ID keys (no de-duplicate) should be equal to worker size '{_Worker_Size}'."
        assert len(set(_done_timestamp.keys())) == _Worker_Size, f"The amount of thread ID keys (de-duplicate) should be equal to worker size '{_Worker_Size}'."
        _previous_v = None
        for _v in sorted(_done_timestamp.values()):
            if _previous_v is None:
                _previous_v = _v
            if _previous_v != _v:
                # Each worker held the lock for _Sleep_Time, so serialized
                # execution implies a gap of exactly that many whole seconds.
                assert int(abs(float(_v) - float(_previous_v))) == _Sleep_Time, \
                    f"The different time betweeen them should be {_Sleep_Time} second(s). One is {_v} and another one is {_previous_v}. All of them are {_done_timestamp}"
                _previous_v = _v
class RLockTestSpec(FeatureTestSpec, ABC):
    """Test specification for the ``RLock`` adapter.

    Identical in shape to :class:`LockTestSpec` except each worker acquires
    the lock twice (and releases twice) to prove reentrancy.
    """
    @abstractmethod
    def test_feature_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_green_thread(self, **kwargs):
        pass
    @staticmethod
    def _feature_testing(mode: FeatureMode, _lock, running_function):
        """Exercise reentrancy via two nested ``acquire``/``release`` pairs."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a timestamp into list
            _lock.acquire()
            _lock.acquire()
            _worker_id = FeatureTestSpec._get_worker_id(mode)
            FeatureTestSpec._sleep(mode)
            _time = float(time.time())
            _done_timestamp[_worker_id] = _time
            _lock.release()
            _lock.release()
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        RLockTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _feature_testing_by_pykeyword_with(mode: FeatureMode, _lock, running_function):
        """Exercise reentrancy via two nested ``with`` blocks."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a time stamp into list
            try:
                with _lock:
                    with _lock:
                        _worker_id = FeatureTestSpec._get_worker_id(mode)
                        FeatureTestSpec._sleep(mode)
                        _time = float(time.time())
                        _done_timestamp[_worker_id] = _time
            except Exception as e:
                assert False, f"Occur something unexpected issue. Please check it. Exception: {e}"
            else:
                assert True, "Testing code successfully."
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        RLockTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _chk_done_timestamp(_done_timestamp: dict):
        """Same serialization check as ``LockTestSpec._chk_done_timestamp``:
        one unique timestamp per worker, consecutive distinct timestamps
        exactly ``_Sleep_Time`` apart."""
        assert len(_done_timestamp.keys()) == _Worker_Size, f"The amount of thread ID keys (no de-duplicate) should be equal to worker size '{_Worker_Size}'."
        assert len(set(_done_timestamp.keys())) == _Worker_Size, f"The amount of thread ID keys (de-duplicate) should be equal to worker size '{_Worker_Size}'."
        _previous_v = None
        for _v in sorted(_done_timestamp.values()):
            if _previous_v is None:
                _previous_v = _v
            if _previous_v != _v:
                assert int(abs(float(_v) - float(_previous_v))) == _Sleep_Time, \
                    f"The different time betweeen them should be {_Sleep_Time} second(s). One is {_v} and another one is {_previous_v}. All of them are {_done_timestamp}"
                _previous_v = _v
class SemaphoreTestSpec(FeatureTestSpec, ABC):
    """Test specification for the ``Semaphore`` adapter.

    Unlike the lock specs, up to ``_Semaphore_Value`` workers may run their
    critical section concurrently, so ``_chk_done_timestamp`` verifies the
    timestamps fall into ``_Worker_Size / _Semaphore_Value`` one-second
    buckets instead of being fully serialized.
    """
    @abstractmethod
    def test_feature_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_in_asynchronous_tasks(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_asynchronous_tasks(self, **kwargs):
        pass
    @staticmethod
    def _feature_testing(mode: FeatureMode, _lock, running_function):
        """Exercise the semaphore via explicit ``acquire``/``release`` calls."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a timestamp into list
            _lock.acquire()
            _worker_id = FeatureTestSpec._get_worker_id(mode)
            FeatureTestSpec._sleep(mode)
            _time = float(time.time())
            _done_timestamp[_worker_id] = _time
            _lock.release()
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        SemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _feature_testing_by_pykeyword_with(mode: FeatureMode, _lock, running_function):
        """Exercise the semaphore via the ``with`` statement."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a time stamp into list
            try:
                with _lock:
                    _worker_id = FeatureTestSpec._get_worker_id(mode)
                    FeatureTestSpec._sleep(mode)
                    _time = float(time.time())
                    _done_timestamp[_worker_id] = _time
            except Exception as e:
                assert False, f"Occur something unexpected issue. Please check it. Exception: {e}"
            else:
                assert True, "Testing code successfully."
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        SemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _async_feature_testing(_lock, running_function, factory=None):
        """Async variant; worker IDs are async-task object ids (no mode parameter —
        this helper is asyncio-only, hence the plain dict collection)."""
        _done_timestamp = {}
        async def _target_testing():
            # Save a timestamp into list
            await _lock.acquire()
            await asyncio.sleep(_Sleep_Time)
            if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                _async_task = asyncio.current_task(loop=asyncio.get_event_loop())
            else:
                # Pre-3.7 spelling of "current task".
                _async_task = asyncio.Task.current_task()
            _async_task_id = id(_async_task)
            _time = float(time.time())
            _done_timestamp[_async_task_id] = _time
            _lock.release()
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing, _feature=factory)
        SemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _async_feature_testing_by_pykeyword_with(_lock, running_function, factory=None):
        """Async variant exercising the ``async with`` protocol."""
        _done_timestamp = {}
        async def _target_testing():
            # Save a time stamp into list
            try:
                async with _lock:
                    await asyncio.sleep(_Sleep_Time)
                    if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                        _async_task = asyncio.current_task(loop=asyncio.get_event_loop())
                    else:
                        _async_task = asyncio.Task.current_task()
                    _async_task_id = id(_async_task)
                    _time = float(time.time())
                    _done_timestamp[_async_task_id] = _time
            except Exception as e:
                assert False, f"Occur something unexpected issue. Please check it. Exception: {e}"
            else:
                assert True, "Testing code successfully."
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing, _feature=factory)
        SemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _chk_done_timestamp(_done_timestamp: dict):
        """Verify the timestamps show ``_Semaphore_Value`` workers ran per
        one-second window: distinct whole-second timestamps should number
        ``_Worker_Size / _Semaphore_Value`` (one more when the division is uneven)."""
        assert len(_done_timestamp.keys()) == _Worker_Size, f"The amount of thread ID keys (no de-duplicate) should be equal to worker size '{_Worker_Size}'."
        assert len(set(_done_timestamp.keys())) == _Worker_Size, f"The amount of thread ID keys (de-duplicate) should be equal to worker size '{_Worker_Size}'."
        assert len(_done_timestamp.values()) == _Worker_Size, f"The amount of done-timestamp (no de-duplicate) should be equal to worker size '{_Worker_Size}'."
        _int_unix_time_timestamps = [int(_v) for _v in _done_timestamp.values()]
        # NOTE(review): evenness of _Worker_Size is used as a proxy for
        # "divisible by _Semaphore_Value" — holds for the current test config
        # but should be confirmed if either constant changes.
        if _Worker_Size % 2 == 0:
            assert len(set(_int_unix_time_timestamps)) == int(_Worker_Size / _Semaphore_Value), \
                f"The amount of done-timestamp (de-duplicate) should be equal to (worker size: {_Worker_Size} / semaphore value: {_Semaphore_Value}) '{int(_Worker_Size / _Semaphore_Value)}'."
        else:
            assert len(set(_int_unix_time_timestamps)) == int(_Worker_Size / _Semaphore_Value) + 1, \
                f"The amount of done-timestamp (de-duplicate) should be equal to (worker size: {_Worker_Size} / semaphore value: {_Semaphore_Value}) '{int(_Worker_Size / _Semaphore_Value)}'."
        _previous_v = None
        for _v in sorted(_int_unix_time_timestamps):
            if _previous_v is None:
                _previous_v = _v
            if _previous_v != _v:
                assert int(abs(float(_v) - float(_previous_v))) == _Sleep_Time, \
                    f"The different time betweeen them should be {_Sleep_Time} second(s). One is {_v} and another one is {_previous_v}. All of them are {_done_timestamp}"
                _previous_v = _v
class BoundedSemaphoreTestSpec(FeatureTestSpec, ABC):
    """Test specification for the ``BoundedSemaphore`` adapter.

    Structurally identical to :class:`SemaphoreTestSpec` — the bounded
    semaphore behaves the same under balanced acquire/release, so the
    same bucketed-timestamp verification applies.
    """
    @abstractmethod
    def test_feature_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_in_asynchronous_tasks(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_asynchronous_tasks(self, **kwargs):
        pass
    @staticmethod
    def _feature_testing(mode: FeatureMode, _lock, running_function):
        """Exercise the bounded semaphore via explicit ``acquire``/``release`` calls."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a timestamp into list
            _lock.acquire()
            _worker_id = FeatureTestSpec._get_worker_id(mode)
            FeatureTestSpec._sleep(mode)
            _time = float(time.time())
            _done_timestamp[_worker_id] = _time
            _lock.release()
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        BoundedSemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _feature_testing_by_pykeyword_with(mode: FeatureMode, _lock, running_function):
        """Exercise the bounded semaphore via the ``with`` statement."""
        _done_timestamp = FeatureTestSpec._get_collection_dict(mode)
        def _target_testing():
            # Save a time stamp into list
            try:
                with _lock:
                    _worker_id = FeatureTestSpec._get_worker_id(mode)
                    FeatureTestSpec._sleep(mode)
                    _time = float(time.time())
                    _done_timestamp[_worker_id] = _time
            except Exception as e:
                assert False, f"Occur something unexpected issue. Please check it. Exception: {e}"
            else:
                assert True, "Testing code successfully."
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing)
        BoundedSemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _async_feature_testing(_lock, running_function, factory=None):
        """Async variant; worker IDs are async-task object ids."""
        _done_timestamp = {}
        async def _target_testing():
            # Save a timestamp into list
            await _lock.acquire()
            await asyncio.sleep(_Sleep_Time)
            if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                _async_task = asyncio.current_task(loop=asyncio.get_event_loop())
            else:
                # Pre-3.7 spelling of "current task".
                _async_task = asyncio.Task.current_task()
            _async_task_id = id(_async_task)
            _time = float(time.time())
            _done_timestamp[_async_task_id] = _time
            _lock.release()
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing, _feature=factory)
        BoundedSemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _async_feature_testing_by_pykeyword_with(_lock, running_function, factory=None):
        """Async variant exercising the ``async with`` protocol."""
        _done_timestamp = {}
        async def _target_testing():
            # Save a time stamp into list
            try:
                async with _lock:
                    await asyncio.sleep(_Sleep_Time)
                    if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                        _async_task = asyncio.current_task(loop=asyncio.get_event_loop())
                    else:
                        _async_task = asyncio.Task.current_task()
                    _async_task_id = id(_async_task)
                    _time = float(time.time())
                    _done_timestamp[_async_task_id] = _time
            except Exception as e:
                assert False, f"Occur something unexpected issue. Please check it. Exception: {e}"
            else:
                assert True, "Testing code successfully."
        # # # # Run multiple workers and save something info at the right time
        running_function(_function=_target_testing, _feature=factory)
        BoundedSemaphoreTestSpec._chk_done_timestamp(_done_timestamp)
    @staticmethod
    def _chk_done_timestamp(_done_timestamp: dict):
        """Same bucketed-timestamp check as ``SemaphoreTestSpec._chk_done_timestamp``."""
        assert len(_done_timestamp.keys()) == _Worker_Size, f"The amount of thread ID keys (no de-duplicate) should be equal to worker size '{_Worker_Size}'."
        assert len(set(_done_timestamp.keys())) == _Worker_Size, f"The amount of thread ID keys (de-duplicate) should be equal to worker size '{_Worker_Size}'."
        assert len(_done_timestamp.values()) == _Worker_Size, f"The amount of done-timestamp (no de-duplicate) should be equal to worker size '{_Worker_Size}'."
        _int_unix_time_timestamps = [int(_v) for _v in _done_timestamp.values()]
        # NOTE(review): evenness of _Worker_Size stands in for divisibility by
        # _Semaphore_Value — confirm if the shared test config ever changes.
        if _Worker_Size % 2 == 0:
            assert len(set(_int_unix_time_timestamps)) == int(_Worker_Size / _Semaphore_Value), \
                f"The amount of done-timestamp (de-duplicate) should be equal to (worker size: {_Worker_Size} / semaphore value: {_Semaphore_Value}) '{int(_Worker_Size / _Semaphore_Value)}'."
        else:
            assert len(set(_int_unix_time_timestamps)) == int(_Worker_Size / _Semaphore_Value) + 1, \
                f"The amount of done-timestamp (de-duplicate) should be equal to (worker size: {_Worker_Size} / semaphore value: {_Semaphore_Value}) '{int(_Worker_Size / _Semaphore_Value)}'."
        _previous_v = None
        for _v in sorted(_int_unix_time_timestamps):
            if _previous_v is None:
                _previous_v = _v
            if _previous_v != _v:
                assert int(abs(float(_v) - float(_previous_v))) == _Sleep_Time, \
                    f"The different time betweeen them should be {_Sleep_Time} second(s). One is {_v} and another one is {_previous_v}. All of them are {_done_timestamp}"
                _previous_v = _v
class EventTestSpec(FeatureTestSpec, ABC):
    """Test specification for the ``Event`` adapter.

    Runs a producer/consumer pair: the producer sets the event three times,
    the consumer waits for and clears it three times.  ``_chk_info`` then
    verifies both workers ran (distinct IDs) and each side recorded exactly
    three flags.  The ``with`` statement is not supported for events, so the
    corresponding abstract hooks are overridden as expected failures.
    """
    @abstractmethod
    def test_feature_in_green_thread(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_in_asynchronous_tasks(self, **kwargs):
        pass
    @pytest.mark.xfail(reason="Doesn't support this feature usage via Python keyword 'with'.")
    def test_feature_by_pykeyword_with_in_parallel(self, **kwargs):
        raise Exception("Doesn't support this feature usage via 'with'.")
    @pytest.mark.xfail(reason="Doesn't support this feature usage via Python keyword 'with'.")
    def test_feature_by_pykeyword_with_in_concurrent(self, **kwargs):
        raise Exception("Doesn't support this feature usage via 'with'.")
    @staticmethod
    def _feature_testing(mode: FeatureMode, _lock, running_function):
        """Run the producer/consumer pair for the sync modes; ``_lock`` is the event."""
        _thread_ids = FeatureTestSpec._get_collection_dict(mode)
        _thread_flag = FeatureTestSpec._get_collection_dict(mode)
        _thread_ids["producer"] = ""
        _thread_ids["consumer"] = ""
        _thread_flag["producer"] = FeatureTestSpec._get_collection_list(mode)
        _thread_flag["consumer"] = FeatureTestSpec._get_collection_list(mode)
        def _target_producer():
            # Set the event 3 times, recording a random index before each set.
            for _ in range(3):
                FeatureTestSpec._sleep(mode)
                _thread_index = random.randrange(_Random_Start_Time, _Random_End_Time)
                _thread_flag["producer"].append(_thread_index)
                _worker_id = FeatureTestSpec._get_worker_id(mode)
                _thread_ids["producer"] = str(_worker_id)
                _lock.set()
        def _target_consumer():
            # Wait for each set, clear it, and record a consumption timestamp.
            while True:
                FeatureTestSpec._sleep(mode)
                _lock.wait()
                _lock.clear()
                _thread_flag["consumer"].append(float(time.time()))
                _worker_id = FeatureTestSpec._get_worker_id(mode)
                _thread_ids["consumer"] = str(_worker_id)
                if len(_thread_flag["producer"]) == 3:
                    break
        # # # # Run multiple workers and save something info at the right time
        running_function(_functions=[_target_producer, _target_consumer])
        EventTestSpec._chk_info(_thread_ids, _thread_flag)
    @staticmethod
    def _async_feature_testing(_lock, running_function, factory=None):
        """Async producer/consumer pair (asyncio-only, hence plain dicts)."""
        _async_task_ids = {"producer": "", "consumer": ""}
        _async_task_flag = {"producer": [], "consumer": []}
        async def _target_producer():
            for _ in range(3):
                await asyncio.sleep(_Sleep_Time)
                _thread_index = random.randrange(_Random_Start_Time, _Random_End_Time)
                _async_task_flag["producer"].append(_thread_index)
                if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                    _async_task_ids["producer"] = str(id(asyncio.current_task()))
                else:
                    _async_task_ids["producer"] = str(id(asyncio.Task.current_task()))
                _lock.set()
        async def _target_consumer():
            while True:
                await _lock.wait()
                _lock.clear()
                _async_task_flag["consumer"].append(float(time.time()))
                # NOTE(review): the consumer records a *thread* ident while the
                # producer records an async-task id; the check below only needs
                # the two IDs to differ, but confirm this mismatch is intentional.
                _async_task_ids["consumer"] = str(threading.get_ident())
                if len(_async_task_flag["producer"]) == 3:
                    break
        # # # # Run multiple workers and save something info at the right time
        running_function(_functions=[_target_producer, _target_consumer], _feature=factory)
        EventTestSpec._chk_info(_async_task_ids, _async_task_flag)
    @staticmethod
    def _chk_info(_thread_ids: dict, _thread_flag: dict):
        """Verify two distinct worker IDs, three flags per side, producer
        values inside the random range, and consumer timestamps spaced by
        ``_Sleep_Time`` where they differ."""
        assert len(set(_thread_ids.values())) == 2, "The amount of thread ID (de-duplicate) should be equal to amount of functions '2'."
        assert len(_thread_flag["producer"]) == 3, "The amount of producer's flags should be equal to '3'."
        assert len(_thread_flag["consumer"]) == 3, "The amount of consumer's flags should be equal to '3'."
        for _p_index in _thread_flag["producer"]:
            assert _Random_Start_Time <= _p_index <= _Random_End_Time, f"All index of producer set should be in range '{_Random_Start_Time}' and '{_Random_End_Time}'."
        _int_unix_time_timestamps = [int(_v) for _v in _thread_flag["consumer"]]
        _previous_v = None
        for _v in sorted(_int_unix_time_timestamps):
            if _previous_v is None:
                _previous_v = _v
            if _previous_v != _v:
                assert int(abs(float(_v) - float(_previous_v))) == _Sleep_Time, \
                    f"The different time between them should be {_Sleep_Time} second(s). One is {_v} and another one is {_previous_v}. All of them are {_thread_flag['consumer']}"
                _previous_v = _v
class ConditionTestSpec(FeatureTestSpec, ABC):
    """Test specification for the ``Condition`` adapter.

    Runs a producer/consumer pair around a condition variable: the producer
    notifies all waiters three times, the consumer waits on the condition
    and records a consumption timestamp each time.  Verification is shared
    with :class:`EventTestSpec`-style checks via ``_chk_info``.
    """
    @abstractmethod
    def test_feature_in_asynchronous_tasks(self, **kwargs):
        pass
    @abstractmethod
    def test_feature_by_pykeyword_with_in_asynchronous_tasks(self, **kwargs):
        pass
    @staticmethod
    def _feature_testing(mode: FeatureMode, _lock, running_function):
        """Sync producer/consumer using explicit ``acquire``/``notify_all``/``wait``/``release``."""
        _thread_ids = FeatureTestSpec._get_collection_dict(mode)
        _thread_flag = FeatureTestSpec._get_collection_dict(mode)
        _thread_ids["producer"] = ""
        _thread_ids["consumer"] = ""
        _thread_flag["producer"] = FeatureTestSpec._get_collection_list(mode)
        _thread_flag["consumer"] = FeatureTestSpec._get_collection_list(mode)
        def _target_producer():
            # Notify all waiters 3 times, recording a random index before each.
            for _ in range(3):
                FeatureTestSpec._sleep(mode)
                _thread_index = random.randrange(_Random_Start_Time, _Random_End_Time)
                _thread_flag["producer"].append(_thread_index)
                _worker_id = FeatureTestSpec._get_worker_id(mode)
                _thread_ids["producer"] = str(_worker_id)
                _lock.acquire()
                _lock.notify_all()
                _lock.release()
        def _target_consumer():
            # Wait for each notification under the condition's lock.
            while True:
                _lock.acquire()
                _lock.wait()
                _thread_flag["consumer"].append(float(time.time()))
                _worker_id = FeatureTestSpec._get_worker_id(mode)
                _thread_ids["consumer"] = str(_worker_id)
                _lock.release()
                if len(_thread_flag["producer"]) == 3:
                    break
        # # # # Run multiple workers and save something info at the right time
        running_function(_functions=[_target_producer, _target_consumer])
        ConditionTestSpec._chk_info(_thread_ids, _thread_flag)
    @staticmethod
    def _feature_testing_by_pykeyword_with(mode: FeatureMode, _lock, running_function):
        """Sync producer/consumer using the ``with`` statement for the condition's lock."""
        _thread_ids = FeatureTestSpec._get_collection_dict(mode)
        _thread_flag = FeatureTestSpec._get_collection_dict(mode)
        _thread_ids["producer"] = ""
        _thread_ids["consumer"] = ""
        _thread_flag["producer"] = FeatureTestSpec._get_collection_list(mode)
        _thread_flag["consumer"] = FeatureTestSpec._get_collection_list(mode)
        def _target_producer():
            for _ in range(3):
                FeatureTestSpec._sleep(mode)
                _thread_index = random.randrange(_Random_Start_Time, _Random_End_Time)
                _thread_flag["producer"].append(_thread_index)
                _worker_id = FeatureTestSpec._get_worker_id(mode)
                _thread_ids["producer"] = str(_worker_id)
                with _lock:
                    _lock.notify_all()
        def _target_consumer():
            while True:
                with _lock:
                    _lock.wait()
                    _thread_flag["consumer"].append(float(time.time()))
                    _worker_id = FeatureTestSpec._get_worker_id(mode)
                    _thread_ids["consumer"] = str(_worker_id)
                if len(_thread_flag["producer"]) == 3:
                    break
        # # # # Run multiple workers and save something info at the right time
        running_function(_functions=[_target_producer, _target_consumer])
        ConditionTestSpec._chk_info(_thread_ids, _thread_flag)
    @staticmethod
    def _async_feature_testing(_lock, running_function, factory=None):
        """Async producer/consumer using explicit awaitable ``acquire``/``wait``."""
        _async_task_ids = {"producer": "", "consumer": ""}
        _async_task_flag = {"producer": [], "consumer": []}
        async def _target_producer():
            for _ in range(3):
                await asyncio.sleep(_Sleep_Time)
                _thread_index = random.randrange(_Random_Start_Time, _Random_End_Time)
                _async_task_flag["producer"].append(_thread_index)
                if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                    _async_task_ids["producer"] = str(id(asyncio.current_task()))
                else:
                    _async_task_ids["producer"] = str(id(asyncio.Task.current_task()))
                await _lock.acquire()
                _lock.notify_all()
                _lock.release()
        async def _target_consumer():
            while True:
                await _lock.acquire()
                await _lock.wait()
                _async_task_flag["consumer"].append(float(time.time()))
                # _async_task_ids["consumer"] = str(threading.get_ident())
                if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                    _async_task_ids["consumer"] = str(id(asyncio.current_task()))
                else:
                    _async_task_ids["consumer"] = str(id(asyncio.Task.current_task()))
                _lock.release()
                if len(_async_task_flag["producer"]) == 3:
                    break
        # # # # Run multiple workers and save something info at the right time
        running_function(_functions=[_target_producer, _target_consumer], _feature=factory)
        ConditionTestSpec._chk_info(_async_task_ids, _async_task_flag)
    @staticmethod
    def _async_feature_testing_by_pykeyword_with(_lock, running_function, factory=None):
        """Async producer/consumer using ``async with`` for the condition's lock."""
        _async_task_ids = {"producer": "", "consumer": ""}
        _async_task_flag = {"producer": [], "consumer": []}
        async def _target_producer():
            for _ in range(3):
                await asyncio.sleep(_Sleep_Time)
                _thread_index = random.randrange(_Random_Start_Time, _Random_End_Time)
                _async_task_flag["producer"].append(_thread_index)
                # _async_task_ids["producer"] = str(threading.get_ident())
                if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                    _async_task_ids["producer"] = str(id(asyncio.current_task()))
                else:
                    _async_task_ids["producer"] = str(id(asyncio.Task.current_task()))
                async with _lock:
                    _lock.notify_all()
        async def _target_consumer():
            while True:
                async with _lock:
                    await _lock.wait()
                    _async_task_flag["consumer"].append(float(time.time()))
                    if (PYTHON_MAJOR_VERSION, PYTHON_MINOR_VERSION) > (3, 6):
                        _async_task_ids["consumer"] = str(id(asyncio.current_task()))
                    else:
                        _async_task_ids["consumer"] = str(id(asyncio.Task.current_task()))
                if len(_async_task_flag["producer"]) == 3:
                    break
        # # # # Run multiple workers and save something info at the right time
        running_function(_functions=[_target_producer, _target_consumer], _feature=factory)
        ConditionTestSpec._chk_info(_async_task_ids, _async_task_flag)
    @staticmethod
    def _chk_info(_thread_ids: dict, _thread_flag: dict):
        """Verify two distinct worker IDs, three flags per side, producer
        values inside the random range, and consumer timestamps spaced by
        ``_Sleep_Time`` where they differ."""
        assert len(set(_thread_ids.values())) == 2, "The amount of thread ID (de-duplicate) should be equal to amount of functions '2'."
        assert len(_thread_flag["producer"]) == 3, "The amount of producer's flags should be equal to '3'."
        assert len(_thread_flag["consumer"]) == 3, "The amount of consumer's flags should be equal to '3'."
        for _p_index in _thread_flag["producer"]:
            assert _Random_Start_Time <= _p_index <= _Random_End_Time, f"All index of producer set should be in range '{_Random_Start_Time}' and '{_Random_End_Time}'."
        _int_unix_time_timestamps = [int(_v) for _v in _thread_flag["consumer"]]
        _previous_v = None
        for _v in sorted(_int_unix_time_timestamps):
            if _previous_v is None:
                _previous_v = _v
            if _previous_v != _v:
                assert int(abs(float(_v) - float(_previous_v))) == _Sleep_Time, \
                    f"The different time between them should be {_Sleep_Time} second(s). One is {_v} and another one is {_previous_v}. All of them are {_thread_flag['consumer']}"
                _previous_v = _v
| 40.690888 | 191 | 0.637943 | 3,993 | 35,279 | 5.204859 | 0.049336 | 0.055045 | 0.016167 | 0.02964 | 0.925757 | 0.906077 | 0.898763 | 0.889333 | 0.880864 | 0.878843 | 0 | 0.002159 | 0.277871 | 35,279 | 866 | 192 | 40.737875 | 0.813629 | 0.051532 | 0 | 0.904306 | 0 | 0.025518 | 0.136473 | 0.004856 | 0 | 0 | 0 | 0 | 0.068581 | 1 | 0.111643 | false | 0.036683 | 0.022329 | 0 | 0.157895 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
439a4be292855623a59846e0480057cdecec7d1a | 19,105 | py | Python | python3/tests/test_reports.py | CostaBru/knapsack | cdd95de759c20b0cdeef4064fbbed10df1ab76d0 | [
"MIT"
] | 1 | 2021-03-06T16:38:28.000Z | 2021-03-06T16:38:28.000Z | python3/tests/test_reports.py | CostaBru/knapsack | cdd95de759c20b0cdeef4064fbbed10df1ab76d0 | [
"MIT"
] | null | null | null | python3/tests/test_reports.py | CostaBru/knapsack | cdd95de759c20b0cdeef4064fbbed10df1ab76d0 | [
"MIT"
] | null | null | null | '''
Copyright Feb 2021 Konstantin Briukhnov (kooltew at gmail.com) (@CostaBru). San-Francisco Bay Area.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
import unittest
from datetime import datetime, timedelta
from random import random, sample, randint
import time
from tAPI import *
from flags.flags import verbose
from tests import dtNow, try_redirect_out, restore_out
from tests.helpers import sortReverseBoth
class ReportsTests(unittest.TestCase):
    def setUp(self):
        # Route this test's output via the tests helper (presumably redirects
        # stdout into a per-test report under "reports" — see tests.try_redirect_out).
        try_redirect_out("reports", self._testMethodName)

    def tearDown(self):
        # Undo whatever redirection setUp installed.
        restore_out()
    # KB knapsacks and pareto reports for [1] * 50
    # @unittest.skip("temp")
    def test_knapsacks_and_pareto_1_50(self):
        """Report run: all four solvers must agree on 50 items of size 1."""
        if verbose:
            print("KB knapsacks and pareto reports for [1] * 50")
        numbers = [1] * 50
        if verbose:
            print(f"len {len(numbers)} sum {sum(numbers)}")
        if True:
            iterCounter = [0]
            # NOTE(review): prevIters/prevPareto are assigned but never read.
            prevIters = 0
            prevPareto = 0
            if True:
                # Capacity one below the total, so the optimum must drop one item.
                s = sum(numbers) - 1
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt1, optItems1 = subsKnapsack(s, numbers, iterCounter, printPct=True)
                subsTime = time.perf_counter() - t1
                o1 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt2, optDim2, optItems2, optVal2 = knapsack(s, numbers, numbers, iterCounter, printPct=True)
                knapTime = time.perf_counter() - t1
                o2 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                paretoTime = time.perf_counter() - t1
                oP = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                paretoHTime = time.perf_counter() - t1
                oH = round(iterCounter[0])
                # All four solvers must agree on the optimal value.
                if opt1 != opt2 or opt2 != opt3 or opt1 != opt4:
                    print(f"ERROR: {opt1} - {opt2} - {opt3} - {opt4}, size {s}")
                    self.assertTrue(False)
                prevIters = o1
                prevPareto = o2
# KB knapsacks and pareto reports for ([1] * 25) + ([2] * 25)
# @unittest.skip("temp")
def test_knapsacks_and_pareto_1_25_2_25(self):
if verbose:
print("KB knapsacks and pareto reports for ([1] * 25) + ([2] * 25)")
numbers = ([1] * 25) + ([2] * 25)
if verbose:
print(f"len {len(numbers)} sum {sum(numbers)}")
if True:
iterCounter = [0]
prevIters = 0
prevPareto = 0
if True:
s = sum(numbers) - 1
iterCounter[0] = 0
opt, optItems1 = subsKnapsack(s, numbers, iterCounter, printPct=True)
o1 = round(iterCounter[0])
iterCounter[0] = 0
t1 = time.perf_counter()
opt2, optDim2, optItems2, optVal3 = knapsack(s, numbers, numbers, iterCounter, printPct=True)
knapTime = time.perf_counter() - t1
o2 = round(iterCounter[0])
iterCounter[0] = 0
opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
o2 = round(iterCounter[0])
iterCounter[0] = 0
t1 = time.perf_counter()
opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
paretoHTime = time.perf_counter() - t1
oH = round(iterCounter[0])
if opt != opt2 or opt != opt3 or opt4 != opt:
print(f"ERROR: {opt} - {opt2} - {opt3} - {opt4}, size {s}")
self.assertTrue(False)
prevIters = o1
prevPareto = o2
    # KB knapsacks and pareto reports for list(range(1, 51))
    # @unittest.skip("temp")
    def test_knapsacks_and_pareto_range_1_50(self):
        """Report run: all four solvers on the distinct sizes 1..50."""
        if verbose:
            print("KB knapsacks and pareto reports for list(range(1, 51))")
        numbers = list(range(1, 51))
        if verbose:
            print(f"len {len(numbers)} sum {sum(numbers)}")
        if True:
            iterCounter = [0]
            # NOTE(review): prevIters/prevPareto are assigned but never read.
            prevIters = 0
            prevPareto = 0
            if True:
                # Capacity one below the total sum.
                s = sum(numbers) - 1
                iterCounter[0] = 0
                opt, optItems1 = subsKnapsack(s, numbers, iterCounter, printPct=True)
                print(s)
                o1 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                # NOTE(review): optVal3 here is overwritten by the pareto call below.
                opt2, optDim2, optItems2, optVal3 = knapsack(s, numbers, numbers, iterCounter, printPct=True)
                knapTime = time.perf_counter() - t1
                o2 = round(iterCounter[0])
                iterCounter[0] = 0
                opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                o3 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                paretoHTime = time.perf_counter() - t1
                oH = round(iterCounter[0])
                # All four solvers must agree on the optimal value.
                if opt != opt2 or opt != opt3 or opt4 != opt:
                    print(f"ERROR: {opt} - {opt2} - {opt3} - {opt4}, size {s}")
                    self.assertTrue(False)
                prevIters = o1
                prevPareto = o2
    # KB knapsacks and pareto reports for random.sample(range(1, 1000), 50)
    #@unittest.skip("temp")
    def test_knapsacks_and_pareto_random_1_1000_50(self):
        """Report run: four solvers on random distinct sizes from [1, 1000)."""
        if verbose:
            print("KB knapsacks and pareto reports for random.sample(range(1, 1000), 50)")
        # NOTE(review): the method name and banner say 50 items, but only 25
        # are sampled here — confirm which is intended.
        numbers = sample(range(1, 1000), 25)
        numbers.sort()
        if verbose:
            print(f"len {len(numbers)} sum {sum(numbers)}")
        if True:
            iterCounter = [0]
            # NOTE(review): prevIters/prevPareto are assigned but never read.
            prevIters = 0
            prevPareto = 0
            if True:
                # Capacity one below the total sum.
                s = sum(numbers) - 1
                iterCounter[0] = 0
                opt, optItems1 = subsKnapsack(s, numbers, iterCounter, printPct=True)
                print(s)
                o1 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt2, optDim2, optItems2, optVal3 = knapsack(s, numbers, numbers, iterCounter, printPct=True)
                knapTime = time.perf_counter() - t1
                o2 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                parTime = time.perf_counter() - t1
                o3 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                paretoHTime = time.perf_counter() - t1
                oH = round(iterCounter[0])
                print(f"knapsack time {round(knapTime, 4)}, pareto time {round(parTime, 4)}, hybrid pareto time {round(paretoHTime, 4)}")
                # All four solvers must agree on the optimal value.
                # NOTE(review): the message below is missing a separator before
                # "size" ("{opt4}size {s}").
                if opt != opt2 or (opt != opt3) or opt4 != opt:
                    print(f"ERROR: {opt} - {opt2} - {opt3} - {opt4}size {s}")
                    self.assertTrue(False)
    # KB knapsacks and pareto reports for random.sample(range(1, 10000000000000000), 10)
    # @unittest.skip("temp")
    def test_knapsacks_and_pareto_random_1_10000000000000000_10(self):
        """Report run: four solvers on 10 huge random sizes, repeated 10 times."""
        if verbose:
            print("KB knapsacks and pareto reports for random.sample(range(1, 10000000000000000), 10)")
        numbers = sample(range(1, 10000000000000000), 10)
        numbers.sort()
        if verbose:
            print(f"len {len(numbers)} sum {sum(numbers)}")
        # NOTE(review): numbers are sampled once before this loop, so every
        # iteration benchmarks the same input; newSeed is computed but never
        # used — possibly leftover re-seeding logic.
        for i in range(10):
            newSeed = dtNow + timedelta(seconds=i)
            iterCounter = [0]
            prevIters = 0
            prevPareto = 0
            if True:
                # Capacity one below the total sum.
                s = sum(numbers) - 1
                iterCounter[0] = 0
                opt, optItems1 = subsKnapsack(s, numbers, iterCounter, printPct=True)
                print(s)
                o1 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt2, optDim2, optItems2, optVal3 = knapsack(s, numbers, numbers, iterCounter, printPct=True)
                knapTime = time.perf_counter() - t1
                o2 = round(iterCounter[0])
                iterCounter[0] = 0
                opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                o3 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                paretoHTime = time.perf_counter() - t1
                oH = round(iterCounter[0])
                # All four solvers must agree on the optimal value.
                if opt != opt2 or (opt != opt3) or opt4 != opt:
                    print(f"ERROR: {opt} - {opt2} - {opt3} - {opt4}, size {s}, numbers: {numbers}")
                    self.assertTrue(False)
                prevIters = o1
                prevPareto = o2
    # KB knapsacks and pareto reports for geometric progression numbers = [10000] * 10; numbers[i] *= (int(numbers[i - 1] * 2) - 1)
    # @unittest.skip("temp")
    def test_knapsacks_and_pareto_geometric_progression_10(self):
        """Report run: four solvers on a near-doubling progression plus a duplicate."""
        if verbose:
            print(
                "KB knapsacks and pareto reports for geometric progression numbers = [10000] * 10; numbers[i] *= (int(numbers[i - 1] * 2) - 1)")
        numbers = [10000] * 10
        # Each element is (roughly) double its predecessor minus one.
        for i in range(1, 10):
            numbers[i] = (int(numbers[i - 1] * 2) - 1)
        # Add a duplicate of the middle element to exercise repeated sizes.
        numbers.append(numbers[len(numbers) // 2])
        numbers.sort()
        if verbose:
            print("len " + str(len(numbers)))
            print("sum " + str(sum(numbers)))
        if True:
            iterCounter = [0]
            # NOTE(review): prevIters/prevPareto are assigned but never read.
            prevIters = 0
            prevPareto = 0
            if True:
                # Capacity one below the total sum.
                s = sum(numbers) - 1
                iterCounter[0] = 0
                opt1, optItems1 = subsKnapsack(s, numbers, iterCounter, printPct=True)
                o1 = round(iterCounter[0])
                print(o1)
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt2, optDim2, optItems2, optVal3 = knapsack(s, numbers, numbers, iterCounter, printPct=True)
                knapTime = time.perf_counter() - t1
                o2 = round(iterCounter[0])
                iterCounter[0] = 0
                opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                o3 = round(iterCounter[0])
                iterCounter[0] = 0
                t1 = time.perf_counter()
                opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, numbers, iterCounter, printPct=True)
                paretoHTime = time.perf_counter() - t1
                oH = round(iterCounter[0])
                # All four solvers must agree on the optimal value.
                if opt1 != opt2 or opt1 != opt3 or opt4 != opt1:
                    print(f"ERROR: {opt1} - {opt2} - {opt3} - {opt4}, size {s}")
                    self.assertTrue(False)
                prevIters = o1
                prevPareto = o2
    # KB knapsacks and pareto reports for geometric progression numbers = [1] * 10; numbers[i] *= (int(numbers[i - 1] * 2) - 1); values are random in [1..1000]
    # @unittest.skip("temp")
    def test_knapsacks_and_pareto_geometric_progression_10_values_random(self):
        """Report run: progression sizes with independent random values, three capacities."""
        if verbose:
            print(
                "reports for geometric progression numbers = [1] * 10; numbers[i] *= (int(numbers[i - 1] * 2) - 1); values are random in [1..1000]")
        numbers = [1000] * 10
        values = [1] * 10
        # Near-doubling sizes; values are random and unrelated to sizes.
        for i in range(1, 10):
            numbers[i] = (int(numbers[i - 1] * 2) - 1)
            values[i] = randint(1, 1000)
        if verbose:
            print(f"len {len(numbers)}")
            print(f"sum {sum(numbers)}")
        if True:
            iterCounter = [0]
            if True:
                # Three capacities: ~half, ~three-quarters, and one below the total.
                sumCases = [(sum(numbers) // 2) - 1, ((sum(numbers) // 4) * 3 - 1), sum(numbers) - 1, ]
                for s in sumCases:
                    print(f"case size {s}")
                    iterCounter[0] = 0
                    opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, values, iterCounter, printPct=True)
                    o3 = round(iterCounter[0])
                    iterCounter[0] = 0
                    t1 = time.perf_counter()
                    opt2, optDim2, optItems2, optVal2 = knapsack(s, numbers, values, iterCounter, printPct=True)
                    knapTime = time.perf_counter() - t1
                    o2 = round(iterCounter[0])
                    iterCounter[0] = 0
                    t1 = time.perf_counter()
                    opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, values, iterCounter, printPct=True)
                    paretoHTime = time.perf_counter() - t1
                    oH = round(iterCounter[0])
                    # The three value-aware solvers must agree for every capacity.
                    if opt2 != opt3 or opt4 != opt2:
                        print(f"ERROR: {opt2} - {opt3} - {opt4}, size {s}, numbers: {numbers}, values = {values}")
                        self.assertTrue(False)
# Exponential: reports KB NU for geometric progression numbers and values, non equals case
# @unittest.skip("temp")
def test_knapsacks_and_pareto_geometric_progression_random_non_equals_case(self):
if verbose:
print("reports KB NU for geometric progression numbers and values, non equals case")
numbers = [1000] * 10
values = [1000] * 10
for i in range(1, 10):
numbers[i] = (int(numbers[i - 1] * 2) - 1)
values[i] = (int(numbers[i - 1] * 2) - 2)
numbers.append(numbers[len(numbers) // 2])
values.append(values[len(values) // 2])
numbers, values = sortReverseBoth(numbers, values, reverse=False)
if verbose:
print(f"len {len(numbers)}")
print(f"sum {sum(numbers)}")
if True:
iterCounter = [0]
if True:
sumCases = [(sum(numbers) // 2) - 1, ((sum(numbers) // 4) * 3 - 1), sum(numbers) - 1, ]
for s in sumCases:
print(f"case size {s}")
iterCounter[0] = 0
opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, values, iterCounter, printPct=True)
o3 = round(iterCounter[0])
iterCounter[0] = 0
t1 = time.perf_counter()
opt2, optDim2, optItems2, optVal2 = knapsack(s, numbers, values, iterCounter, printPct=True)
knapTime = time.perf_counter() - t1
o2 = round(iterCounter[0])
iterCounter[0] = 0
t1 = time.perf_counter()
opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, values, iterCounter, printPct=True)
paretoHTime = time.perf_counter() - t1
oH = round(iterCounter[0])
if opt2 != opt3 or opt4 != opt2:
print(f"ERROR: {opt2} - {opt3}, size {s}, numbers: {numbers}, values = {values}")
self.assertTrue(False)
# KB knapsacks and pareto reports for 25 random numbers in range(9500, 10000), values are random in [1..1000]
# @unittest.skip("temp")
def test_knapsacks_and_pareto_geometric_progression_9500_10000_25_values_random(self):
if verbose:
print(
"KB knapsacks and pareto reports for 25 random numbers in range(9500, 10000), values are random in [1..1000]")
numbers = list(sorted(sample(range(9500, 10000), 25), reverse=True))
values = sample(range(1, 100000), 25)
if verbose:
print(f"len {len(numbers)}")
print(f"sum {sum(numbers)}")
if True:
iterCounter = [0]
if True:
sumCases = [sum(numbers) // 2, (sum(numbers) // 4) * 3, sum(numbers) - 1, ]
for s in sumCases:
print(f"case size {s}")
iterCounter[0] = 0
t1 = time.perf_counter()
opt2, optDim2, optItems2, optVal2 = knapsack(s, numbers, values, iterCounter, printPct=True)
knapTime = time.perf_counter() - t1
o2 = round(iterCounter[0])
optValSum2 = sum(optVal2)
iterCounter[0] = 0
opt3, optDim3, optItems3, optVal3 = paretoKnapsack(s, numbers, values, iterCounter, printPct=True)
o3 = round(iterCounter[0])
iterCounter[0] = 0
t1 = time.perf_counter()
opt4, optDim4, optItems4, optVal4 = hybridParetoKnapsack(s, numbers, values, iterCounter, printPct=True)
paretoHTime = time.perf_counter() - t1
oH = round(iterCounter[0])
if opt2 != opt3 or opt4 != opt2:
print(f"ERROR: {opt2} - {opt3}, size {s}, numbers: {numbers}, values = {values}")
self.assertTrue(False)
| 30.864297 | 159 | 0.536561 | 2,046 | 19,105 | 4.954545 | 0.108993 | 0.088784 | 0.062149 | 0.071027 | 0.814639 | 0.810398 | 0.795798 | 0.790964 | 0.77735 | 0.764033 | 0 | 0.059602 | 0.358911 | 19,105 | 618 | 160 | 30.914239 | 0.768044 | 0.111175 | 0 | 0.809668 | 0 | 0.039275 | 0.102501 | 0.002595 | 0 | 0 | 0 | 0 | 0.02719 | 1 | 0.033233 | false | 0 | 0.024169 | 0 | 0.060423 | 0.217523 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
43d85af089b128b40ddb1850c3d31540cee6e034 | 180 | py | Python | selene/support/jquery_style_selectors.py | KalinkinaMaria/selene | 859e1102c85740b52af8d0f08dd6b6490b4bd2ff | [
"MIT"
] | null | null | null | selene/support/jquery_style_selectors.py | KalinkinaMaria/selene | 859e1102c85740b52af8d0f08dd6b6490b4bd2ff | [
"MIT"
] | 1 | 2021-06-02T04:21:17.000Z | 2021-06-02T04:21:17.000Z | selene/support/jquery_style_selectors.py | vkarpenko/selene | 4776357430c940be38f38be9981006dd156f9730 | [
"MIT"
] | null | null | null | from selene import browser
def s(css_selector_or_by):
    """jQuery-style shorthand: delegate to ``browser.element`` for a single element."""
    return browser.element(css_selector_or_by)


def ss(css_selector_or_by):
    """jQuery-style shorthand: delegate to ``browser.elements`` for a collection."""
    return browser.elements(css_selector_or_by)
78eefa530a17341c1aa02ae57e562316562f37b5 | 1,950 | py | Python | lib/bx/phylo/phast_tests.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | lib/bx/phylo/phast_tests.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | lib/bx/phylo/phast_tests.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | """
Tests for `bx.phylo.phast`.
"""
import unittest
from numpy import *
from six import StringIO
from bx.phylo.phast import TreeModel
test_data = """ALPHABET: A C G T -
ORDER: 0
SUBST_MOD: HKY85+Gap
TRAINING_LNL: -178667772.836697
BACKGROUND: 0.227006 0.169993 0.169307 0.227262 0.206432
RATE_MAT:
-0.971735 0.122443 0.465361 0.163692 0.220238
0.163508 -1.130351 0.121949 0.624656 0.220238
0.623952 0.122443 -1.130326 0.163692 0.220238
0.163508 0.467247 0.121949 -0.972942 0.220238
0.242187 0.181362 0.180630 0.242461 -0.846640
TREE: ((((((hg16:0.007738,panTro1:0.008356):0.027141,(baboon:0.009853,rheMac1:0.010187):0.035049):0.103138,galago:0.174770):0.019102,((rn3:0.092633,mm6:0.089667):0.273942,rabbit:0.230839):0.021927):0.023762,(canFam1:0.204637,(elephant:0.123777,tenrec:0.278910):0.085977):0.009439):0.306466,monDom1:0.401151)mammals;
"""
def test_parser():
    # Parse the sample HKY85+Gap tree-model text and check every parsed field.
    tm = TreeModel.from_file( StringIO( test_data ) )
    assert tm.alphabet == ( 'A', 'C', 'G', 'T', '-' )
    assert tm.order == 0
    assert tm.subst_mod == "HKY85+Gap"
    # Float fields are compared with numpy.allclose to tolerate parse rounding.
    assert allclose( tm.background, [ 0.227006, 0.169993, 0.169307, 0.227262, 0.206432 ] )
    assert allclose( tm.matrix, array(
        [ [ -0.971735, 0.122443, 0.465361, 0.163692, 0.220238 ],
          [ 0.163508, -1.130351, 0.121949, 0.624656, 0.220238 ],
          [ 0.623952, 0.122443, -1.130326, 0.163692, 0.220238 ],
          [ 0.163508, 0.467247, 0.121949, -0.972942, 0.220238 ],
          [ 0.242187, 0.181362, 0.180630, 0.242461, -0.846640 ] ] ) )
    # The Newick tree string must be preserved verbatim.
    assert tm.tree == "((((((hg16:0.007738,panTro1:0.008356):0.027141,(baboon:0.009853,rheMac1:0.010187):0.035049):0.103138,galago:0.174770):0.019102,((rn3:0.092633,mm6:0.089667):0.273942,rabbit:0.230839):0.021927):0.023762,(canFam1:0.204637,(elephant:0.123777,tenrec:0.278910):0.085977):0.009439):0.306466,monDom1:0.401151)mammals;"
| 51.315789 | 333 | 0.641538 | 314 | 1,950 | 3.958599 | 0.318471 | 0.045052 | 0.051488 | 0.045052 | 0.751408 | 0.7321 | 0.7321 | 0.7321 | 0.7321 | 0.7321 | 0 | 0.476933 | 0.177436 | 1,950 | 37 | 334 | 52.702703 | 0.298005 | 0.013846 | 0 | 0 | 0 | 0.066667 | 0.571802 | 0.322715 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.033333 | false | 0 | 0.133333 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6015053760a1f3bb74dcf82dc1c9d4014a2b262e | 10,147 | py | Python | training/loss_vae_lie.py | zhuxinqimac/stylegan2 | 5c3bda161ead21ea290de4190d3704e59cf6de64 | [
"BSD-Source-Code"
] | 5 | 2020-01-23T10:04:27.000Z | 2021-07-04T09:51:28.000Z | training/loss_vae_lie.py | zhuxinqimac/stylegan2 | 5c3bda161ead21ea290de4190d3704e59cf6de64 | [
"BSD-Source-Code"
] | null | null | null | training/loss_vae_lie.py | zhuxinqimac/stylegan2 | 5c3bda161ead21ea290de4190d3704e59cf6de64 | [
"BSD-Source-Code"
] | null | null | null | #!/usr/bin/python
#-*- coding: utf-8 -*-
# >.>.>.>.>.>.>.>.>.>.>.>.>.>.>.>.
# Licensed under the Apache License, Version 2.0 (the "License")
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# --- File Name: loss_vae_lie.py
# --- Creation Date: 21-09-2020
# --- Last Modified: Sun 27 Sep 2020 18:05:08 AEST
# --- Author: Xinqi Zhu
# .<.<.<.<.<.<.<.<.<.<.<.<.<.<.<.<
"""
Loss of LieVAE.
"""
import numpy as np
import math
import tensorflow as tf
import dnnlib.tflib as tflib
from dnnlib.tflib.autosummary import autosummary
from training.utils import get_return_v
from training.loss_vae import sample_from_latent_distribution
from training.loss_vae import make_reconstruction_loss
from training.loss_vae import compute_gaussian_kl
from training.loss_vae import split_latents
def make_lie_group_loss_with_split(group_feats_E, group_feats_G, lie_alg_feats, lie_alg_basis,
                                   minibatch_size, hy_rec, hy_dcp, hy_hes, hy_lin,
                                   hy_ncut):
    """Lie-group regularisation loss for the split-latent LieVAE.

    Fix: removed a large block of dead computation (the per-split group and
    Lie-algebra products) that fed only commented-out loss variants, plus a
    ``mat_dim`` assignment that was immediately overwritten.

    Args:
        group_feats_E: encoder group features, [minibatch_size, mat_dim, mat_dim],
            or None to disable the reconstruction term.
        group_feats_G: decoder group features; the first ``minibatch_size``
            rows belong to the unsplit latents.
        lie_alg_feats: per-sample Lie-algebra features (unused; kept for
            interface parity with the other loss variants).
        lie_alg_basis: learned basis tensor, shape [1, lat_dim, mat_dim, mat_dim].
        minibatch_size: number of samples in the unsplit batch.
        hy_rec, hy_dcp, hy_hes, hy_lin: weights of the four loss terms.
        hy_ncut: number of latent splits (unused; kept for interface parity).

    Returns:
        Scalar tensor: hy_rec*rec + hy_dcp*split + hy_hes*hessian + hy_lin*linear.
    """
    # Group features decoded from the unsplit latent codes.
    gfeats_G = group_feats_G[:minibatch_size]

    # Pairwise products of basis matrices over latent dimensions:
    # [lat_dim, lat_dim, mat_dim, mat_dim] via broadcasting.
    _, lat_dim, mat_dim, _ = lie_alg_basis.get_shape().as_list()
    lie_alg_basis_col = tf.reshape(lie_alg_basis, [lat_dim, 1, mat_dim, mat_dim])
    lie_alg_basis_mul = tf.matmul(lie_alg_basis, lie_alg_basis_col)

    # Zero out the diagonal (i == j) pairs so only cross-terms are penalised.
    lie_alg_basis_mask = 1. - tf.eye(
        lat_dim, dtype=lie_alg_basis_mul.dtype)[:, :, tf.newaxis, tf.newaxis]
    lie_alg_basis_mul = lie_alg_basis_mul * lie_alg_basis_mask

    # Elementwise (Hadamard) cross-products for the linear term, off-diagonal only.
    lie_alg_basis_linear = lie_alg_basis * lie_alg_basis_col * lie_alg_basis_mask

    # Encoder vs decoder group-feature reconstruction term.
    if group_feats_E is None:
        rec_loss = 0
    else:
        rec_loss = tf.reduce_mean(
            tf.reduce_sum(tf.square(group_feats_E - gfeats_G), axis=[1, 2]))

    # Commutativity penalty: basis products should be symmetric in (i, j).
    spl_loss = tf.reduce_mean(
        tf.square(lie_alg_basis_mul -
                  tf.transpose(lie_alg_basis_mul, perm=[1, 0, 2, 3])))
    # Hessian penalty: discourage interaction between different basis matrices.
    hessian_loss = tf.reduce_mean(tf.square(lie_alg_basis_mul))
    # Linear penalty on the elementwise cross-products.
    linear_loss = tf.reduce_mean(tf.square(lie_alg_basis_linear))

    loss = hy_rec * rec_loss + hy_dcp * spl_loss + \
        hy_hes * hessian_loss + hy_lin * linear_loss
    return loss
def lie_vae_with_split(E,
                       G,
                       opt,
                       training_set,
                       minibatch_size,
                       reals,
                       labels,
                       latent_type='normal',
                       hy_dcp=1,
                       hy_hes=0,
                       hy_lin=0,
                       hy_ncut=1,
                       hy_rec=1,
                       recons_type='bernoulli_loss'):
    """LieVAE training loss with latent splitting.

    Encodes ``reals`` with E, samples latents, additionally decodes split
    variants of the samples, and combines the ELBO with the Lie-group
    regularisation terms.

    NOTE(review): ``latent_type`` is accepted but not used here.
    """
    _ = opt, training_set  # unused; kept for the trainer's loss-fn signature
    # Encoder outputs: posterior mean/log-variance and flattened group features.
    means, log_var, group_feats_E = get_return_v(
        E.get_output_for(reals, labels, is_training=True), 3)
    kl_loss = compute_gaussian_kl(means, log_var)
    kl_loss = autosummary('Loss/kl_loss', kl_loss)

    # Group features are square matrices flattened by the encoder.
    mat_dim = int(math.sqrt(group_feats_E.get_shape().as_list()[1]))
    assert mat_dim * mat_dim == group_feats_E.get_shape().as_list()[1]
    group_feats_E = tf.reshape(group_feats_E,
                               [minibatch_size, mat_dim, mat_dim])

    sampled = sample_from_latent_distribution(means, log_var)
    # Build the split variants and decode everything in a single G pass.
    sampled_split_ls = split_latents(sampled, minibatch_size, hy_ncut=hy_ncut)
    sampled_split = tf.concat(sampled_split_ls, axis=0)
    labels_split = tf.concat([labels] * len(sampled_split_ls), axis=0)
    sampled_all = tf.concat([sampled, sampled_split], axis=0)
    labels_all = tf.concat([labels, labels_split], axis=0)
    reconstructions, group_feats_G, _, _, lie_alg_feats, lie_alg_basis = get_return_v(
        G.get_output_for(sampled_all, labels_all, is_training=True), 6)

    lie_group_loss = make_lie_group_loss_with_split(group_feats_E, group_feats_G,
                                                    lie_alg_feats, lie_alg_basis, minibatch_size, hy_rec,
                                                    hy_dcp, hy_hes, hy_lin, hy_ncut)
    lie_group_loss = autosummary('Loss/lie_group_loss', lie_group_loss)

    # The reconstruction term uses only the unsplit (first minibatch_size) outputs.
    reconstruction_loss = make_reconstruction_loss(
        reals, reconstructions[:minibatch_size], recons_type=recons_type)
    reconstruction_loss = autosummary('Loss/recons_loss', reconstruction_loss)

    elbo = reconstruction_loss + kl_loss
    elbo = autosummary('Loss/lie_vae_elbo', elbo)
    loss = elbo + lie_group_loss
    loss = autosummary('Loss/lie_vae_loss', loss)
    return loss
def make_lie_group_loss_all(group_feats_E, group_feats_G, lie_alg_feats, lie_alg_basis,
                            minibatch_size, hy_rec, hy_dcp, hy_hes, hy_lin,
                            hy_ncut):
    """Full Lie-group regularisation loss: rec + split + hessian + linear terms.

    Fix: removed a dead ``mat_dim`` assignment that was immediately
    overwritten by the shape unpacking below.

    Args:
        group_feats_E: encoder group features, or None to disable the rec term.
        group_feats_G: decoder group features (same shape as group_feats_E).
        lie_alg_feats: unused; kept for interface parity.
        lie_alg_basis: learned basis tensor, shape [1, lat_dim, mat_dim, mat_dim].
        minibatch_size, hy_ncut: unused; kept for interface parity.
        hy_rec, hy_dcp, hy_hes, hy_lin: weights of the four loss terms.

    Returns:
        Scalar weighted sum of the four terms.
    """
    # Pairwise basis-matrix products over latent dims:
    # [lat_dim, lat_dim, mat_dim, mat_dim] via broadcasting.
    _, lat_dim, mat_dim, _ = lie_alg_basis.get_shape().as_list()
    lie_alg_basis_col = tf.reshape(lie_alg_basis, [lat_dim, 1, mat_dim, mat_dim])
    lie_alg_basis_mul = tf.matmul(lie_alg_basis, lie_alg_basis_col)
    # Mask keeps only off-diagonal (i != j) pairs for the matrix products...
    lie_alg_basis_mask = 1. - tf.eye(
        lat_dim, dtype=lie_alg_basis_mul.dtype)[:, :, tf.newaxis, tf.newaxis]
    lie_alg_basis_mul = lie_alg_basis_mul * lie_alg_basis_mask
    # ...while the elementwise (linear) term keeps only the diagonal (i == j) pairs.
    lie_alg_basis_linear = lie_alg_basis * lie_alg_basis_col
    lie_alg_basis_linear = lie_alg_basis_linear * (1. - lie_alg_basis_mask)

    # Encoder vs decoder group-feature reconstruction.
    if group_feats_E is None:
        rec_loss = 0
    else:
        rec_loss = tf.reduce_mean(
            tf.reduce_sum(tf.square(group_feats_E - group_feats_G), axis=[1, 2]))
        rec_loss = autosummary('Loss/lie_vae_rec_loss', rec_loss)

    # Commutativity penalty: products should be symmetric in (i, j).
    spl_loss = tf.reduce_sum(tf.square(lie_alg_basis_mul - tf.transpose(lie_alg_basis_mul, perm=[1, 0, 2, 3])))
    spl_loss = autosummary('Loss/lie_vae_spl_loss', spl_loss)
    hessian_loss = tf.reduce_sum(tf.square(lie_alg_basis_mul))
    hessian_loss = autosummary('Loss/lie_vae_hessian_loss', hessian_loss)
    linear_loss = tf.reduce_sum(tf.square(lie_alg_basis_linear))
    linear_loss = autosummary('Loss/lie_vae_linear_loss', linear_loss)

    loss = hy_rec * rec_loss + hy_dcp * spl_loss + \
        hy_hes * hessian_loss + hy_lin * linear_loss
    return loss
def make_lie_group_loss(group_feats_E, group_feats_G, lie_alg_feats, lie_alg_basis,
                        minibatch_size, hy_rec, hy_dcp, hy_hes, hy_lin,
                        hy_ncut):
    """Reduced Lie-group regularisation loss: hessian + linear terms only.

    Fix: removed a dead ``mat_dim`` assignment that was immediately
    overwritten by the shape unpacking below.

    Only ``lie_alg_basis``, ``hy_hes`` and ``hy_lin`` are used; the other
    parameters are kept so this function is interchangeable with the other
    loss variants.

    Returns:
        Scalar tensor: hy_hes * hessian + hy_lin * linear.
    """
    # Pairwise basis-matrix products over latent dims:
    # [lat_dim, lat_dim, mat_dim, mat_dim] via broadcasting.
    _, lat_dim, mat_dim, _ = lie_alg_basis.get_shape().as_list()
    lie_alg_basis_col = tf.reshape(lie_alg_basis, [lat_dim, 1, mat_dim, mat_dim])
    lie_alg_basis_mul = tf.matmul(lie_alg_basis, lie_alg_basis_col)
    # Mask keeps only off-diagonal (i != j) pairs for the matrix products...
    lie_alg_basis_mask = 1. - tf.eye(
        lat_dim, dtype=lie_alg_basis_mul.dtype)[:, :, tf.newaxis, tf.newaxis]
    lie_alg_basis_mul = lie_alg_basis_mul * lie_alg_basis_mask
    # ...while the elementwise (linear) term keeps only the diagonal (i == j) pairs.
    lie_alg_basis_linear = lie_alg_basis * lie_alg_basis_col
    lie_alg_basis_linear = lie_alg_basis_linear * (1. - lie_alg_basis_mask)

    hessian_loss = tf.reduce_sum(tf.square(lie_alg_basis_mul))
    hessian_loss = autosummary('Loss/lie_vae_hessian_loss', hessian_loss)
    linear_loss = tf.reduce_sum(tf.square(lie_alg_basis_linear))
    linear_loss = autosummary('Loss/lie_vae_linear_loss', linear_loss)

    loss = hy_hes * hessian_loss + hy_lin * linear_loss
    return loss
def lie_vae(E,
            G,
            opt,
            training_set,
            minibatch_size,
            reals,
            labels,
            latent_type='normal',
            hy_dcp=1,
            hy_hes=0,
            hy_lin=0,
            hy_ncut=1,
            hy_rec=1,
            recons_type='bernoulli_loss'):
    """LieVAE training loss without latent splitting: ELBO + Lie-group terms.

    NOTE(review): ``latent_type`` is accepted but not used here.
    """
    _ = opt, training_set  # unused; kept for the trainer's loss-fn signature
    # Encoder outputs: posterior mean/log-variance and flattened group features.
    means, log_var, group_feats_E = get_return_v(
        E.get_output_for(reals, labels, is_training=True), 3)
    kl_loss = compute_gaussian_kl(means, log_var)
    kl_loss = autosummary('Loss/kl_loss', kl_loss)

    # Group features are square matrices flattened by the encoder.
    mat_dim = int(math.sqrt(group_feats_E.get_shape().as_list()[1]))
    assert mat_dim * mat_dim == group_feats_E.get_shape().as_list()[1]
    group_feats_E = tf.reshape(group_feats_E,
                               [minibatch_size, mat_dim, mat_dim])

    sampled = sample_from_latent_distribution(means, log_var)
    reconstructions, group_feats_G, _, _, lie_alg_feats, lie_alg_basis = get_return_v(
        G.get_output_for(sampled, labels, is_training=True), 6)

    lie_group_loss = make_lie_group_loss(group_feats_E, group_feats_G,
                                         lie_alg_feats, lie_alg_basis, minibatch_size, hy_rec,
                                         hy_dcp, hy_hes, hy_lin, hy_ncut)
    lie_group_loss = autosummary('Loss/lie_group_loss', lie_group_loss)

    reconstruction_loss = make_reconstruction_loss(
        reals, reconstructions[:minibatch_size], recons_type=recons_type)
    reconstruction_loss = autosummary('Loss/recons_loss', reconstruction_loss)

    elbo = reconstruction_loss + kl_loss
    elbo = autosummary('Loss/lie_vae_elbo', elbo)
    loss = elbo + lie_group_loss
    loss = autosummary('Loss/lie_vae_loss', loss)
    return loss
| 42.456067 | 112 | 0.677245 | 1,562 | 10,147 | 3.9379 | 0.095391 | 0.086815 | 0.121606 | 0.043245 | 0.872378 | 0.837913 | 0.822468 | 0.816127 | 0.801496 | 0.792554 | 0 | 0.011869 | 0.219474 | 10,147 | 238 | 113 | 42.634454 | 0.764773 | 0.087809 | 0 | 0.715909 | 0 | 0 | 0.037065 | 0.015173 | 0 | 0 | 0 | 0 | 0.011364 | 1 | 0.028409 | false | 0 | 0.056818 | 0 | 0.113636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60279f76607109099dcb4ad9a6841c3bc50c043f | 1,029 | py | Python | scripts/run.py | Abrahm1234/ESRGAN-PyTorch | 537e856aef795e05d800ac62094aed76c6553660 | [
"Apache-2.0"
] | null | null | null | scripts/run.py | Abrahm1234/ESRGAN-PyTorch | 537e856aef795e05d800ac62094aed76c6553660 | [
"Apache-2.0"
] | null | null | null | scripts/run.py | Abrahm1234/ESRGAN-PyTorch | 537e856aef795e05d800ac62094aed76c6553660 | [
"Apache-2.0"
] | null | null | null | import os
# Prepare dataset
os.system("python ./prepare_dataset.py --images_dir ../data/DFO2K/original/train --output_dir ../data/DFO2K/ESRGAN/train --image_size 204 --step 102 --num_workers 10")
os.system("python ./prepare_dataset.py --images_dir ../data/DFO2K/original/valid --output_dir ../data/DFO2K/ESRGAN/valid --image_size 204 --step 102 --num_workers 10")
# Create LMDB database file
os.system("python ./create_lmdb_dataset.py --images_dir ../data/DFO2K/ESRGAN/train --lmdb_path ../data/train_lmdb/ESRGAN/DFO2K_HR_lmdb --upscale_factor 1")
os.system("python ./create_lmdb_dataset.py --images_dir ../data/DFO2K/ESRGAN/train --lmdb_path ../data/train_lmdb/ESRGAN/DFO2K_LRbicx4_lmdb --upscale_factor 4")
os.system("python ./create_lmdb_dataset.py --images_dir ../data/DFO2K/ESRGAN/valid --lmdb_path ../data/valid_lmdb/ESRGAN/DFO2K_HR_lmdb --upscale_factor 1")
os.system("python ./create_lmdb_dataset.py --images_dir ../data/DFO2K/ESRGAN/valid --lmdb_path ../data/valid_lmdb/ESRGAN/DFO2K_LRbicx4_lmdb --upscale_factor 4")
| 79.153846 | 167 | 0.771623 | 160 | 1,029 | 4.7 | 0.21875 | 0.074468 | 0.12766 | 0.143617 | 0.928191 | 0.851064 | 0.851064 | 0.851064 | 0.702128 | 0.702128 | 0 | 0.035417 | 0.067055 | 1,029 | 12 | 168 | 85.75 | 0.747917 | 0.039845 | 0 | 0 | 0 | 0.857143 | 0.899492 | 0.481218 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.142857 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
602e12d92ed18012354628bc089351d91eeeb560 | 304 | py | Python | exp04_ifelif.py | psb2509/learning-python3 | 38bd16f85d458b69ed677c72315c1023f83afc3d | [
"MIT"
] | null | null | null | exp04_ifelif.py | psb2509/learning-python3 | 38bd16f85d458b69ed677c72315c1023f83afc3d | [
"MIT"
] | 4 | 2018-09-09T16:47:46.000Z | 2018-09-10T12:18:43.000Z | exp04_ifelif.py | psb2509/learning-python3 | 38bd16f85d458b69ed677c72315c1023f83afc3d | [
"MIT"
] | null | null | null | name="Pradeep"
if name is 'Krishna':print("hi Krishna");
elif name is 'Pradeep':print("Hi Pradeep");
else:print("Who are you ?")
name="Krishna"
if name is 'Krishna':
print("hi Krishna");
elif name is 'Pradeep':
print("Hi Pradeep");
else:
print("Who are you ?") | 21.714286 | 48 | 0.585526 | 42 | 304 | 4.238095 | 0.285714 | 0.134831 | 0.089888 | 0.168539 | 0.876404 | 0.876404 | 0.876404 | 0.876404 | 0.876404 | 0.876404 | 0 | 0 | 0.25 | 304 | 14 | 49 | 21.714286 | 0.780702 | 0 | 0 | 0 | 0 | 0 | 0.369863 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.545455 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
604bddfc52aecdf49f70f3d7a8b449d2f5232ab2 | 10,415 | py | Python | scripts/9_simple_model_of_boom_and_bust.py | spatchcock/monetary_economics_python | 4c477b4bce419e984595f5b7b6c62bcbfb38168d | [
"MIT"
] | 2 | 2020-05-31T11:26:00.000Z | 2022-02-22T20:51:32.000Z | scripts/9_simple_model_of_boom_and_bust.py | spatchcock/monetary_economics_python | 4c477b4bce419e984595f5b7b6c62bcbfb38168d | [
"MIT"
] | 4 | 2015-08-09T21:16:06.000Z | 2015-08-09T21:23:32.000Z | scripts/9_simple_model_of_boom_and_bust.py | spatchcock/monetary_economics_python | 4c477b4bce419e984595f5b7b6c62bcbfb38168d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# This script describes the iteration of a simple economic model examining changing
# private sector spending behaviour. It is described in the accompanying iPython
# Notebook and at
#
# http://misunderheard.org/monetary_economics/2017/07/29/simple-model-of-boom-and-bust/
#
#%% Include libraries
import matplotlib.pyplot as plt
import numpy as np
# number of time steps to simulate
N = 100
# exogenous variables (held fixed through the run)
G = 20 # government spending
theta = 0.2 # tax rate
alpha_H = 0.2 # propensity to spend out of saved wealth
# endogenous variables, one value per time step (index 0 holds the initial state)
Y = np.zeros(N) # income
T = np.zeros(N) # tax revenue
C = np.zeros(N) # consumption
H_h = np.zeros(N) # private savings
H_g = np.zeros(N) # government balance
#%% define propensity to consume time series
# alpha_Y varies through time: households spend 90% of disposable income for
# the first 10 steps, then permanently drop to 80% (increased saving).
alpha_Y = np.zeros(N)
alpha_Y[0:10] = 0.9 # set the first 10 elements
alpha_Y[10:N] = 0.8 # set the remainder of the elements
print(alpha_Y[0:15])
#%% set initial conditions
# starting state chosen so the t=0 accounting is consistent:
# Y = C + T (100 = 80 + 20) and private savings mirror the government deficit.
Y[0] = 100
C[0] = 80
T[0] = 20
H_h[0] = 40
H_g[0] = -40
#%% run model
# Iterate the model's difference equations over steps 1..N-1; step 0 holds the
# initial conditions and is never recomputed.
for step in range(1, N):
    prior_savings = H_h[step - 1]  # wealth carried over from the previous step
    # total income for this time step (equation 1)
    Y[step] = (G + alpha_H * prior_savings) / (1 - alpha_Y[step] * (1 - theta))
    # tax paid on income for this time step (3)
    T[step] = theta * Y[step]
    # consumption spending for this time step (4)
    C[step] = alpha_Y[step] * (1 - theta) * Y[step] + alpha_H * prior_savings
    # new level of private savings for this time step (5)
    H_h[step] = prior_savings + Y[step] - T[step] - C[step]
    # new level of the government money balance (6)
    H_g[step] = H_g[step - 1] + T[step] - G
#%% plot aggregates
# NOTE: plt.xlabel/plt.ylabel act on the most recently created axes, which is
# why each label call immediately follows its add_subplot.
# initialise plot figure
fig = plt.figure(figsize=(12, 4))
# plot government spending (G) through time
gov_plot = fig.add_subplot(131, xlim=(0, N), ylim=(0, 120)) # set axis limits
gov_plot.plot(range(N), np.repeat(G,N), lw=3) # plot constant G versus time
gov_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('government spending') # label y axis
# plot consumption spending (C) through time
consumption_plot = fig.add_subplot(132, xlim=(0, N), ylim=(0, 120)) # set axis limits
consumption_plot.plot(range(N), C, lw=3) # plot C versus time
consumption_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('consumption') # label y axis
# plot aggregate income (Y) through time
income_plot = fig.add_subplot(133, xlim=(0, N), ylim=(0, 120)) # set axis limits
income_plot.plot(range(N), Y, lw=3) # plot Y versus time
income_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('income') # label y axis
plt.tight_layout() # space subplots neatly
#%% plot government
# government spending (constant) next to the tax revenue it induces
# initialise plot figure
fig = plt.figure(figsize=(8, 4))
gov_plot = fig.add_subplot(121, xlim=(0, N), ylim=(0, np.max(G)*1.5)) # set axis limits
gov_plot.plot(range(N), np.repeat(G,N), lw=3) # plot constant G versus time
gov_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('government spending') # label y axis
tax_plot = fig.add_subplot(122, xlim=(0, N), ylim=(0, np.max(G)*1.5)) # set axis limits
tax_plot.plot(range(N), T, lw=3) # plot tax revenue versus time
tax_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('tax revenue') # label y axis
plt.tight_layout() # space subplots neatly
#%% plot sectoral balances
# per-step budget positions (flows) and cumulative money balances (stocks);
# the two sectors mirror each other by construction of equations (5) and (6)
# initialise plot figure
fig = plt.figure(figsize=(8, 4))
budget_plot = fig.add_subplot(121, xlim=(0, N), ylim=(-10, 10)) # set axis limits
budget_plot.plot(range(N), T-np.repeat(G,N), lw=3, label='Government') # plot gov budget versus time
budget_plot.plot(range(N), Y-T-C, lw=3, label='Private sector') # plot private budget versus time
budget_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('budget position') # label y axis
plt.legend(loc='upper right')
balance_plot = fig.add_subplot(122, xlim=(0, N), ylim=(np.min(H_g), np.max(H_h))) # set axis limits
balance_plot.plot(range(N), H_g, lw=3, label='Government') # plot gov balance versus time
balance_plot.plot(range(N), H_h, lw=3, label='Private sector') # plot private balance versus time
balance_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('money balance') # label y axis
plt.legend(loc='center right')
plt.tight_layout() # space subplots neatly
#%% reset propensity to consume time series
# Scenario 2: spending propensity dips from 0.9 to 0.8 between steps 10 and 50
# and then recovers — the temporary shift that drives the boom-and-bust cycle.
alpha_Y[0:10] = 0.9
alpha_Y[10:50] = 0.8
alpha_Y[50:N] = 0.9
#%% run model
# Re-run the same difference equations with the revised alpha_Y profile.
# Element 0 of every series still holds the original initial conditions, so
# no explicit reset is needed before iterating from step 1.
for step in range(1, N):
    carried_wealth = H_h[step - 1]
    # total income for this time step (equation 1)
    Y[step] = (G + alpha_H * carried_wealth) / (1 - alpha_Y[step] * (1 - theta))
    # tax paid on income for this time step (3)
    T[step] = theta * Y[step]
    # consumption spending for this time step (4)
    C[step] = alpha_Y[step] * (1 - theta) * Y[step] + alpha_H * carried_wealth
    # new level of private savings for this time step (5)
    H_h[step] = carried_wealth + Y[step] - T[step] - C[step]
    # new level of the government money balance (6)
    H_g[step] = H_g[step - 1] + T[step] - G
#%% plot aggregates
# Same three panels as scenario 1 but with ylim raised to 130 to fit the boom.
# initialise plot figure
fig = plt.figure(figsize=(12, 4))
# plot government spending (G) through time
gov_plot = fig.add_subplot(131, xlim=(0, N), ylim=(0, 130)) # set axis limits
gov_plot.plot(range(N), np.repeat(G,N), lw=3) # plot constant G versus time
gov_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('government spending') # label y axis
# plot consumption spending (C) through time
consumption_plot = fig.add_subplot(132, xlim=(0, N), ylim=(0, 130)) # set axis limits
consumption_plot.plot(range(N), C, lw=3) # plot C versus time
consumption_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('consumption') # label y axis
# plot aggregate income (Y) through time
income_plot = fig.add_subplot(133, xlim=(0, N), ylim=(0, 130)) # set axis limits
income_plot.plot(range(N), Y, lw=3) # plot Y versus time
income_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('income') # label y axis
plt.tight_layout() # space subplots neatly
#%% plot government
# initialise plot figure
fig = plt.figure(figsize=(8, 4))
gov_plot = fig.add_subplot(121, xlim=(0, N), ylim=(0, np.max(G)*1.5)) # set axis limits
gov_plot.plot(range(N), np.repeat(G,N), lw=3) # plot constant G versus time
gov_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('government spending') # label y axis
tax_plot = fig.add_subplot(122, xlim=(0, N), ylim=(0, np.max(G)*1.5)) # set axis limits
tax_plot.plot(range(N), T, lw=3) # plot tax revenue versus time
tax_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('tax revenue') # label y axis
plt.tight_layout() # space subplots neatly
#%% plot sectoral balances
# flows (per-step budget positions) and stocks (cumulative balances) again
# initialise plot figure
fig = plt.figure(figsize=(8, 4))
budget_plot = fig.add_subplot(121, xlim=(0, N), ylim=(-10, 10)) # set axis limits
budget_plot.plot(range(N), T-np.repeat(G,N), lw=3, label='Government') # plot gov budget versus time
budget_plot.plot(range(N), Y-T-C, lw=3, label='Private sector') # plot private budget versus time
budget_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('budget position') # label y axis
plt.legend(loc='upper right')
balance_plot = fig.add_subplot(122, xlim=(0, N), ylim=(np.min(H_g), np.max(H_h))) # set axis limits
balance_plot.plot(range(N), H_g, lw=3, label='Government') # plot gov balance versus time
balance_plot.plot(range(N), H_h, lw=3, label='Private sector') # plot private balance versus time
balance_plot.grid() # add gridlines
plt.xlabel('time') # label x axis
plt.ylabel('money balance') # label y axis
plt.legend(loc='center right')
plt.tight_layout() # space subplots neatly
#%%
| 43.945148 | 118 | 0.510322 | 1,353 | 10,415 | 3.849224 | 0.121212 | 0.02957 | 0.044931 | 0.048387 | 0.855991 | 0.855991 | 0.851767 | 0.851767 | 0.851767 | 0.84831 | 0 | 0.034205 | 0.379645 | 10,415 | 236 | 119 | 44.131356 | 0.771862 | 0.306961 | 0 | 0.784 | 0 | 0 | 0.054504 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.016 | 0 | 0.016 | 0.008 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60512200742eda46be471e8b06fe188dabffa5fe | 15,165 | py | Python | vilemr_aws_deepracer_template/tests/test_reward.py | tamersalama/aws-deepracer-workshops | b57b75216218a4245fe3e7f853fd4900cd6e0346 | [
"MIT-0"
] | null | null | null | vilemr_aws_deepracer_template/tests/test_reward.py | tamersalama/aws-deepracer-workshops | b57b75216218a4245fe3e7f853fd4900cd6e0346 | [
"MIT-0"
] | null | null | null | vilemr_aws_deepracer_template/tests/test_reward.py | tamersalama/aws-deepracer-workshops | b57b75216218a4245fe3e7f853fd4900cd6e0346 | [
"MIT-0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
This is the test code used to test implemented methods and function in the ../reward_function.py file. The set of
unit test is optional for you to use. You will not use it for purpose of training in AWS console.
"""
import math
import unittest
from parms.parms import get_copy_of_params as get_test_params
from reward_function import RewardEvaluator
class RewardEvaluatorTestCase(unittest.TestCase):
    """Unit tests for RewardEvaluator.

    Each test copies the baseline params from get_test_params(), tweaks the
    fields under test, builds a fresh RewardEvaluator and asserts on a single
    method.  The print_* methods are manual debugging helpers (not collected
    by unittest) that dump one value per track waypoint.

    NOTE(review): the local name ``re`` used throughout shadows the stdlib
    ``re`` module name (not imported here, so harmless but worth knowing).
    """

    def test_get_waypoint(self):
        """Waypoint lookup wraps around past either end of the list."""
        params_test = get_test_params()
        params_test['waypoints'] = [(0, 0), (1, 0), (2, 0), (3, 3)]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_way_point(0), (0, 0))
        self.assertEqual(re.get_way_point(1), (1, 0))
        self.assertEqual(re.get_way_point(2), (2, 0))
        self.assertEqual(re.get_way_point(3), (3, 3))
        # indices beyond the end wrap to the start...
        self.assertEqual(re.get_way_point(4), (0, 0))
        self.assertEqual(re.get_way_point(5), (1, 0))
        # ...and negative indices wrap to the end
        self.assertEqual(re.get_way_point(-1), (3, 3))
        self.assertEqual(re.get_way_point(-2), (2, 0))
        self.assertEqual(re.get_way_point(-3), (1, 0))

    def test_get_way_points_distance(self):
        """Distance between two waypoints is plain Euclidean distance."""
        params_test = get_test_params()
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_way_points_distance((0, 0), (2, 0)), 2)
        self.assertEqual(re.get_way_points_distance((0, 0), (2, 2)), math.sqrt(8))
        self.assertEqual(re.get_way_points_distance((-2, 4), (-4, 2)), math.sqrt(8))
        self.assertEqual(re.get_way_points_distance((0, 0), (1, 0)), 1)

    def test_get_heading_between_waypoints(self):
        """Heading is in degrees, measured from +x, in the range (-180, 180]."""
        params_test = get_test_params()
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_heading_between_waypoints((0, 0), (2, 0)), 0)
        self.assertEqual(re.get_heading_between_waypoints((0, 0), (0, 2)), 90)
        self.assertEqual(re.get_heading_between_waypoints((0, 0), (0, -2)), -90)
        self.assertEqual(re.get_heading_between_waypoints((0, 0), (2, 2)), 45)
        self.assertEqual(re.get_heading_between_waypoints((0, 0), (-2, -2)), -135)

    def test_get_car_heading_error(self):
        """Heading error vs. the segment between the two closest waypoints.

        Car heading is fixed at 0; each case selects a different track segment
        of a square-ish course and checks the signed error in degrees.
        """
        params_test = get_test_params()
        params_test['heading'] = 0
        params_test['waypoints'] = [(0, 0), (2, 0), (2, 2), (0, 2), (0, 0), (2, 2), (4, 0)]
        params_test['closest_waypoints'] = [0, 1]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_car_heading_error(), 0)
        params_test['closest_waypoints'] = [1, 2]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_car_heading_error(), 90)
        params_test['closest_waypoints'] = [2, 3]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_car_heading_error(), 180)
        params_test['closest_waypoints'] = [3, 4]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_car_heading_error(), -90)
        params_test['closest_waypoints'] = [4, 5]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_car_heading_error(), 45)
        params_test['closest_waypoints'] = [5, 6]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_car_heading_error(), -45)

    def test_get_optimum_speed_ratio(self):
        """Speed ratio drops from 1.0 towards 0.33 as the track position and
        then the car's heading error get more demanding."""
        params_test = get_test_params()
        params_test['heading'] = 0
        params_test['distance_from_center'] = 0
        params_test['steering_angle'] = 0
        params_test['closest_waypoints'] = (0, 1)
        # place the car exactly on the first of the two closest waypoints
        params_test['x'] = params_test['waypoints'][params_test['closest_waypoints'][0]][0]
        params_test['y'] = params_test['waypoints'][params_test['closest_waypoints'][0]][1]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_optimum_speed_ratio(), 1.0)
        params_test['closest_waypoints'] = (9, 10)
        params_test['x'] = params_test['waypoints'][params_test['closest_waypoints'][0]][0]
        params_test['y'] = params_test['waypoints'][params_test['closest_waypoints'][0]][1]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_optimum_speed_ratio(), 0.66)
        params_test['closest_waypoints'] = (10, 11)
        params_test['x'] = params_test['waypoints'][params_test['closest_waypoints'][0]][0]
        params_test['y'] = params_test['waypoints'][params_test['closest_waypoints'][0]][1]
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_optimum_speed_ratio(), 0.33)
        # now vary the heading error instead of the track position
        params_test = get_test_params()
        params_test['distance_from_center'] = 0
        params_test['steering_angle'] = 0
        params_test['closest_waypoints'] = (0, 1)
        params_test['x'] = params_test['waypoints'][params_test['closest_waypoints'][0]][0]
        params_test['y'] = params_test['waypoints'][params_test['closest_waypoints'][0]][1]
        params_test['heading'] = 1.1 * re.MAX_STEERING_ANGLE
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_optimum_speed_ratio(), 0.34)
        params_test['heading'] = 1.1 * (re.MAX_STEERING_ANGLE * 0.75)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_optimum_speed_ratio(), 0.67)
        # self.print_get_optimum_speed_ratio()

    def test_is_in_optimized_corridor(self):
        """Corridor membership on straights, before a turn, and inside a turn,
        for both sides of the center line and several center offsets."""
        params_test = get_test_params()
        params_test['heading'] = 0
        params_test['track_width'] = 2
        params_test['distance_from_center'] = 0
        params_test['is_left_of_center'] = True
        params_test['steering_angle'] = 0
        params_test['closest_waypoints'] = (0, 1)
        params_test['x'] = params_test['waypoints'][params_test['closest_waypoints'][0]][0]
        params_test['y'] = params_test['waypoints'][params_test['closest_waypoints'][0]][1]
        # Center line - in corridor (left and right)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        # Center line - out of corridor (left and right)
        params_test['distance_from_center'] = re.CENTERLINE_FOLLOW_RATIO_TRESHOLD * 2.2 * re.track_width
        params_test['is_left_of_center'] = True
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)
        # BEFORE TURN LEFT - in corridor more right
        params_test['closest_waypoints'] = (8, 9)
        params_test['distance_from_center'] = re.CENTERLINE_FOLLOW_RATIO_TRESHOLD * 2.2 * re.track_width
        params_test['is_left_of_center'] = True
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)
        params_test['distance_from_center'] = re.CENTERLINE_FOLLOW_RATIO_TRESHOLD * 0.4 * re.track_width
        params_test['is_left_of_center'] = True
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        params_test['distance_from_center'] = re.CENTERLINE_FOLLOW_RATIO_TRESHOLD * 0.8 * re.track_width
        params_test['is_left_of_center'] = True
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        # Test in curve - inner side
        params_test['closest_waypoints'] = (15, 16)
        params_test['distance_from_center'] = re.CENTERLINE_FOLLOW_RATIO_TRESHOLD * 2.2 * re.track_width
        params_test['is_left_of_center'] = True
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)
        params_test['distance_from_center'] = re.CENTERLINE_FOLLOW_RATIO_TRESHOLD * 0.4 * re.track_width
        params_test['is_left_of_center'] = True
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        # Driving through the turn on the inner side
        params_test['distance_from_center'] = re.CENTERLINE_FOLLOW_RATIO_TRESHOLD * 0.8 * re.track_width
        params_test['is_left_of_center'] = True
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), True)
        params_test['is_left_of_center'] = False
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_optimized_corridor(), False)

    def print_get_optimum_speed_ratio(self):
        """Debug helper: print the speed ratio at every waypoint, with the
        heading aligned to the segment towards the next waypoint."""
        params_test = get_test_params()
        params_test['distance_from_center'] = 0
        params_test['steering_angle'] = 0
        ind = 0
        for w in params_test['waypoints']:
            params_test['closest_waypoints'][0] = ind
            params_test['closest_waypoints'][1] = ind + 1
            params_test['x'] = w[0]
            params_test['y'] = w[1]
            re = RewardEvaluator(params_test)
            params_test['heading'] = re.get_heading_between_waypoints(w, re.get_way_point(ind + 1))
            # params_test['heading'] = params_test['heading'] + 1
            re.init_self(params_test)
            print(str(ind) + " speed ratio : " + str(re.get_optimum_speed_ratio()))
            ind = ind + 1
        print(" ")

    def print_is_in_turn(self):
        """Debug helper: print is_in_turn() for every waypoint index.

        (The loop variable ``w`` is unused; the loop just walks the indices.)
        """
        params_test = get_test_params()
        params_test['distance_from_center'] = 0
        params_test['steering_angle'] = 0
        ind = 0
        for w in params_test['waypoints']:
            params_test['closest_waypoints'][0] = ind
            params_test['closest_waypoints'][1] = ind + 1
            re = RewardEvaluator(params_test)
            re.init_self(params_test)
            print(str(ind) + " is_in_turn : " + str(re.is_in_turn()))
            ind = ind + 1
        print(" ")

    def test_is_in_turn(self):
        """is_in_turn() is False on straight segments and True approaching or
        inside a bend of the synthetic course."""
        params_test = get_test_params()
        params_test['heading'] = 0
        params_test['waypoints'] = [(0, 0), (1, 0), (2, 0), (3, 1), (4, 1), (5, 1), (6, -6), (-1, -6), (-1, 0)]
        params_test['closest_waypoints'] = (0, 1)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_turn(), False)
        params_test['closest_waypoints'] = (1, 2)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_turn(), False)
        params_test['closest_waypoints'] = (2, 3)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_turn(), True)
        params_test['closest_waypoints'] = (5, 6)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.is_in_turn(), True)

    def print_get_turn_angle(self):
        """Debug helper: print get_turn_angle() for every waypoint index."""
        params_test = get_test_params()
        params_test['distance_from_center'] = 0
        params_test['steering_angle'] = 0
        ind = 0
        for w in params_test['waypoints']:
            params_test['closest_waypoints'][0] = ind
            params_test['closest_waypoints'][1] = ind + 1
            re = RewardEvaluator(params_test)
            re.init_self(params_test)
            print(str(ind) + " get_turn_angle : {0:.1f}".format(re.get_turn_angle()))
            ind = ind + 1
        print(" ")

    def print_get_expected_turn_direction(self):
        """Debug helper: print the expected turn direction per waypoint."""
        params_test = get_test_params()
        params_test['distance_from_center'] = 0
        params_test['steering_angle'] = 0
        ind = 0
        for w in params_test['waypoints']:
            params_test['closest_waypoints'][0] = ind
            params_test['closest_waypoints'][1] = ind + 1
            re = RewardEvaluator(params_test)
            re.init_self(params_test)
            print(str(ind) + " getCurveDirectio : " + re.get_expected_turn_direction())
            ind = ind + 1
        print(" ")

    def test_get_turn_angle(self):
        """Turn angle is 0 on straights, positive for left bends, negative for
        right bends (exact float expected for the sharp right)."""
        params_test = get_test_params()
        params_test['heading'] = 0
        params_test['waypoints'] = [(0, 0), (1, 0), (2, 0), (3, 1), (4, 1), (5, 1), (6, -6), (-1, -6), (-1, 0)]
        params_test['closest_waypoints'] = (0, 1)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_turn_angle(), 0)
        params_test['closest_waypoints'] = (1, 2)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_turn_angle(), 0)
        params_test['closest_waypoints'] = (2, 3)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_turn_angle(), 45)
        params_test['closest_waypoints'] = (5, 6)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.get_turn_angle(), -81.86989764584403)

    def test_reached_target(self):
        """reached_target() is True only when the closest waypoints are the
        final pair of the track."""
        params_test = get_test_params()
        max_way_point_index = len(params_test['waypoints']) - 1
        params_test['closest_waypoints'] = (max_way_point_index - 1, max_way_point_index)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.reached_target(), True)
        max_way_point_index = len(params_test['waypoints']) - 5
        params_test['closest_waypoints'] = (max_way_point_index - 1, max_way_point_index)
        re = RewardEvaluator(params_test)
        self.assertEqual(re.reached_target(), False)

    def test_evaluation(self):
        """Smoke test: evaluate() runs without raising for two track
        positions (no assertion on the returned reward)."""
        params_test = get_test_params()
        params_test['heading'] = 0
        params_test['track_width'] = 10
        params_test['distance_from_center'] = 0
        params_test['is_left_of_center'] = True
        params_test['steering_angle'] = 0
        params_test['closest_waypoints'] = (0, 1)
        params_test['speed'] = 3
        params_test['x'] = params_test['waypoints'][params_test['closest_waypoints'][0]][0]
        params_test['y'] = params_test['waypoints'][params_test['closest_waypoints'][0]][1]
        re = RewardEvaluator(params_test)
        re.evaluate()
        params_test = get_test_params()
        params_test['heading'] = 0
        params_test['track_width'] = 10
        params_test['distance_from_center'] = 0
        params_test['is_left_of_center'] = True
        params_test['steering_angle'] = 0
        params_test['closest_waypoints'] = (69, 70)
        params_test['speed'] = 3
        params_test['x'] = params_test['waypoints'][params_test['closest_waypoints'][0]][0]
        params_test['y'] = params_test['waypoints'][params_test['closest_waypoints'][0]][1]
        re = RewardEvaluator(params_test)
        re.evaluate()
        # self.print_is_in_turn()
        # self.print_get_turn_angle()
        # self.print_get_expected_turn_direction()
if __name__ == '__main__':
    # Allow running the suite directly: python test_reward.py
    unittest.main()
| 46.661538 | 113 | 0.64695 | 1,982 | 15,165 | 4.606458 | 0.068113 | 0.233297 | 0.10241 | 0.133844 | 0.880504 | 0.857612 | 0.842278 | 0.842278 | 0.813801 | 0.79989 | 0 | 0.029354 | 0.22275 | 15,165 | 324 | 114 | 46.805556 | 0.745228 | 0.04029 | 0 | 0.714801 | 0 | 0 | 0.136351 | 0 | 0 | 0 | 0 | 0 | 0.198556 | 1 | 0.050542 | false | 0 | 0.01444 | 0 | 0.068592 | 0.043321 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6051229857885e64c0c7354a59477b9ab0fe4685 | 26,411 | py | Python | Precomputing/GAMLP.py | VITA-Group/Large_Scale_GCN_Benchmarking | d4c85a899a22b5a982ae437eac037f3629454510 | [
"MIT"
] | 1 | 2022-03-02T05:06:35.000Z | 2022-03-02T05:06:35.000Z | Precomputing/GAMLP.py | VITA-Group/Large_Scale_GCN_Benchmarking | d4c85a899a22b5a982ae437eac037f3629454510 | [
"MIT"
] | null | null | null | Precomputing/GAMLP.py | VITA-Group/Large_Scale_GCN_Benchmarking | d4c85a899a22b5a982ae437eac037f3629454510 | [
"MIT"
] | null | null | null |
import math
import os
import random
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Parameter
from torch_geometric.transforms import SIGN
from torch.utils.data import DataLoader
from Precomputing.base import PrecomputingBase
# adapted from https://github.com/chennnM/GBP
class Dense(nn.Module):
    """Linear layer with optional BatchNorm and an identity skip connection.

    Args:
        in_features: input width.
        out_features: output width.
        bias: ``'bn'`` applies BatchNorm1d to the linear output; any other
            value disables normalisation.

    The residual connection is added only when ``in_features == out_features``.
    """

    def __init__(self, in_features, out_features, bias='bn'):
        super(Dense, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.weight = nn.Parameter(torch.FloatTensor(in_features, out_features))
        if bias == 'bn':
            self.bias = nn.BatchNorm1d(out_features)
        else:
            # FIX: use nn.Identity() instead of `lambda x: x`. A lambda
            # attribute makes the module unpicklable (torch.save of the whole
            # module) and invisible to module traversal; Identity behaves
            # identically on the forward pass.
            self.bias = nn.Identity()
        self.reset_parameters()

    def reset_parameters(self):
        """Uniform init in [-stdv, stdv] with stdv = 1/sqrt(out_features)."""
        stdv = 1. / math.sqrt(self.weight.size(1))
        self.weight.data.uniform_(-stdv, stdv)

    def forward(self, input):
        output = torch.mm(input, self.weight)
        output = self.bias(output)
        # identity skip connection only when the width is unchanged
        if self.in_features == self.out_features:
            output = output + input
        return output
# MLP apply initial residual
class GraphConvolution(nn.Module):
    """GCNII-style dense layer with an initial residual.

    The input is blended with the first layer's representation ``h0`` (weight
    ``alpha``) before the linear transform; BatchNorm follows, and an identity
    skip connection is added when input and output widths match.
    """

    def __init__(self, in_features, out_features, alpha, bns=False):
        super(GraphConvolution, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.alpha = alpha
        self.weight = Parameter(torch.FloatTensor(self.in_features, self.out_features))
        self.reset_parameters()
        # NOTE(review): `bns` is stored but never consulted — the BatchNorm
        # below is applied unconditionally (the guarding `if` was commented out
        # in the original). Confirm whether that is intended.
        self.bns = bns
        self.bias = nn.BatchNorm1d(out_features)

    def reset_parameters(self):
        """Uniform init in [-bound, bound] with bound = 1/sqrt(out_features)."""
        bound = 1. / math.sqrt(self.out_features)
        self.weight.data.uniform_(-bound, bound)

    def forward(self, input, h0):
        # initial-residual mix of the current features with the layer-0 features
        mixed = (1 - self.alpha) * input + self.alpha * h0
        transformed = torch.mm(mixed, self.weight)
        transformed = self.bias(transformed)
        # identity skip connection when the width is unchanged
        if self.in_features == self.out_features:
            transformed = transformed + input
        return transformed
# adapted from dgl sign
class FeedForwardNet(nn.Module):
    """Plain MLP: Linear layers with (optional) BatchNorm, PReLU and dropout
    between them; the final Linear has no normalisation or activation."""

    def __init__(self, in_feats, hidden, out_feats, n_layers, dropout, bns=True):
        super(FeedForwardNet, self).__init__()
        self.layers = nn.ModuleList()
        self.bns = nn.ModuleList()
        self.n_layers = n_layers
        if n_layers == 1:
            # single projection, no hidden layers
            self.layers.append(nn.Linear(in_feats, out_feats))
        else:
            widths = [in_feats] + [hidden] * (n_layers - 1) + [out_feats]
            for fan_in, fan_out in zip(widths[:-1], widths[1:]):
                self.layers.append(nn.Linear(fan_in, fan_out))
            # one BatchNorm per non-final layer, all at the hidden width
            for _ in range(n_layers - 1):
                self.bns.append(nn.BatchNorm1d(hidden))
        if self.n_layers > 1:
            self.prelu = nn.PReLU()
        self.dropout = nn.Dropout(dropout)
        self.norm = bns
        self.reset_parameters()

    def reset_parameters(self):
        """Xavier-uniform weights (relu gain) and zero biases on every Linear."""
        gain = nn.init.calculate_gain("relu")
        for lin in self.layers:
            nn.init.xavier_uniform_(lin.weight, gain=gain)
            nn.init.zeros_(lin.bias)

    def forward(self, x):
        last = self.n_layers - 1
        for idx, lin in enumerate(self.layers):
            x = lin(x)
            if idx < last:
                normed = self.bns[idx](x) if self.norm else x
                x = self.dropout(self.prelu(normed))
        return x
class FeedForwardNetII(nn.Module):
    """MLP built from Dense / GraphConvolution blocks with a GCNII-style
    initial residual: every hidden layer mixes its input with the first
    layer's output ``h0``."""

    def __init__(self, in_feats, hidden, out_feats, n_layers, dropout, alpha, bns=False):
        super(FeedForwardNetII, self).__init__()
        self.layers = nn.ModuleList()
        self.n_layers = n_layers
        self.in_feats = in_feats
        self.hidden = hidden
        self.out_feats = out_feats
        if n_layers == 1:
            # degenerate case: a single Dense projection
            self.layers.append(Dense(in_feats, out_feats))
        else:
            self.layers.append(Dense(in_feats, hidden))
            self.layers.extend(
                GraphConvolution(hidden, hidden, alpha, bns)
                for _ in range(n_layers - 2))
            self.layers.append(Dense(hidden, out_feats))
        self.prelu = nn.PReLU()
        self.dropout = nn.Dropout(dropout)
        self.reset_parameters()

    def reset_parameters(self):
        """Delegate to each sub-block's own initialisation."""
        for block in self.layers:
            block.reset_parameters()

    def forward(self, x):
        # the first layer's output doubles as the initial residual h0
        h0 = self.layers[0](x)
        x = h0
        for idx in range(1, self.n_layers):
            x = self.dropout(self.prelu(x))
            if idx == self.n_layers - 1:
                # output projection: plain Dense, no h0 mixing
                x = self.layers[idx](x)
            else:
                # hidden layer: GraphConvolution with the initial residual
                x = self.layers[idx](x, h0)
        return x
class R_GAMLP(PrecomputingBase):  # recursive GAMLP
    """GAMLP with recursive hop attention.

    Each hop's feature is scored against the attention-weighted combination of
    the previous hops ("recursive" attention); the final softmax over all hop
    scores weights the hops before the FeedForwardNetII classifier.

    NOTE(review): ``n_layers_1`` is accepted but never used in this class
    (only ``n_layers_2`` configures a sub-network) — confirm intended.
    NOTE(review): an ``act`` value outside {'sigmoid','relu','leaky_relu'}
    leaves ``self.act`` unset and fails later in forward() — confirm intended.
    """

    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, act="relu", pre_process=False, residual=False,pre_dropout=False,bns=False):
        super(R_GAMLP, self).__init__(args, data, train_idx)
        # one feature block per propagation hop, plus the raw features
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        self.num_hops = num_hops
        self.prelu = nn.PReLU()
        if pre_process:
            # per-hop 2-layer MLPs first map raw features to the hidden width,
            # so attention scoring and the classifier operate on `hidden`
            self.lr_att = nn.Linear(hidden + hidden, 1)
            self.lr_output = FeedForwardNetII(
                hidden, hidden, nclass, n_layers_2, dropout, alpha,bns)
            self.process = nn.ModuleList(
                [FeedForwardNet(nfeat, hidden, hidden, 2, dropout, bns) for i in range(num_hops)])
        else:
            # attention and classifier operate directly on raw features
            self.lr_att = nn.Linear(nfeat + nfeat, 1)
            self.lr_output = FeedForwardNetII(
                nfeat, hidden, nclass, n_layers_2, dropout, alpha,bns)
        self.dropout = nn.Dropout(dropout)
        self.input_drop = nn.Dropout(input_drop)
        self.att_drop = nn.Dropout(att_dropout)
        self.pre_process = pre_process
        self.res_fc = nn.Linear(nfeat, hidden)
        self.residual = residual
        self.pre_dropout=pre_dropout
        if act == 'sigmoid':
            self.act = torch.nn.Sigmoid()
        elif act == 'relu':
            self.act = torch.nn.ReLU()
        elif act == 'leaky_relu':
            self.act = torch.nn.LeakyReLU(0.2)
        self.reset_parameters()

    def reset_parameters(self):
        # Xavier (relu gain) for the attention scorer and the residual
        # projection; sub-networks handle their own initialisation.
        gain = nn.init.calculate_gain("relu")
        nn.init.xavier_uniform_(self.lr_att.weight, gain=gain)
        nn.init.zeros_(self.lr_att.bias)
        nn.init.xavier_uniform_(self.res_fc.weight, gain=gain)
        nn.init.zeros_(self.res_fc.bias)
        self.lr_output.reset_parameters()
        if self.pre_process:
            for layer in self.process:
                layer.reset_parameters()

    def forward(self, feature_list):
        # feature_list: one tensor per hop, each (num_node, nfeat)
        num_node = feature_list[0].shape[0]
        feature_list = [self.input_drop(feature) for feature in feature_list]
        input_list = []
        if self.pre_process:
            for i in range(self.num_hops):
                input_list.append(self.process[i](feature_list[i]))
        else:
            input_list = feature_list
        # hop 0 is scored against itself to seed the recursion
        attention_scores = []
        attention_scores.append(self.act(self.lr_att(
            torch.cat([input_list[0], input_list[0]], dim=1))))
        for i in range(1, self.num_hops):
            # combine hops 0..i-1 with a softmax over their scores so far,
            # then score hop i against that running summary
            history_att = torch.cat(attention_scores[:i], dim=1)
            att = F.softmax(history_att, 1)
            history = torch.mul(input_list[0], self.att_drop(
                att[:, 0].view(num_node, 1)))
            for j in range(1, i):
                history = history + \
                    torch.mul(input_list[j], self.att_drop(
                        att[:, j].view(num_node, 1)))
            attention_scores.append(self.act(self.lr_att(
                torch.cat([history, input_list[i]], dim=1))))
        # final attention over all hops, then the weighted hop combination
        attention_scores = torch.cat(attention_scores, dim=1)
        attention_scores = F.softmax(attention_scores, 1)
        right_1 = torch.mul(input_list[0], self.att_drop(
            attention_scores[:, 0].view(num_node, 1)))
        for i in range(1, self.num_hops):
            right_1 = right_1 + \
                torch.mul(input_list[i], self.att_drop(
                    attention_scores[:, i].view(num_node, 1)))
        if self.residual:
            right_1 += self.res_fc(feature_list[0])
        right_1 = self.dropout(self.prelu(right_1))
        if self.pre_dropout:
            right_1=self.dropout(right_1)
        right_1 = self.lr_output(right_1)
        return right_1
class JK_GAMLP(PrecomputingBase):
    """GAMLP with a JK (jumping-knowledge) attention reference.

    The concatenation of all hop features is projected by ``lr_jk_ref`` into a
    per-node reference vector; each hop is scored against that reference and
    the hops are combined by the resulting softmax attention before the final
    FeedForwardNetII classifier.
    """

    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, act="relu", pre_process=False, residual=False, pre_dropout=False, bns=False):
        # BUG FIX: the original called super(JK_GAMLP_orig, self).__init__ —
        # JK_GAMLP_orig does not exist in this module, so instantiating the
        # class raised NameError.
        super(JK_GAMLP, self).__init__(args, data, train_idx)
        # one feature block per propagation hop, plus the raw features
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        self.num_hops = num_hops
        self.prelu = nn.PReLU()
        self.pre_dropout = pre_dropout
        if pre_process:
            # per-hop 2-layer MLPs map raw features to the hidden width first
            self.lr_jk_ref = FeedForwardNetII(
                num_hops*hidden, hidden, hidden, n_layers_1, dropout, alpha, bns)
            self.lr_att = nn.Linear(hidden + hidden, 1)
            self.lr_output = FeedForwardNetII(
                hidden, hidden, nclass, n_layers_2, dropout, alpha, bns)
            self.process = nn.ModuleList(
                [FeedForwardNet(nfeat, hidden, hidden, 2, dropout, bns) for i in range(num_hops)])
        else:
            self.lr_jk_ref = FeedForwardNetII(
                num_hops*nfeat, hidden, hidden, n_layers_1, dropout, alpha, bns)
            self.lr_att = nn.Linear(nfeat + hidden, 1)
            self.lr_output = FeedForwardNetII(
                nfeat, hidden, nclass, n_layers_2, dropout, alpha, bns)
        self.dropout = nn.Dropout(dropout)
        self.input_drop = nn.Dropout(input_drop)
        self.att_drop = nn.Dropout(att_dropout)
        self.pre_process = pre_process
        self.res_fc = nn.Linear(nfeat, hidden)
        if act == 'sigmoid':
            self.act = torch.nn.Sigmoid()
        elif act == 'relu':
            self.act = torch.nn.ReLU()
        elif act == 'leaky_relu':
            self.act = torch.nn.LeakyReLU(0.2)
        else:
            # fail fast instead of an AttributeError later inside forward()
            raise ValueError("unsupported activation: {}".format(act))
        self.residual = residual
        self.reset_parameters()

    def reset_parameters(self):
        # Xavier (relu gain) for the attention scorer and residual projection;
        # sub-networks handle their own initialisation.
        gain = nn.init.calculate_gain("relu")
        nn.init.xavier_uniform_(self.lr_att.weight, gain=gain)
        nn.init.zeros_(self.lr_att.bias)
        nn.init.xavier_uniform_(self.res_fc.weight, gain=gain)
        nn.init.zeros_(self.res_fc.bias)
        self.lr_output.reset_parameters()
        self.lr_jk_ref.reset_parameters()
        if self.pre_process:
            for layer in self.process:
                layer.reset_parameters()

    def forward(self, feature_list):
        # feature_list: one tensor per hop, each (num_node, nfeat)
        num_node = feature_list[0].shape[0]
        feature_list = [self.input_drop(feature) for feature in feature_list]
        input_list = []
        if self.pre_process:
            for i in range(len(feature_list)):
                input_list.append(self.process[i](feature_list[i]))
        else:
            input_list = feature_list
        # JK reference: project the concatenation of all hops per node
        concat_features = torch.cat(input_list, dim=1)
        jk_ref = self.dropout(self.prelu(self.lr_jk_ref(concat_features)))
        # score every hop against the reference, softmax over hops
        attention_scores = [self.act(self.lr_att(torch.cat((jk_ref, x), dim=1))).view(num_node, 1) for x in
                            input_list]
        W = torch.cat(attention_scores, dim=1)
        W = F.softmax(W, 1)
        # attention-weighted combination of the hop features
        right_1 = torch.mul(input_list[0], self.att_drop(
            W[:, 0].view(num_node, 1)))
        for i in range(1, self.num_hops):
            right_1 = right_1 + \
                torch.mul(input_list[i], self.att_drop(
                    W[:, i].view(num_node, 1)))
        if self.residual:
            right_1 += self.res_fc(feature_list[0])
        right_1 = self.dropout(self.prelu(right_1))
        if self.pre_dropout:
            right_1 = self.dropout(right_1)
        right_1 = self.lr_output(right_1)
        return right_1
class JK_GAMLP_RLU(PrecomputingBase):
    """JK-style GAMLP with reliable label utilization (RLU).

    A jumping-knowledge reference vector attends over the per-hop feature
    matrices; the label embedding is additionally passed through ``label_fc``
    and added to the output logits in :meth:`forward`.
    """

    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, label_drop=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, n_layers_3=2, act="relu", pre_process=False, residual=False, pre_dropout=False, bns=False):
        super(JK_GAMLP_RLU, self).__init__(args, data, train_idx)
        # One propagated feature matrix per hop, including hop 0.
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        self.num_hops = num_hops
        self.pre_dropout = pre_dropout
        self.prelu = nn.PReLU()
        self.res_fc = nn.Linear(nfeat, hidden, bias=False)
        if pre_process:
            # Per-hop MLPs project raw features to `hidden` first, so the
            # attention / output layers operate in the hidden space.
            self.lr_jk_ref = FeedForwardNetII(
                num_hops*hidden, hidden, hidden, n_layers_1, dropout, alpha, bns)
            self.lr_att = nn.Linear(hidden + hidden, 1)
            self.lr_output = FeedForwardNetII(
                hidden, hidden, nclass, n_layers_2, dropout, alpha, bns)
            self.process = nn.ModuleList(
                [FeedForwardNet(nfeat, hidden, hidden, 2, dropout, bns) for i in range(num_hops)])
        else:
            # Without pre-processing the attention sees raw hop features.
            self.lr_jk_ref = FeedForwardNetII(
                num_hops*nfeat, hidden, hidden, n_layers_1, dropout, alpha, bns)
            self.lr_att = nn.Linear(nfeat + hidden, 1)
            self.lr_output = FeedForwardNetII(
                nfeat, hidden, nclass, n_layers_2, dropout, alpha, bns)
        self.dropout = nn.Dropout(dropout)
        self.input_drop = nn.Dropout(input_drop)
        self.att_drop = nn.Dropout(att_dropout)
        self.label_drop = nn.Dropout(label_drop)
        self.pre_process = pre_process
        self.label_fc = FeedForwardNet(
            nclass, hidden, nclass, n_layers_3, dropout)
        if act == 'sigmoid':
            self.act = torch.nn.Sigmoid()
        elif act == 'relu':
            self.act = torch.nn.ReLU()
        elif act == 'leaky_relu':
            self.act = torch.nn.LeakyReLU(0.2)
        self.residual = residual
        # Fix: the sibling models (JK_GAMLP, R_GAMLP_RLU) apply their custom
        # xavier initialisation at construction time; this class defined
        # reset_parameters() but never invoked it.
        self.reset_parameters()

    def reset_parameters(self):
        """Re-initialise the attention/residual layers and delegate to sub-networks."""
        gain = nn.init.calculate_gain("relu")
        nn.init.xavier_uniform_(self.lr_att.weight, gain=gain)
        nn.init.zeros_(self.lr_att.bias)
        nn.init.xavier_uniform_(self.res_fc.weight, gain=gain)
        nn.init.zeros_(self.res_fc.bias)
        self.lr_output.reset_parameters()
        self.lr_jk_ref.reset_parameters()
        if self.pre_process:
            for layer in self.process:
                layer.reset_parameters()

    def forward(self, feature_list, label_emb):
        """Fuse per-hop features with JK attention, then add the label head.

        :param feature_list: one tensor per hop; first dimension is nodes.
        :param label_emb: label embedding fed through ``label_fc``.
        :return: logits including the label-propagation contribution.
        """
        num_node = feature_list[0].shape[0]
        feature_list = [self.input_drop(feature) for feature in feature_list]
        input_list = []
        if self.pre_process:
            for i in range(len(feature_list)):
                input_list.append(self.process[i](feature_list[i]))
        else:
            # Fix: this fallback exists in JK_GAMLP.forward and
            # R_GAMLP_RLU.forward; without it, pre_process=False left
            # input_list empty and torch.cat below failed.
            input_list = feature_list
        # Jumping-knowledge reference vector summarising all hops.
        concat_features = torch.cat(input_list, dim=1)
        jk_ref = self.dropout(self.prelu(self.lr_jk_ref(concat_features)))
        # Score each hop against the reference; softmax over hops.
        attention_scores = [self.act(self.lr_att(torch.cat((jk_ref, x), dim=1))).view(num_node, 1) for x in
                            input_list]
        W = torch.cat(attention_scores, dim=1)
        W = F.softmax(W, 1)
        # Attention-weighted sum of hops (attention dropout per hop weight).
        right_1 = torch.mul(input_list[0], self.att_drop(
            W[:, 0].view(num_node, 1)))
        for i in range(1, self.num_hops):
            right_1 = right_1 + \
                torch.mul(input_list[i], self.att_drop(
                    W[:, i].view(num_node, 1)))
        if self.residual:
            right_1 += self.res_fc(feature_list[0])
        right_1 = self.dropout(self.prelu(right_1))
        if self.pre_dropout:
            right_1 = self.dropout(right_1)
        right_1 = self.lr_output(right_1)
        # RLU: add the label-propagation head on the dropped-out label embedding.
        right_1 += self.label_fc(self.label_drop(label_emb))
        return right_1
class R_GAMLP_RLU(PrecomputingBase):  # recursive GAMLP
    """Recursive GAMLP with reliable label utilization (RLU).

    Attention is built recursively: each hop is scored against the
    attention-weighted combination ("history") of all previous hops.  The
    label embedding is run through ``label_fc`` and added to the logits.
    """
    # def __init__(self, nfeat, hidden, nclass, num_hops,
    #     dropout, input_drop, att_dropout, label_drop, alpha, n_layers_1, n_layers_2, n_layers_3, act, pre_process=False, residual=False,pre_dropout=False,bns=False):
    #     super(R_GAMLP_RLU, self).__init__()
    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, label_drop=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, n_layers_3=2, act="relu", pre_process=False, residual=False,pre_dropout=False,bns=False):
        super(R_GAMLP_RLU, self).__init__(args, data, train_idx)
        # One propagated feature matrix per hop, including hop 0.
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        self.num_hops = num_hops
        self.pre_dropout=pre_dropout
        self.prelu = nn.PReLU()
        if pre_process:
            # Per-hop MLPs project raw features to `hidden`; attention then
            # scores [history ; hop] pairs in hidden space.
            self.lr_att = nn.Linear(hidden + hidden, 1)
            self.lr_output = FeedForwardNetII(
                hidden, hidden, nclass, n_layers_2, dropout, alpha, bns)
            self.process = nn.ModuleList(
                [FeedForwardNet(nfeat, hidden, hidden, 2, dropout,bns) for i in range(num_hops)])
        else:
            # Attention scores [history ; hop] pairs in raw feature space.
            self.lr_att = nn.Linear(nfeat + nfeat, 1)
            self.lr_output = FeedForwardNetII(
                nfeat, hidden, nclass, n_layers_2, dropout, alpha, bns)
        self.dropout = nn.Dropout(dropout)
        self.input_drop = nn.Dropout(input_drop)
        self.att_drop = nn.Dropout(att_dropout)
        self.pre_process = pre_process
        self.res_fc = nn.Linear(nfeat, hidden)
        self.label_drop = nn.Dropout(label_drop)
        self.residual = residual
        self.label_fc = FeedForwardNet(
            nclass, hidden, nclass, n_layers_3, dropout)
        if act == 'sigmoid':
            self.act = torch.nn.Sigmoid()
        elif act == 'relu':
            self.act = torch.nn.ReLU()
        elif act == 'leaky_relu':
            self.act = torch.nn.LeakyReLU(0.2)
        self.reset_parameters()

    def reset_parameters(self):
        """Re-initialise the attention/residual layers and delegate to sub-networks."""
        gain = nn.init.calculate_gain("relu")
        nn.init.xavier_uniform_(self.lr_att.weight, gain=gain)
        nn.init.zeros_(self.lr_att.bias)
        nn.init.xavier_uniform_(self.res_fc.weight, gain=gain)
        nn.init.zeros_(self.res_fc.bias)
        self.lr_output.reset_parameters()
        if self.pre_process:
            for layer in self.process:
                layer.reset_parameters()

    def forward(self, feature_list, label_emb):
        """Recursive attention over hops, plus the RLU label head.

        :param feature_list: one tensor per hop; first dimension is nodes.
        :param label_emb: label embedding fed through ``label_fc``.
        """
        num_node = feature_list[0].shape[0]
        feature_list = [self.input_drop(feature) for feature in feature_list]
        input_list = []
        if self.pre_process:
            for i in range(self.num_hops):
                input_list.append(self.process[i](feature_list[i]))
        else:
            input_list = feature_list
        # Recursive attention: hop i is scored against the weighted sum
        # ("history") of hops 0..i-1 under the scores accumulated so far.
        attention_scores = []
        attention_scores.append(self.act(self.lr_att(
            torch.cat([input_list[0], input_list[0]], dim=1))))
        for i in range(1, self.num_hops):
            history_att = torch.cat(attention_scores[:i], dim=1)
            att = F.softmax(history_att, 1)
            history = torch.mul(input_list[0], self.att_drop(
                att[:, 0].view(num_node, 1)))
            for j in range(1, i):
                history = history + \
                    torch.mul(input_list[j], self.att_drop(
                        att[:, j].view(num_node, 1)))
            attention_scores.append(self.act(self.lr_att(
                torch.cat([history, input_list[i]], dim=1))))
        attention_scores = torch.cat(attention_scores, dim=1)
        attention_scores = F.softmax(attention_scores, 1)
        # Final combination: attention-weighted sum over all hops.
        right_1 = torch.mul(input_list[0], self.att_drop(
            attention_scores[:, 0].view(num_node, 1)))
        for i in range(1, self.num_hops):
            right_1 = right_1 + \
                torch.mul(input_list[i], self.att_drop(
                    attention_scores[:, i].view(num_node, 1)))
        if self.residual:
            # Residual connection from the (input-dropped) hop-0 features.
            right_1 += self.res_fc(feature_list[0])
        right_1 = self.dropout(self.prelu(right_1))
        if self.pre_dropout:
            right_1=self.dropout(right_1)
        right_1 = self.lr_output(right_1)
        # RLU: add the label-propagation head on the dropped-out label embedding.
        right_1 += self.label_fc(self.label_drop(label_emb))
        return right_1
# adapt from https://github.com/facebookresearch/NARS/blob/main/model.py
class WeightedAggregator(nn.Module):
    """Learned weighted aggregation of relation-subset features (from NARS).

    Holds one learnable ``(num_feats, in_feats)`` weight matrix per hop;
    each hop's stacked subset features are combined into a single feature
    matrix by a weighted sum over the subset axis.
    """

    def __init__(self, num_feats, in_feats, num_hops):
        super(WeightedAggregator, self).__init__()
        self.agg_feats = nn.ParameterList()
        for _ in range(num_hops):
            weight = nn.Parameter(torch.Tensor(num_feats, in_feats))
            nn.init.xavier_uniform_(weight)
            self.agg_feats.append(weight)

    def forward(self, feat_list):  # feat_list k (N,S,D)
        """Collapse the subset axis of each hop via its learned weights."""
        return [
            (hop_feats * hop_weight.unsqueeze(0)).sum(dim=1).squeeze()
            for hop_feats, hop_weight in zip(feat_list, self.agg_feats)
        ]
class NARS_JK_GAMLP(PrecomputingBase):
    """NARS wrapper: weighted aggregation over relation-subset features,
    then classification with a JK_GAMLP model.
    """
    # def __init__(self, nfeat, hidden, nclass, num_hops, num_feats, alpha, n_layers_1, n_layers_2, n_layers_3, act="relu", dropout=0.5, input_drop=0.0, attn_drop=0.0, label_drop=0.0, pre_process=False, residual=False,pre_dropout=False,bns=False):
    #     super(NARS_JK_GAMLP, self).__init__()
    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, act="relu", pre_process=False, residual=False,pre_dropout=False,bns=False):
        super(NARS_JK_GAMLP, self).__init__(args, data, train_idx)
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        # NOTE(review): `num_feats` is never defined in this scope (it was a
        # parameter of the commented-out signature above), and `attn_drop`
        # below should presumably be `att_dropout` — as written, construction
        # raises NameError.
        self.aggregator = WeightedAggregator(num_feats, nfeat, num_hops)
        # NOTE(review): this positional call appears to target the old
        # commented-out JK_GAMLP signature, not the adapted
        # (args, data, train_idx, ...) one used elsewhere in this file —
        # confirm before use.
        self.model = JK_GAMLP(nfeat, hidden, nclass, num_hops, dropout, input_drop, attn_drop,
                              alpha, n_layers_1, n_layers_2, pre_process, residual,pre_dropout,bns)

    def forward(self, feats_dict, label_emb):
        # Aggregate subset features per hop, then delegate to the GAMLP model.
        feats = self.aggregator(feats_dict)
        out1 = self.model(feats, label_emb)
        return out1
class NARS_R_GAMLP(PrecomputingBase):
    """NARS wrapper: weighted aggregation over relation-subset features,
    then classification with a recursive R_GAMLP model.
    """
    # def __init__(self, nfeat, hidden, nclass, num_hops, num_feats, alpha, n_layers_1, n_layers_2, n_layers_3, act="relu", dropout=0.5, input_drop=0.0, attn_drop=0.0, label_drop=0.0, pre_process=False, residual=False,pre_dropout=False,bns=False):
    #     super(NARS_R_GAMLP, self).__init__()
    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, act="relu", pre_process=False, residual=False,pre_dropout=False,bns=False):
        super(NARS_R_GAMLP, self).__init__(args, data, train_idx)
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        # NOTE(review): `num_feats` is never defined in this scope, and
        # `attn_drop` below should presumably be `att_dropout` — as written,
        # construction raises NameError.
        self.aggregator = WeightedAggregator(num_feats, nfeat, num_hops)
        # NOTE(review): positional call appears to target the old
        # commented-out R_GAMLP signature — confirm before use.
        self.model = R_GAMLP(nfeat, hidden, nclass, num_hops, dropout, input_drop,
                             attn_drop, alpha, n_layers_1, n_layers_2, pre_process, residual,pre_dropout,bns)

    def forward(self, feats_dict, label_emb):
        # Aggregate subset features per hop, then delegate to the GAMLP model.
        feats = self.aggregator(feats_dict)
        out1 = self.model(feats, label_emb)
        return out1
class NARS_JK_GAMLP_RLU(PrecomputingBase):
    """NARS wrapper around the RLU (label-utilising) JK_GAMLP model."""
    # def __init__(self, nfeat, hidden, nclass, num_hops, num_feats, alpha, n_layers_1, n_layers_2, n_layers_3, act="relu", dropout=0.5, input_drop=0.0, attn_drop=0.0, label_drop=0.0, pre_process=False, residual=False,pre_dropout=False,bns=False):
    #     super(NARS_JK_GAMLP_RLU, self).__init__()
    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, label_drop=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, n_layers_3=2, act="relu", pre_process=False, residual=False,pre_dropout=False,bns=False):
        super(NARS_JK_GAMLP_RLU, self).__init__(args, data, train_idx)
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        # NOTE(review): `num_feats` and `attn_drop` are undefined in this
        # scope (they were parameters of the commented-out signature above);
        # construction raises NameError as written.
        self.aggregator = WeightedAggregator(num_feats, nfeat, num_hops)
        # NOTE(review): JK_GAMLP_RLU in this file takes
        # (args, data, train_idx, ...), so this positional call matches the
        # old commented-out signature rather than the current one.
        self.model = JK_GAMLP_RLU(nfeat, hidden, nclass, num_hops, dropout, input_drop, attn_drop,
                                  label_drop, alpha, n_layers_1, n_layers_2, n_layers_3, act, pre_process, residual,pre_dropout, bns)

    def forward(self, feats_dict, label_emb):
        # Aggregate subset features per hop, then delegate to the RLU model.
        feats = self.aggregator(feats_dict)
        out1 = self.model(feats, label_emb)
        return out1
class NARS_R_GAMLP_RLU(PrecomputingBase):
    """NARS wrapper around the RLU (label-utilising) recursive R_GAMLP model."""
    # def __init__(self, nfeat, hidden, nclass, num_hops, num_feats, alpha, n_layers_1, n_layers_2, n_layers_3, act="relu", dropout=0.5, input_drop=0.0, attn_drop=0.0, label_drop=0.0, pre_process=False, residual=False,pre_dropout=False,bns=False):
    #     super(NARS_R_GAMLP_RLU, self).__init__()
    def __init__(self, args, data, train_idx, input_drop=0.0, att_dropout=0.0, label_drop=0.0, alpha=0.5, n_layers_1=2, n_layers_2=2, n_layers_3=2, act="relu", pre_process=False, residual=False,pre_dropout=False,bns=False):
        super(NARS_R_GAMLP_RLU, self).__init__(args, data, train_idx)
        num_hops = self.num_layers + 1
        nfeat = args.num_feats
        hidden = self.dim_hidden
        nclass = args.num_classes
        dropout = args.dropout
        # NOTE(review): `num_feats` and `attn_drop` are undefined in this
        # scope (they were parameters of the commented-out signature above);
        # construction raises NameError as written.
        self.aggregator = WeightedAggregator(num_feats, nfeat, num_hops)
        # NOTE(review): R_GAMLP_RLU in this file takes
        # (args, data, train_idx, ...), so this positional call matches the
        # old commented-out signature rather than the current one.
        self.model = R_GAMLP_RLU(nfeat, hidden, nclass, num_hops, dropout, input_drop, attn_drop,
                                 label_drop, alpha, n_layers_1, n_layers_2, n_layers_3, act, pre_process, residual,pre_dropout,bns)

    def forward(self, feats_dict, label_emb):
        # Aggregate subset features per hop, then delegate to the RLU model.
        feats = self.aggregator(feats_dict)
        out1 = self.model(feats, label_emb)
        return out1
| 44.239531 | 247 | 0.623339 | 3,675 | 26,411 | 4.212245 | 0.046259 | 0.033915 | 0.01447 | 0.01137 | 0.902972 | 0.896059 | 0.870866 | 0.848837 | 0.839729 | 0.816602 | 0 | 0.017797 | 0.263905 | 26,411 | 596 | 248 | 44.313758 | 0.778458 | 0.072962 | 0 | 0.808511 | 0 | 0 | 0.005724 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065764 | false | 0 | 0.023211 | 0 | 0.139265 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60514d8efc76c69a64c4021dcae6403619f0c7e6 | 12,593 | py | Python | fliscopt/ga.py | qwe123coder/fliscopt | 423aa694a2d45e04d98672fe305bd129019a9ee2 | [
"MIT"
] | null | null | null | fliscopt/ga.py | qwe123coder/fliscopt | 423aa694a2d45e04d98672fe305bd129019a9ee2 | [
"MIT"
] | null | null | null | fliscopt/ga.py | qwe123coder/fliscopt | 423aa694a2d45e04d98672fe305bd129019a9ee2 | [
"MIT"
] | null | null | null | import sys
import os
import time
sys.path.append(os.getcwd())
from .utils.util import plot_scores, print_schedule, read_file
from .base_algorithm import FlightAlgorithm,random
from .rs import RandomSearch
from .utils.ga_utils import crossover, mutation
from .fitness import *
import random
import heapq
from abc import ABCMeta, abstractmethod
class BaseGA(FlightAlgorithm, metaclass=ABCMeta):
    """Shared construction and interface for the GA variants in this module.

    Subclasses (GA, ReverseGA, GAReversals) implement :meth:`run`.

    NOTE(review): the ``seed=random.randint(10, 100)`` default is evaluated
    once, at class-definition time, so every call that omits ``seed`` reuses
    the same value; ``init=[]`` is likewise a shared mutable default.
    """
    def __init__(self, domain=domain['domain'], fitness_function=fitness_function, seed=random.randint(10, 100), seed_init=True, init=[],max_time=100,
                 population_size=100, step=1, probability_mutation=0.2, probability_crossover=0.2, elitism=0.2,
                 number_generations=500, search=False) -> None:
        # Domain/fitness/seeding/timeout state is handled by FlightAlgorithm;
        # only the GA hyper-parameters live here.
        super().__init__(domain, fitness_function, seed, seed_init, init,max_time)
        self.population_size = population_size
        self.step = step
        self.probability_mutation = probability_mutation
        self.probability_crossover = probability_crossover
        self.elitism = elitism
        self.number_generations = number_generations
        self.search = search

    def get_base(self) -> str:
        # Name of the immediate base class ("BaseGA" for the subclasses here).
        return self.__class__.__base__.__name__

    def get_name(self) -> str:
        # NOTE(review): annotated to return str but the body is `pass`, so it
        # returns None; presumably intended to be abstract like run().
        pass

    @abstractmethod
    def run(self,domain,fitness_function,seed) -> tuple:
        """Execute the algorithm.

        Implementations return (best_solution, best_cost, scores, nfe, seed).
        """
        pass
class GA(BaseGA):
    """Elitist genetic algorithm: offspring favour mutation over crossover."""
    def __init__(self, domain=domain['domain'], fitness_function=fitness_function, seed=random.randint(10, 100), seed_init=True, init=[],max_time=100,
                 population_size=100, step=1, probability_mutation=0.2, elitism=0.2,
                 number_generations=500, search=False) -> None:
        # probability_crossover is fixed to 0: offspring are produced by
        # mutation with probability_mutation, otherwise by crossover.
        super().__init__(domain, fitness_function, seed, seed_init, init,max_time, population_size, step, probability_mutation,
                         0, elitism, number_generations, search)

    def run(self,domain,fitness_function,seed) -> tuple:
        """Run the GA; returns (best_solution, best_cost, scores, nfe, seed)."""
        # Re-initialise with the caller-supplied domain/fitness/seed.
        self.__init__(domain, fitness_function, seed, self.seed_init, self.init,self.max_time)
        population = []
        scores = []
        nfe = 0  # number of fitness evaluations (incremented per generation below)
        for i in range(self.population_size):
            if self.search == True:
                solution, b_c, sc, r_nfe, s = RandomSearch(
                ).run(self.domain, self.fitness_function, self.seed)
                nfe += r_nfe
            # NOTE(review): `solution` is unconditionally reassigned below,
            # so the RandomSearch result above is always discarded.
            if len(self.init) > 0:
                solution = self.init
            else:
                solution = [self.r_init.randint(self.domain[i][0], self.domain[i][1])
                            for i in range(len(self.domain))]
            population.append(solution)
        number_elitism = int(self.elitism * self.population_size)
        self.start_time=time.time()
        for i in range(self.number_generations):
            # Evaluate the population; the default fitness function takes an
            # extra airport-code argument ('FCO').
            if not self.fitness_function.__name__ == 'fitness_function':
                costs = [(self.fitness_function(individual), individual)
                         for individual in population]
            else:
                costs = [(self.fitness_function(individual, 'FCO'), individual)
                         for individual in population]
            nfe += 1
            # costs.sort()
            # Min-heap: only costs[0] is guaranteed to be the lowest cost.
            heapq.heapify(costs)
            # NOTE(review): heap order is not fully sorted, so the elite
            # slice below is approximate beyond the first element.
            ordered_individuals = [individual for (cost, individual) in costs]
            population = ordered_individuals[0:number_elitism]
            if not self.fitness_function.__name__ == 'fitness_function':
                scores.append(self.fitness_function(population[0]))
            else:
                scores.append(self.fitness_function(population[0], 'FCO'))
            # scores.append(fitness_function(population[0], 'FCO'))
            nfe += 1
            # Refill the population: mutate one elite with
            # probability_mutation, otherwise cross over two elites.
            while len(population) < self.population_size:
                if random.random() < self.probability_mutation:
                    m = random.randint(0, number_elitism)
                    population.append(
                        mutation(self.domain, self.step, ordered_individuals[m]))
                else:
                    i1 = random.randint(0, number_elitism)
                    i2 = random.randint(0, number_elitism)
                    population.append(
                        crossover(self.domain, ordered_individuals[i1], ordered_individuals[i2]))
            # Wall-clock budget exceeded: return the current best immediately.
            if time.time()-self.start_time>self.max_time:
                return costs[0][1], costs[0][0], scores, nfe, self.seed
        return costs[0][1], costs[0][0], scores, nfe, self.seed
class ReverseGA(BaseGA):
    """GA variant with the offspring branch inverted: crossover is attempted
    with probability_crossover, otherwise a mutation is applied.
    """
    def __init__(self, domain=domain['domain'], fitness_function=fitness_function, seed=random.randint(10, 100), seed_init=True, init=[],max_time=100,
                 population_size=100, step=1, probability_crossover=0.2, elitism=0.2,
                 number_generations=500, search=False) -> None:
        # probability_mutation is fixed to 0.0; this variant is driven by
        # probability_crossover instead.
        super().__init__(domain, fitness_function, seed, seed_init, init,max_time, population_size, step, 0.0,
                         probability_crossover, elitism, number_generations, search)

    def run(self,domain,fitness_function,seed) -> tuple:
        """Run the GA; returns (best_solution, best_cost, scores, nfe, seed)."""
        # Re-initialise with the caller-supplied domain/fitness/seed.
        self.__init__(domain, fitness_function, seed, self.seed_init, self.init,self.max_time)
        population = []
        scores = []
        nfe = 0  # number of fitness evaluations (incremented per generation below)
        for i in range(self.population_size):
            if self.search == True:
                solution, b_c, sc, r_nfe, s = RandomSearch(
                ).run(self.domain, self.fitness_function, self.seed)
                nfe += r_nfe
            # NOTE(review): `solution` is unconditionally reassigned below,
            # so the RandomSearch result above is always discarded.
            if len(self.init) > 0:
                solution = self.init
            else:
                solution = [self.r_init.randint(self.domain[i][0], self.domain[i][1])
                            for i in range(len(self.domain))]
            population.append(solution)
        number_elitism = int(self.elitism * self.population_size)
        self.start_time=time.time()
        for i in range(self.number_generations):
            # Evaluate the population; the default fitness function takes an
            # extra airport-code argument ('FCO').
            if not self.fitness_function.__name__ == 'fitness_function':
                costs = [(self.fitness_function(individual), individual)
                         for individual in population]
            else:
                costs = [(self.fitness_function(individual, 'FCO'), individual)
                         for individual in population]
            nfe += 1
            # costs.sort()
            # Min-heap: only costs[0] is guaranteed to be the lowest cost.
            heapq.heapify(costs)
            ordered_individuals = [individual for (cost, individual) in costs]
            population = ordered_individuals[0:number_elitism]
            if not self.fitness_function.__name__ == 'fitness_function':
                scores.append(self.fitness_function(population[0]))
            else:
                scores.append(self.fitness_function(population[0], 'FCO'))
            # scores.append(fitness_function(population[0], 'FCO'))
            nfe += 1
            # Refill: crossover with probability_crossover, else mutation —
            # the inverse of GA.run's branch.
            while len(population) < self.population_size:
                if random.random() < self.probability_crossover:
                    i1 = random.randint(0, number_elitism)
                    i2 = random.randint(0, number_elitism)
                    population.append(
                        crossover(self.domain, ordered_individuals[i1], ordered_individuals[i2]))
                else:
                    m = random.randint(0, number_elitism)
                    population.append(
                        mutation(self.domain, self.step, ordered_individuals[m]))
            # Wall-clock budget exceeded: return the current best immediately.
            if time.time()-self.start_time>self.max_time:
                return costs[0][1], costs[0][0], scores, nfe, self.seed
        return costs[0][1], costs[0][0], scores, nfe, self.seed
class GAReversals(BaseGA):
    """GA that periodically reverses the fitness ranking.

    Every ``n_k`` generations the cost list is sorted in descending order
    (a "reversal"), temporarily selecting the worst individuals as elites,
    presumably to escape local optima — confirm intent with the authors.
    """
    def __init__(self, domain=domain['domain'], fitness_function=fitness_function, seed=random.randint(10, 100), seed_init=True, init=[],max_time=100,
                 population_size=100, step=1, probability_mutation=0.2, elitism=0.2,
                 number_generations=500, search=False,n_k=250, step_length=100,) -> None:
        # probability_crossover is fixed to 0.0 (offspring branch is driven
        # by probability_mutation, see run()).
        super().__init__(domain, fitness_function, seed, seed_init, init,max_time, population_size, step, probability_mutation,
                         0.0, elitism, number_generations, search)
        self.n_k = n_k                    # reversal period, in generations
        self.step_length = step_length    # number of reversal sorts per event

    def run(self,domain,fitness_function,seed) -> tuple:
        """Run the GA; returns (best_solution, best_cost, scores, nfe, seed)."""
        # Re-initialise with the caller-supplied domain/fitness/seed.
        self.__init__(domain, fitness_function, seed, self.seed_init, self.init,self.max_time)
        population = []
        scores = []
        nfe = 0  # number of fitness evaluations (incremented per generation)
        rev = 0  # number of reversal events performed
        for i in range(self.population_size):
            if self.search == True:
                solution, b_c, sc, r_nfe, s = RandomSearch(
                ).run(self.domain, self.fitness_function,self.seed)
                nfe += r_nfe
            # NOTE(review): `solution` is unconditionally reassigned below,
            # so the RandomSearch result above is always discarded.
            if len(self.init) > 0:
                solution = self.init
            else:
                solution = [self.r_init.randint(self.domain[i][0], self.domain[i][1])
                            for i in range(len(self.domain))]
            population.append(solution)
        number_elitism = int(self.elitism * self.population_size)
        self.start_time=time.time()
        for i in range(self.number_generations):
            # Evaluate the population; the default fitness function takes an
            # extra airport-code argument ('FCO').
            if not self.fitness_function.__name__ == 'fitness_function':
                costs = [(self.fitness_function(individual), individual)
                         for individual in population]
            else:
                costs = [(self.fitness_function(individual, 'FCO'), individual)
                         for individual in population]
            nfe += 1
            if i % self.n_k == 0 and i != 0:
                # Reversal generation: rank descending so the WORST
                # individuals become the elite slice.
                if self.step_length == 1:
                    costs.sort(reverse=True)
                    rev += 1
                else:
                    rev += 1
                    # NOTE(review): sorting the same list repeatedly is a
                    # no-op after the first pass; step_length only affects
                    # wasted work here, not the result.
                    for _ in range(self.step_length - 1):
                        costs.sort(reverse=True)
                ordered_individuals = [
                    individual for (cost, individual) in costs]
                population = ordered_individuals[0:number_elitism]
                if not self.fitness_function.__name__ == 'fitness_function':
                    scores.append(self.fitness_function(population[0]))
                else:
                    scores.append(self.fitness_function(population[0], 'FCO'))
                nfe += 1
                # NOTE(review): in this branch probability_mutation selects
                # CROSSOVER (the inverse of the normal branch below).
                while len(population) < self.population_size:
                    if random.random() < self.probability_mutation:
                        i1 = random.randint(0, number_elitism)
                        i2 = random.randint(0, number_elitism)
                        population.append(
                            crossover(self.domain, ordered_individuals[i1], ordered_individuals[i2]))
                    else:
                        m = random.randint(0, number_elitism)
                        population.append(
                            mutation(self.domain, self.step, ordered_individuals[m]))
                print(rev)  # To print the number of reversals
            else:
                # Normal generation: min-heap so costs[0] is the lowest cost.
                heapq.heapify(costs)
                ordered_individuals = [individual for (cost, individual) in costs]
                population = ordered_individuals[0:number_elitism]
                if not self.fitness_function.__name__ == 'fitness_function':
                    scores.append(self.fitness_function(population[0]))
                else:
                    scores.append(self.fitness_function(population[0], 'FCO'))
                nfe += 1
                while len(population) < self.population_size:
                    if random.random() < self.probability_mutation:
                        i1 = random.randint(0, number_elitism)
                        i2 = random.randint(0, number_elitism)
                        population.append(
                            crossover(self.domain, ordered_individuals[i1], ordered_individuals[i2]))
                    else:
                        m = random.randint(0, number_elitism)
                        population.append(
                            mutation(self.domain, self.step, ordered_individuals[m]))
            # NOTE(review): if the timeout fires during a reversal
            # generation, costs is sorted descending, so costs[0] is the
            # HIGHEST-cost individual — confirm this is intended.
            if time.time()-self.start_time>self.max_time:
                return costs[0][1], costs[0][0], scores, nfe, self.seed
        return costs[0][1], costs[0][0], scores, nfe, self.seed
if __name__ == '__main__':
    # Demo entry point: load the flight data, run a ReverseGA with a fixed
    # seed, plot the score history and print the resulting schedule.
    read_file('flights.txt')
    sga = ReverseGA(seed_init=False,search=True)
    soln, cost, scores, nfe, seed = sga.run(domain=domain['domain'], fitness_function=fitness_function,
                                            seed=5)
    plot_scores(scores, sga.get_base(),fname='flight_scheduling', save_fig=False)
    print_schedule(soln, 'FCO')
| 46.988806 | 150 | 0.574367 | 1,374 | 12,593 | 5.046579 | 0.086608 | 0.116816 | 0.065763 | 0.034612 | 0.860831 | 0.85636 | 0.85636 | 0.85636 | 0.832997 | 0.832997 | 0 | 0.021372 | 0.323751 | 12,593 | 267 | 151 | 47.164794 | 0.792861 | 0.013182 | 0 | 0.77193 | 0 | 0 | 0.016263 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04386 | false | 0.008772 | 0.048246 | 0.004386 | 0.140351 | 0.013158 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6064094304b6baeabf61286daa02f2551a2125d7 | 2,760 | py | Python | tests/test_couple_rulesets.py | davidjohnoliver/IncomeForecast | f638a16a3bccb576f7977f9ea3fc08047c96ecce | [
"MIT"
] | null | null | null | tests/test_couple_rulesets.py | davidjohnoliver/IncomeForecast | f638a16a3bccb576f7977f9ea3fc08047c96ecce | [
"MIT"
] | null | null | null | tests/test_couple_rulesets.py | davidjohnoliver/IncomeForecast | f638a16a3bccb576f7977f9ea3fc08047c96ecce | [
"MIT"
] | null | null | null | import sim
import couple_rulesets
import solve
def test_alice_runs():
    """Smoke test: a dual-income simulation with the 'alice' ruleset completes."""
    simulation = sim.Dual_Income_Simulation()

    p1 = simulation.partner1_parameters
    p1.age_at_retirement = 60
    p1.year_of_birth = 1990
    p1.age_at_death = 80
    p1.initial_salary = 40000
    p1.initial_savings_rrsp = 5000
    p1.initial_savings_tfsa = 600

    p2 = simulation.partner2_parameters
    p2.age_at_retirement = 64
    p2.year_of_birth = 1989
    p2.age_at_death = 75
    p2.initial_salary = 60000
    p2.initial_savings_rrsp = 2000
    p2.initial_savings_tfsa = 800

    simulation.initial_year = 2025
    simulation.final_savings = 10000

    simulation.set_solver(solve.binary_solver)
    simulation.set_ruleset(
        couple_rulesets.alice(0.06, 80000, 0.04, 75000, 60000, 0.05, 0.1, 0.1)
    )

    simulation.run()

    # One record per simulated year.
    assert len(simulation.all_deltas) == 46
    assert len(simulation.all_funds) == 46
def test_bad_seed_runs():
    """Smoke test: the 'bad_seed' ruleset completes with an optimizing solver."""
    simulation = sim.Dual_Income_Simulation()

    p1 = simulation.partner1_parameters
    p1.age_at_retirement = 60
    p1.year_of_birth = 1990
    p1.age_at_death = 80
    p1.initial_salary = 40000
    p1.initial_savings_rrsp = 5000
    p1.initial_savings_tfsa = 600

    p2 = simulation.partner2_parameters
    p2.age_at_retirement = 64
    p2.year_of_birth = 1989
    p2.age_at_death = 75
    p2.initial_salary = 60000
    p2.initial_savings_rrsp = 2000
    p2.initial_savings_tfsa = 800

    simulation.initial_year = 2025
    simulation.final_savings = 10000

    optimize = solve.Optimizing_Solver(solve.binary_solver, should_invert=True)
    simulation.set_solver(optimize.solve)
    simulation.set_ruleset(
        couple_rulesets.bad_seed(
            0.06,
            80000,
            0.04,
            75000,
            simulation.initial_year,
            0.5,
            0.5,
            0.5,
            0.5,
            0.5,
            p1.year_of_retirement,
            p2.year_of_retirement,
            simulation.final_year,
            0,
            0.05,
            0.05,
            optimize,
        )
    )

    simulation.run()

    # One record per simulated year.
    assert len(simulation.all_deltas) == 46
    assert len(simulation.all_funds) == 46
| 30.32967 | 79 | 0.721739 | 321 | 2,760 | 5.878505 | 0.211838 | 0.124006 | 0.192899 | 0.111288 | 0.859565 | 0.771595 | 0.754637 | 0.754637 | 0.749338 | 0.749338 | 0 | 0.08841 | 0.209058 | 2,760 | 90 | 80 | 30.666667 | 0.775996 | 0 | 0 | 0.661765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.058824 | 1 | 0.029412 | false | 0 | 0.044118 | 0 | 0.073529 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6070e41386f497a5c9397db44aac7b32d3eb297e | 21,354 | py | Python | tests/ci/unit_tests/pipeline_config/test_frames.py | Food-X-Technologies/foodx_devops_tools | 57d1bf1304d9c9a386eaffa427f9eb36c410c350 | [
"MIT"
] | 3 | 2021-06-23T20:53:43.000Z | 2022-01-26T14:19:43.000Z | tests/ci/unit_tests/pipeline_config/test_frames.py | Food-X-Technologies/foodx_devops_tools | 57d1bf1304d9c9a386eaffa427f9eb36c410c350 | [
"MIT"
] | 33 | 2021-08-09T15:44:51.000Z | 2022-03-03T18:28:02.000Z | tests/ci/unit_tests/pipeline_config/test_frames.py | Food-X-Technologies/foodx_devops_tools | 57d1bf1304d9c9a386eaffa427f9eb36c410c350 | [
"MIT"
] | 1 | 2021-06-23T20:53:52.000Z | 2021-06-23T20:53:52.000Z | # Copyright (c) 2021 Food-X Technologies
#
# This file is part of foodx_devops_tools.
#
# You should have received a copy of the MIT License along with foodx_devops_tools.
# If not, see <https://opensource.org/licenses/MIT>.
import pathlib
import pytest
from foodx_devops_tools.pipeline_config import StructuredName, load_frames
from foodx_devops_tools.pipeline_config._structure import FrameFile
from foodx_devops_tools.pipeline_config.exceptions import FrameDefinitionsError
@pytest.fixture
def apply_applications_test(apply_pipeline_config_test):
    """Curry the shared config-loading fixture with the frames loader."""

    def _apply(mock_content: str):
        return apply_pipeline_config_test(mock_content, load_frames)

    return _apply
def test_single_default(apply_applications_test):
    """A minimal frames definition loads with all optional fields defaulted."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: a1_group
        a2:
          steps:
            - name: a2l1
              mode: Complete
              resource_group: a2_group
      folder: some/path
"""

    frames = apply_applications_test(file_text).frames

    assert len(frames.frames) == 1
    assert frames.triggers is None

    f1 = frames.frames["f1"]
    assert f1.folder == pathlib.Path("some/path")
    assert f1.triggers is None

    a1_steps = f1.applications["a1"].steps
    assert len(a1_steps) == 1
    assert a1_steps[0].resource_group == "a1_group"
    assert not a1_steps[0].static_secrets

    a2_steps = f1.applications["a2"].steps
    assert len(a2_steps) == 1
    assert a2_steps[0].name == "a2l1"
    assert a2_steps[0].resource_group == "a2_group"
    assert a2_steps[0].arm_file is None
    assert a2_steps[0].puff_file is None
def test_deploy_delay(apply_applications_test):
    """A bare ``delay_seconds`` entry parses as a dedicated delay step."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: a1_group
            - delay_seconds: 23
      folder: some/path
"""

    frames = apply_applications_test(file_text).frames
    assert len(frames.frames) == 1

    steps = frames.frames["f1"].applications["a1"].steps
    assert len(steps) == 2

    deploy_step, delay_step = steps
    assert deploy_step.resource_group == "a1_group"
    assert not deploy_step.static_secrets
    assert not hasattr(deploy_step, "delay_seconds")

    assert delay_step.delay_seconds == 23
    assert not hasattr(delay_step, "resource_group")
def test_enable_static_secrets(apply_applications_test):
    """``static_secrets: true`` is honoured per step and defaults to false."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: a1_group
              static_secrets: true
        a2:
          steps:
            - name: a2l1
              mode: Complete
              resource_group: a2_group
      folder: some/path
"""

    frames = apply_applications_test(file_text).frames

    assert len(frames.frames) == 1
    assert frames.triggers is None

    f1 = frames.frames["f1"]
    assert f1.folder == pathlib.Path("some/path")
    assert f1.triggers is None

    a1_steps = f1.applications["a1"].steps
    assert len(a1_steps) == 1
    assert a1_steps[0].resource_group == "a1_group"
    assert a1_steps[0].static_secrets

    a2_steps = f1.applications["a2"].steps
    assert len(a2_steps) == 1
    assert a2_steps[0].name == "a2l1"
    assert a2_steps[0].resource_group == "a2_group"
    assert not a2_steps[0].static_secrets
    assert a2_steps[0].arm_file is None
    assert a2_steps[0].puff_file is None
def test_arm_optional(apply_applications_test):
    """An optional ``arm_file`` on a step is parsed into a path."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: a1_group
        a2:
          steps:
            - name: a2l1
              arm_file: something.json
              mode: Complete
              resource_group: a2_group
      folder: some/path
"""

    frames = apply_applications_test(file_text).frames
    a2_step = frames.frames["f1"].applications["a2"].steps[0]
    assert a2_step.arm_file == pathlib.Path("something.json")
def test_global_path_triggers_optional(apply_applications_test):
    """Top-level path triggers are optional and preserved in order."""
    file_text = """---
frames:
  triggers:
    paths:
      - "some/glob/**"
      - "*/stuff/*"
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              resource_group: a1_group
              mode: Incremental
        a2:
          steps:
            - name: a2l1
              resource_group: a2_group
              mode: Complete
              arm_file: something.json
      folder: some/path
"""

    frames = apply_applications_test(file_text).frames
    trigger_paths = frames.triggers.paths
    assert len(trigger_paths) == 2
    assert trigger_paths[0] == "some/glob/**"
    assert trigger_paths[1] == "*/stuff/*"
def test_frame_path_triggers_optional(apply_applications_test):
    """Per-frame path triggers are optional and preserved in order."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              resource_group: a1_group
              mode: Incremental
        a2:
          steps:
            - name: a2l1
              resource_group: a2_group
              mode: Complete
              arm_file: something.json
      folder: some/path
      triggers:
        paths:
          - "some/glob/**"
          - "*/stuff/*"
"""

    frames = apply_applications_test(file_text).frames
    trigger_paths = frames.frames["f1"].triggers.paths
    assert len(trigger_paths) == 2
    assert trigger_paths[0] == "some/glob/**"
    assert trigger_paths[1] == "*/stuff/*"
def test_puff_optional(apply_applications_test):
    """A frame parses when only some steps declare ``puff_file``."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              resource_group: a1_group
              mode: Incremental
        a2:
          steps:
            - name: a2l1
              resource_group: a2_group
              mode: Complete
              puff_file: something.yml
      folder: some/path
"""
    parsed = apply_applications_test(file_text)
    step = parsed.frames.frames["f1"].applications["a2"].steps[0]
    assert step.puff_file == pathlib.Path("something.yml")
def test_frames_sequenced(apply_applications_test):
    """Frames may declare ordering on each other via ``depends_on``."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              resource_group: f1a1
              mode: Complete
        a2:
          steps:
            - name: a2l1
              resource_group: f1a2
              mode: Incremental
      folder: some/f1-path
    f2:
      applications:
        a3:
          steps:
            - name: a3l1
              resource_group: f2a3
              mode: Complete
        a4:
          steps:
            - name: a4l1
              resource_group: f2a4
              mode: Incremental
      depends_on:
        - f1
      folder: some/f2-path
"""
    frames = apply_applications_test(file_text).frames
    assert len(frames.frames) == 2
    assert {"f1", "f2"} <= set(frames.frames)
    assert frames.frames["f2"].depends_on[0] == "f1"
def test_applications_sequenced(apply_applications_test):
    """Applications may declare ordering on sibling applications."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          depends_on:
            - a2
          steps:
            - name: a1l1
              resource_group: f1a1
              mode: Complete
        a2:
          steps:
            - name: a2l1
              resource_group: f1a2
              mode: Incremental
      folder: some/f1-path
"""
    parsed = apply_applications_test(file_text)
    dependencies = parsed.frames.frames["f1"].applications["a1"].depends_on
    assert len(dependencies) == 1
    assert dependencies[0] == "a2"
def test_multiple_unsequenced(apply_applications_test):
    """Multiple frames with no dependencies all parse."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              resource_group: f1a1
              mode: Complete
        a2:
          steps:
            - name: a2l1
              resource_group: f1a2
              mode: Complete
      folder: some/f1-path
    f3:
      applications:
        a5:
          steps:
            - name: a5l1
              resource_group: f3a5
              mode: Complete
        a6:
          steps:
            - name: a6l1
              resource_group: f3a6
              mode: Complete
      folder: some/f3-path
    f2:
      applications:
        a3:
          steps:
            - name: a3l1
              resource_group: f2a3
              mode: Complete
        a4:
          steps:
            - name: a4l1
              resource_group: f2a4
              mode: Complete
      folder: some/f2-path
"""
    frames = apply_applications_test(file_text).frames.frames
    assert len(frames) == 3
    assert {"f1", "f2", "f3"} <= set(frames)
def test_none_raises(apply_applications_test):
    """A document with no content fails frames validation."""
    file_text = """---
"""
    expected_message = r"Error validating frames definition"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
def test_empty_raises(apply_applications_test):
    """An empty ``frames`` key fails frames validation."""
    file_text = """---
frames:
"""
    expected_message = r"Error validating frames definition"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
def test_bad_dependency_raises(apply_applications_test):
    """A frame dependency naming a non-existent frame fails validation."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        - a1
        - a2
      folder: some/f1-path
    f2:
      applications:
        - a3
        - a4
      depends_on:
        - bad_value
      folder: some/f2-path
"""
    expected_message = r"Error validating frames definition"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
def test_duplicate_step_names_raises(apply_applications_test):
    """Two steps sharing a name within an application fail validation."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - arm_file: something.json
              mode: Incremental
              name: same_name
              resource_group: a1_group
            - arm_file: something_else.json
              mode: Incremental
              name: same_name
              resource_group: other_group
      folder: some/f2-path
"""
    expected_message = r"Application step names must be unique"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
def test_missing_name_raises(apply_applications_test):
    """A step without the mandatory ``name`` field fails validation."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: a1_group
        a2:
          steps:
            - mode: Complete
              resource_group: a2_group
      folder: some/path
"""
    expected_message = r"Error validating frames definition"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
def test_missing_mode_raises(apply_applications_test):
    """A step without the mandatory ``mode`` field fails validation."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: a1_group
        a2:
          steps:
            - name: a2l1
              resource_group: a2_group
      folder: some/path
"""
    expected_message = r"Error validating frames definition"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
def test_external_application_dependency_raises(apply_applications_test):
    """An application dependency on another frame's application fails."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1s1
              mode: Incremental
              resource_group: a1_group
          depends_on:
            - a2
      folder: f1/path
    f2:
      applications:
        a2:
          steps:
            - name: a2s1
              mode: Incremental
              resource_group: a2_group
      folder: f2/path
"""
    expected_message = r"Error validating frames definition"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
def test_frame_application_dependency_raises(apply_applications_test):
    """An application dependency naming a frame (not a peer app) fails."""
    file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1s1
              mode: Incremental
              resource_group: a1_group
          depends_on:
            # application dependency can only depend on other peer applications
            - f2
      folder: f1/path
    f2:
      applications:
        a2:
          steps:
            - name: a2s1
              mode: Incremental
              resource_group: a2_group
      folder: f2/path
"""
    expected_message = r"Error validating frames definition"
    with pytest.raises(FrameDefinitionsError, match=expected_message):
        apply_applications_test(file_text)
class TestArmFilePaths:
    """Resolution of per-step ARM template file paths."""

    def test_default(self, apply_applications_test):
        """With no ``arm_file`` declared the file component is ``None``."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"), file=None
            ),
        }
        assert under_test.frames.arm_file_paths() == expected

    def test_explicit(self, apply_applications_test):
        """A declared ``arm_file`` is reported as the file component."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              arm_file: arm_file.json
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"),
                file=pathlib.Path("arm_file.json"),
            )
        }
        assert under_test.frames.arm_file_paths() == expected

    def test_mixed(self, apply_applications_test):
        """Default and explicit steps can be mixed within a frame."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: rgn
        a2:
          steps:
            - name: a2l1
              mode: Incremental
              arm_file: arm_file.json
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"), file=None
            ),
            StructuredName(["f1", "a2", "a2l1"]): FrameFile(
                dir=pathlib.Path("some/path/"),
                file=pathlib.Path("arm_file.json"),
            ),
        }
        assert under_test.frames.arm_file_paths() == expected

    def test_subpath(self, apply_applications_test):
        """An ``arm_file`` may itself contain a relative sub-path."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              arm_file: other/path/arm_file.json
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"),
                file=pathlib.Path("other/path/arm_file.json"),
            )
        }
        assert under_test.frames.arm_file_paths() == expected
class TestPuffFilePaths:
    """Resolution of per-step puff parameter file paths."""

    def test_default(self, apply_applications_test):
        """With no ``puff_file`` declared the file component is ``None``."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"), file=None
            ),
        }
        assert under_test.frames.puff_file_paths() == expected

    def test_explicit(self, apply_applications_test):
        """A declared ``puff_file`` is reported as the file component."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              puff_file: puff_file.yml
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"),
                file=pathlib.Path("puff_file.yml"),
            )
        }
        assert under_test.frames.puff_file_paths() == expected

    def test_mixed(self, apply_applications_test):
        """Default and explicit steps can be mixed within a frame."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: rgn
        a2:
          steps:
            - name: a2l1
              mode: Incremental
              puff_file: puff/file.yml
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"), file=None
            ),
            StructuredName(["f1", "a2", "a2l1"]): FrameFile(
                dir=pathlib.Path("some/path/"),
                file=pathlib.Path("puff/file.yml"),
            ),
        }
        assert under_test.frames.puff_file_paths() == expected

    def test_subpath(self, apply_applications_test):
        """A ``puff_file`` may itself contain a relative sub-path."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              puff_file: other/path/puff_file.yml
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1", "a1", "a1l1"]): FrameFile(
                dir=pathlib.Path("some/path/"),
                file=pathlib.Path("other/path/puff_file.yml"),
            )
        }
        assert under_test.frames.puff_file_paths() == expected
class TestFrameFolders:
    """Resolution of per-frame folder paths."""

    def test_single(self, apply_applications_test):
        """A lone frame maps to its declared folder."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: rgn
      folder: some/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1"]): FrameFile(
                dir=pathlib.Path("some/path"), file=pathlib.Path("")
            )
        }
        assert under_test.frames.frame_folders() == expected

    def test_multiple(self, apply_applications_test):
        """Each frame maps to its own declared folder."""
        file_text = """---
frames:
  frames:
    f1:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: rgn
      folder: some/path
    f2:
      applications:
        a1:
          steps:
            - name: a1l1
              mode: Incremental
              resource_group: rgn
      folder: f2/path
"""
        under_test = apply_applications_test(file_text)
        expected = {
            StructuredName(["f1"]): FrameFile(
                dir=pathlib.Path("some/path"), file=pathlib.Path("")
            ),
            StructuredName(["f2"]): FrameFile(
                dir=pathlib.Path("f2/path"), file=pathlib.Path("")
            ),
        }
        assert under_test.frames.frame_folders() == expected
| 24.516648 | 84 | 0.564578 | 2,200 | 21,354 | 5.260909 | 0.075 | 0.070503 | 0.071367 | 0.120961 | 0.885951 | 0.868844 | 0.849577 | 0.838431 | 0.806204 | 0.799378 | 0 | 0.028464 | 0.330383 | 21,354 | 870 | 85 | 24.544828 | 0.780964 | 0.010068 | 0 | 0.838158 | 0 | 0 | 0.461739 | 0.004543 | 0 | 0 | 0 | 0 | 0.076316 | 1 | 0.039474 | false | 0 | 0.006579 | 0 | 0.052632 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60aef8677edbdb7f887ffc2466167b8eb039ff54 | 76 | py | Python | products.py | saykent/gitwork | 5dc0734137b617428f5a8ee25ceb826b9c5cd2b4 | [
"Apache-2.0"
] | null | null | null | products.py | saykent/gitwork | 5dc0734137b617428f5a8ee25ceb826b9c5cd2b4 | [
"Apache-2.0"
] | null | null | null | products.py | saykent/gitwork | 5dc0734137b617428f5a8ee25ceb826b9c5cd2b4 | [
"Apache-2.0"
] | null | null | null | import json
def product_list():
    """Placeholder for the product listing endpoint; currently a no-op."""
    return None
def product_details():
    """Placeholder for the product details endpoint; currently a no-op."""
    return None
| 8.444444 | 22 | 0.671053 | 10 | 76 | 4.9 | 0.7 | 0.408163 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 76 | 8 | 23 | 9.5 | 0.859649 | 0 | 0 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | true | 0.4 | 0.2 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
60caf9eab601afafeb032250505904b0c3c15167 | 350 | py | Python | generated/users/admin.py | oreon/rtbp | c35f1a712bdc36c725e68a98b21105654c5f5fdc | [
"MIT"
] | 1 | 2017-12-12T17:28:16.000Z | 2017-12-12T17:28:16.000Z | generated/users/admin.py | oreon/rtbp | c35f1a712bdc36c725e68a98b21105654c5f5fdc | [
"MIT"
] | null | null | null | generated/users/admin.py | oreon/rtbp | c35f1a712bdc36c725e68a98b21105654c5f5fdc | [
"MIT"
] | null | null | null |
from django.contrib import admin
from users.models import *
from restbase.commons import CustomModelAdminMixin
class AppUserAdmin(CustomModelAdminMixin, admin.ModelAdmin):
    """Admin options for AppUser; all behaviour comes from the bases."""
class AppRoleAdmin(CustomModelAdminMixin, admin.ModelAdmin):
    """Admin options for AppRole; all behaviour comes from the bases."""
class GroupAdmin(CustomModelAdminMixin, admin.ModelAdmin):
    """Admin options for Group; all behaviour comes from the bases."""
| 17.5 | 61 | 0.754286 | 32 | 350 | 8.25 | 0.5 | 0.295455 | 0.409091 | 0.454545 | 0.340909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.191429 | 350 | 20 | 62 | 17.5 | 0.932862 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
60d6758c0db68e5d74340d62aca55d6c88fa5dbf | 2,099 | py | Python | Project Euler/1-9/python/problem8.py | pybae/etc | ba3d6291ed5dd8e6b6ee18b186a09600def56505 | [
"MIT"
] | null | null | null | Project Euler/1-9/python/problem8.py | pybae/etc | ba3d6291ed5dd8e6b6ee18b186a09600def56505 | [
"MIT"
] | null | null | null | Project Euler/1-9/python/problem8.py | pybae/etc | ba3d6291ed5dd8e6b6ee18b186a09600def56505 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
number = """
7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450V
"""

# pylint: disable=C0103
# The blob above is pasted data and can contain stray non-digit characters
# (note the trailing "V", which previously crashed int() mid-scan), so keep
# only the digits before doing any arithmetic on it.
number = "".join(ch for ch in number if ch.isdigit())

# Project Euler 8: find the greatest product of WINDOW adjacent digits.
WINDOW = 13

# A window containing a 0 simply contributes a product of 0, so a plain
# rescan of every window needs no special-case reset logic (the previous
# rolling-product version also mutated the for-loop index, which has no
# effect in Python and only obscured the control flow).
max_v = 0
for start in range(len(number) - WINDOW + 1):
    product = 1
    for ch in number[start:start + WINDOW]:
        product *= int(ch)
    if product > max_v:
        max_v = product

print(max_v)
| 49.97619 | 1,001 | 0.777513 | 161 | 2,099 | 10.099379 | 0.453416 | 0.01476 | 0.01845 | 0.01722 | 0.02214 | 0.02214 | 0 | 0 | 0 | 0 | 0 | 0.589758 | 0.171987 | 2,099 | 41 | 1,002 | 51.195122 | 0.3458 | 0.218676 | 0 | 0.32 | 0 | 0 | 0.615715 | 0.614487 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.04 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60e3e0a15e440dba5477b3110cfd765589d9fa67 | 4,910 | py | Python | api/metadata/migrations/0003_auto_20180705_1132.py | cad106uk/market-access-api | a357c33bbec93408b193e598a5628634126e9e99 | [
"MIT"
] | null | null | null | api/metadata/migrations/0003_auto_20180705_1132.py | cad106uk/market-access-api | a357c33bbec93408b193e598a5628634126e9e99 | [
"MIT"
] | 51 | 2018-05-31T12:16:31.000Z | 2022-03-08T09:36:48.000Z | api/metadata/migrations/0003_auto_20180705_1132.py | cad106uk/market-access-api | a357c33bbec93408b193e598a5628634126e9e99 | [
"MIT"
] | 2 | 2019-12-24T09:47:42.000Z | 2021-02-09T09:36:51.000Z | # Generated by Django 2.0.5 on 2018-07-05 11:32
from django.db import migrations
barriers_dummies_to_add = [
{
"title": "Goods Barrier type 1",
"description": "Goods Barrier type 1 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "GOODS",
},
{
"title": "Goods Barrier type 2",
"description": "Goods Barrier type 2 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "GOODS",
},
{
"title": "Goods Barrier type 3",
"description": "Goods Barrier type 3 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "GOODS",
},
{
"title": "Services Barrier type 1",
"description": "Services Barrier type 1 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 2",
"description": "Services Barrier type 2 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 3",
"description": "Services Barrier type 3 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 4",
"description": "Services Barrier type 4 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 5",
"description": "Services Barrier type 5 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 6",
"description": "Services Barrier type 6 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 7",
"description": "Services Barrier type 7 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 8",
"description": "Services Barrier type 8 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
{
"title": "Services Barrier type 9",
"description": "Services Barrier type 9 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse accumsan ultrices lectus sodales fermentum. Proin hendrerit turpis sed elit interdum rutrum. Pellentesque congue imperdiet felis.",
"category": "SERVICES",
},
]
def add_dummy_barrier_types(apps, schema_editor):
    """Insert or refresh the dummy BarrierType rows.

    Uses the historical model (``apps.get_model``) so the migration stays
    valid as the live model evolves.  ``update_or_create`` keyed on the
    title replaces the previous get/``DoesNotExist`` branch, which
    duplicated the field assignments across the update and create paths.
    """
    BarrierType = apps.get_model("metadata", "BarrierType")
    for item in barriers_dummies_to_add:
        BarrierType.objects.update_or_create(
            title=item["title"],
            defaults={
                "description": item["description"],
                "category": item["category"],
            },
        )
class Migration(migrations.Migration):
    """Data migration populating the dummy BarrierType rows."""

    dependencies = [
        ("metadata", "0002_auto_20180704_1647"),
    ]

    operations = [
        migrations.RunPython(add_dummy_barrier_types),
    ]
| 53.956044 | 248 | 0.699796 | 539 | 4,910 | 6.335807 | 0.157699 | 0.09019 | 0.100146 | 0.06325 | 0.738507 | 0.738507 | 0.738507 | 0.738507 | 0.738507 | 0.738507 | 0 | 0.014312 | 0.217312 | 4,910 | 90 | 249 | 54.555556 | 0.874317 | 0.009165 | 0 | 0.15 | 1 | 0.15 | 0.698129 | 0.00473 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0125 | false | 0 | 0.0125 | 0 | 0.0625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60fb33d8896a2f3c5d690cc0f49ecf851f032caf | 296 | py | Python | database/__init__.py | pnarvor/nephelae_mapping | 498c04a165ee9163c749a3f47bea6028494fc3f4 | [
"BSD-3-Clause"
] | null | null | null | database/__init__.py | pnarvor/nephelae_mapping | 498c04a165ee9163c749a3f47bea6028494fc3f4 | [
"BSD-3-Clause"
] | null | null | null | database/__init__.py | pnarvor/nephelae_mapping | 498c04a165ee9163c749a3f47bea6028494fc3f4 | [
"BSD-3-Clause"
] | null | null | null | from .SpatializedDatabase import SpbEntry
from .SpatializedDatabase import SpbSortableElement
from .SpatializedDatabase import SpatializedList
from .SpatializedDatabase import SpatializedDatabase
from .NephelaeDataServer import NephelaeDataServer
from .NephelaeDataServer import DatabasePlayer
| 42.285714 | 52 | 0.891892 | 24 | 296 | 11 | 0.333333 | 0.348485 | 0.439394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.087838 | 296 | 6 | 53 | 49.333333 | 0.977778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
71da263e823da8f3fe4e90e8755c066a027ceefb | 9,818 | py | Python | utilipy/tests/helper/tests/test_quantity_array.py | nstarman/utilipy | 17984942145d31126724df23500bafba18fb7516 | [
"BSD-3-Clause"
] | 2 | 2020-11-15T01:48:45.000Z | 2020-12-02T20:44:20.000Z | utilipy/tests/helper/tests/test_quantity_array.py | nstarman/astroPHD | 17984942145d31126724df23500bafba18fb7516 | [
"BSD-3-Clause"
] | 22 | 2020-09-13T17:58:24.000Z | 2022-02-04T19:05:23.000Z | utilipy/tests/helper/tests/test_quantity_array.py | nstarman/astroPHD | 17984942145d31126724df23500bafba18fb7516 | [
"BSD-3-Clause"
] | 1 | 2019-06-17T22:53:51.000Z | 2019-06-17T22:53:51.000Z | # -*- coding: utf-8 -*-
"""Tests for :mod:`~utilipy.tests.quantity_array.quantity_array`."""
__all__ = [
"Test_eltwise_quantity_isclose",
"Test_eltwise_quantity_allclose",
"Test_eltwise_assert_quantity_isclose",
"Test_eltwise_assert_quantity_allclose",
]
##############################################################################
# IMPORTS
# THIRD PARTY
import astropy.units as u
import numpy as np
import pytest
# PROJECT-SPECIFIC
from utilipy.tests.helper import quantity_array # BaseClassDependentTests,
##############################################################################
# CODE
##############################################################################
class Test_eltwise_quantity_isclose:
    """Exercise :func:`~utilipy.tests.helper.eltwise_quantity_isclose`."""

    @property
    def func(self):
        """The function under test."""
        return quantity_array.eltwise_quantity_isclose

    @pytest.mark.parametrize(
        "a,b,rtol,atol,expected", [(1, 1 + 1e-16, 1e-15, None, True)]
    )
    def test_nounit(self, a, b, rtol, atol, expected):
        """Unitless inputs are accepted."""
        result = self.func(a, b, rtol=rtol, atol=atol)
        assert result == expected

    @pytest.mark.parametrize(
        "a,b,rtol,atol,expected", [(1 * u.m, 1 * u.s, 1e-15, None, True)]
    )
    def test_wrongunit(self, a, b, rtol, atol, expected):
        """Incompatible units raise a UnitsError."""
        with pytest.raises(u.UnitsError):
            assert self.func(a, b, rtol, atol)

    @pytest.mark.parametrize(
        "a,b,rtol,atol,expected",
        [
            (1 * u.m, 1 * u.m, 1e-15, None, True),
            (0 * u.m, 0 * u.m, 1e-15, None, True),  # basic
            (1e-17 * u.m, 0 * u.m, 0, 1e-15 * u.m, True),  # close to 0
            (0 * u.m, 1e-17 * u.m, 0, 1e-15 * u.m, True),  # close to 0
            (0 * u.m, 1 * u.m, 1e-15, None, False),  # not close
            (0 * u.m, 1 * u.m, 2, None, True),  # rtol widened until close
        ],
    )
    def test_scalar(self, a, b, rtol, atol, expected):
        """Scalar quantity comparisons.

        .. todo::

            Add hypothesis tests
        """
        result = self.func(a, b, rtol=rtol, atol=atol)
        assert result == expected

    @pytest.mark.parametrize(
        "a,b,rtol,atol,expected",
        [
            ([0, 0] * u.m, [0, 1] * u.m, 1e-15, None, [True, False]),
            ([1e-17, 0] * u.m, [0, 0] * u.m, 0, 1e-15 * u.m, [True, True]),
            ([1, 0] * u.m, [0, 1e-17] * u.m, 0, 1e-15 * u.m, [False, True]),
            ([0, 1] * u.m, [1, 1] * u.m, 1e-15, None, [False, True]),
            ([0, 1] * u.m, [1, 2] * u.m, 2, None, [True, True]),
            # quantities carried inside the list, not on it
            (
                [0 * u.m, 1 * u.s],
                [0 * u.m, 1 * u.s],
                1e-15,
                None,
                [True, True],
            ),
            (
                [0 * u.m, 1 * u.s],
                [0 * u.m, 2 * u.s],
                1e-15,
                None,
                [True, False],
            ),
        ],
    )
    def test_vector(self, a, b, rtol, atol, expected):
        """Element-wise comparisons on vector inputs (regression for #148).

        .. todo::

            Add hypothesis tests
        """
        result = self.func(a, b, rtol=rtol, atol=atol)
        assert np.all(result == expected)
# -------------------------------------------------------------------
class Test_eltwise_quantity_allclose(Test_eltwise_quantity_isclose):
    """Exercise :func:`~utilipy.tests.helper.eltwise_quantity_allclose`."""

    @property
    def func(self):
        """The function under test."""
        return quantity_array.eltwise_quantity_allclose

    @pytest.mark.parametrize(
        "a,b,rtol,atol,expected",
        [
            ([0, 0] * u.m, [0, 0] * u.m, 1e-15, None, True),  # basic
            ([1e-17, 0] * u.m, [0, 0] * u.m, 0, 1e-15 * u.m, True),
            ([0, 0] * u.m, [1e-17, 0] * u.m, 0, 1e-15 * u.m, True),
            ([0, 1] * u.m, [1, 1] * u.m, 1e-15, None, False),  # not close
            ([0, 1] * u.m, [1, 2] * u.m, 2, None, True),
            # quantities carried inside the list, not on it
            ([0 * u.m, 1 * u.s], [0 * u.m, 1 * u.s], 1e-15, None, True),
        ],
    )
    def test_vector(self, a, b, rtol, atol, expected):
        """Reduced (all-elements) comparison on vector inputs.

        Unit tests with quantities IN the list are from [#148].

        .. todo::

            Add hypothesis tests
        """
        result = self.func(a, b, rtol=rtol, atol=atol)
        assert result == expected

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [
            # mismatched units inside the lists
            ([0 * u.m, 1 * u.s], [0 * u.m, 2 * u.m], 1e-15, None),
        ],
    )
    def test_vector_fails(self, a, b, rtol, atol):
        """Mismatched element units raise a UnitsError.

        Unit tests with quantities IN the list are from [#148].

        .. todo::

            Add hypothesis tests
        """
        with pytest.raises(u.UnitsError):
            self.func(a, b, rtol=rtol, atol=atol)
# -------------------------------------------------------------------
class Test_eltwise_assert_quantity_isclose:
    """Exercise `~utilipy.tests.helper.eltwise_assert_quantity_isclose`.

    Covers no-units, wrong-units, scalar, and vector inputs; the function
    under test raises on failure rather than returning a boolean.
    """

    @property
    def func(self):
        """The function under test."""
        return quantity_array.eltwise_assert_quantity_isclose

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [
            (1, 1 + 1e-16, 1e-15, None),
        ],
    )
    def test_nounit(self, a, b, rtol, atol):
        """Unitless inputs are accepted."""
        self.func(a, b, rtol=rtol, atol=atol)

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [(1 * u.m, 1 * u.s, 1e-15, None)],
    )
    def test_wrongunit(self, a, b, rtol, atol):
        """Incompatible units raise a UnitsError."""
        with pytest.raises(u.UnitsError):
            self.func(a, b, rtol, atol)

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [
            (1 * u.m, 1 * u.m, 1e-15, None),
            (0 * u.m, 0 * u.m, 1e-15, None),  # basic
            (1e-17 * u.m, 0 * u.m, 0, 1e-15 * u.m),  # close to 0
            (0 * u.m, 1e-17 * u.m, 0, 1e-15 * u.m),  # close to 0
            (0 * u.m, 1 * u.m, 2, None),  # rtol widened until close
        ],
    )
    def test_scalar(self, a, b, rtol, atol):
        """Scalar comparisons that must not raise.

        .. todo::

            Add hypothesis tests
        """
        self.func(a, b, rtol=rtol, atol=atol)

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [
            (0 * u.m, 1 * u.m, 1e-15, None),
        ],  # not close
    )
    def test_scalar_fails(self, a, b, rtol, atol):
        """Scalar comparisons that must raise AssertionError.

        .. todo::

            Add hypothesis tests
        """
        with pytest.raises(AssertionError):
            self.func(a, b, rtol=rtol, atol=atol)

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [
            ([0, 0] * u.m, [0, 0] * u.m, 1e-15, None),
            ([1e-17, 0] * u.m, [0, 0] * u.m, 0, 1e-15 * u.m),
            ([0, 0] * u.m, [1e-17, 0] * u.m, 0, 1e-15 * u.m),
            ([0, 1] * u.m, [1, 2] * u.m, 2, None),
        ],
    )
    def test_vector(self, a, b, rtol, atol):
        """Vector comparisons that must not raise (regression for #148).

        .. todo::

            Add hypothesis tests
        """
        self.func(a, b, rtol=rtol, atol=atol)

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [([0, 1] * u.m, [1, 1] * u.m, 1e-15, None)],
    )
    def test_vector_fails(self, a, b, rtol, atol):
        """Vector comparisons that must raise AssertionError.

        .. todo::

            Add hypothesis tests
        """
        with pytest.raises(AssertionError):
            self.func(a, b, rtol=rtol, atol=atol)
# -------------------------------------------------------------------
class Test_eltwise_assert_quantity_allclose(
    Test_eltwise_assert_quantity_isclose
):
    """Exercise `~utilipy.tests.helper.eltwise_assert_quantity_allclose`.

    Inherits the scalar/no-unit/wrong-unit cases from
    Test_eltwise_assert_quantity_isclose and overrides the vector cases.
    """

    @property
    def func(self):
        """The function under test."""
        return quantity_array.eltwise_assert_quantity_allclose

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [
            ([0, 0] * u.m, [0, 0] * u.m, 1e-15, None),
            ([1e-17, 0] * u.m, [0, 0] * u.m, 0, 1e-15 * u.m),
            ([0, 0] * u.m, [1e-17, 0] * u.m, 0, 1e-15 * u.m),
            ([0, 1] * u.m, [1, 2] * u.m, 2, None),
            # quantities carried inside the list, not on it
            ([0 * u.m, 1 * u.s], [0 * u.m, 1 * u.s], 1e-15, None),
        ],
    )
    def test_vector(self, a, b, rtol, atol):
        """Vector comparisons that must not raise.

        Unit tests with quantities IN the list are from [#148].

        .. todo::

            Add hypothesis tests
        """
        self.func(a, b, rtol=rtol, atol=atol)

    @pytest.mark.parametrize(
        "a,b,rtol,atol",
        [
            ([0, 1] * u.m, [1, 1] * u.m, 1e-15, None),
            # quantities carried inside the list, not on it
            ([0 * u.m, 1 * u.s], [0 * u.m, 2 * u.m], 1e-15, None),
        ],
    )
    def test_vector_fails(self, a, b, rtol, atol):
        """Vector comparisons that must fail.

        Unit tests with quantities IN the list are from [#148].

        .. todo::

            Add hypothesis tests
        """
        # Either an AssertionError (values) or a UnitsError (units) is valid.
        with pytest.raises((AssertionError, u.UnitsError)):
            self.func(a, b, rtol=rtol, atol=atol)
# -------------------------------------------------------------------
##############################################################################
# END
| 25.836842 | 78 | 0.458546 | 1,287 | 9,818 | 3.429681 | 0.0777 | 0.041686 | 0.031264 | 0.067966 | 0.910285 | 0.883326 | 0.861124 | 0.849116 | 0.801994 | 0.753512 | 0 | 0.047504 | 0.324608 | 9,818 | 379 | 79 | 25.905013 | 0.618157 | 0.227439 | 0 | 0.488636 | 0 | 0 | 0.052942 | 0.035688 | 0 | 0 | 0 | 0.026385 | 0.085227 | 1 | 0.102273 | false | 0 | 0.022727 | 0 | 0.170455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0a920dc44b0e4657500d25db0e98d7ef2e7f4e4 | 25,772 | py | Python | sdk/python/pulumi_linode/node_balancer_node.py | pulumi/pulumi-linode | dcdc078ddcad836dddf6f31879f0f0488bec33b4 | [
"ECL-2.0",
"Apache-2.0"
] | 18 | 2019-05-02T21:14:37.000Z | 2021-12-19T18:37:40.000Z | sdk/python/pulumi_linode/node_balancer_node.py | pulumi/pulumi-linode | dcdc078ddcad836dddf6f31879f0f0488bec33b4 | [
"ECL-2.0",
"Apache-2.0"
] | 79 | 2019-05-01T17:52:03.000Z | 2022-03-31T15:31:56.000Z | sdk/python/pulumi_linode/node_balancer_node.py | pulumi/pulumi-linode | dcdc078ddcad836dddf6f31879f0f0488bec33b4 | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2019-05-02T00:37:23.000Z | 2021-05-04T11:10:40.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['NodeBalancerNodeArgs', 'NodeBalancerNode']
@pulumi.input_type
class NodeBalancerNodeArgs:
    def __init__(__self__, *,
                 address: pulumi.Input[str],
                 config_id: pulumi.Input[int],
                 label: pulumi.Input[str],
                 nodebalancer_id: pulumi.Input[int],
                 mode: Optional[pulumi.Input[str]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a NodeBalancerNode resource.

        :param pulumi.Input[str] address: The private IP Address where this backend can be reached. This must be a private IP address.
        :param pulumi.Input[int] config_id: The ID of the NodeBalancerConfig to access.
        :param pulumi.Input[str] label: The label of the Linode NodeBalancer Node. This is for display purposes only.
        :param pulumi.Input[int] nodebalancer_id: The ID of the NodeBalancer to access.
        :param pulumi.Input[str] mode: Traffic mode for this backend: `accept`, `reject`, `drain` (no new traffic, but pinned connections keep routing here), or `backup`.
        :param pulumi.Input[int] weight: Relative weight (1-255) used when picking a backend for a request that is not yet pinned; higher weights receive more traffic.
        """
        # Required inputs are always recorded on the args object.
        for prop, value in (("address", address),
                            ("config_id", config_id),
                            ("label", label),
                            ("nodebalancer_id", nodebalancer_id)):
            pulumi.set(__self__, prop, value)
        # Optional inputs are recorded only when the caller supplied them.
        if mode is not None:
            pulumi.set(__self__, "mode", mode)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)

    @property
    @pulumi.getter
    def address(self) -> pulumi.Input[str]:
        """The private IP address where this backend can be reached; must be private."""
        return pulumi.get(self, "address")

    @address.setter
    def address(self, value: pulumi.Input[str]):
        pulumi.set(self, "address", value)

    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> pulumi.Input[int]:
        """The ID of the NodeBalancerConfig to access."""
        return pulumi.get(self, "config_id")

    @config_id.setter
    def config_id(self, value: pulumi.Input[int]):
        pulumi.set(self, "config_id", value)

    @property
    @pulumi.getter
    def label(self) -> pulumi.Input[str]:
        """Display-only label of the Linode NodeBalancer Node."""
        return pulumi.get(self, "label")

    @label.setter
    def label(self, value: pulumi.Input[str]):
        pulumi.set(self, "label", value)

    @property
    @pulumi.getter(name="nodebalancerId")
    def nodebalancer_id(self) -> pulumi.Input[int]:
        """The ID of the NodeBalancer to access."""
        return pulumi.get(self, "nodebalancer_id")

    @nodebalancer_id.setter
    def nodebalancer_id(self, value: pulumi.Input[int]):
        pulumi.set(self, "nodebalancer_id", value)

    @property
    @pulumi.getter
    def mode(self) -> Optional[pulumi.Input[str]]:
        """
        Traffic mode for this backend: `accept`, `reject`, `drain` (no new
        traffic, but already-pinned connections keep routing here), or `backup`.
        """
        return pulumi.get(self, "mode")

    @mode.setter
    def mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mode", value)

    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """Relative weight (1-255) for unpinned requests; higher weights get more traffic."""
        return pulumi.get(self, "weight")

    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
@pulumi.input_type
class _NodeBalancerNodeState:
    def __init__(__self__, *,
                 address: Optional[pulumi.Input[str]] = None,
                 config_id: Optional[pulumi.Input[int]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 mode: Optional[pulumi.Input[str]] = None,
                 nodebalancer_id: Optional[pulumi.Input[int]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering NodeBalancerNode resources.

        :param pulumi.Input[str] address: The private IP Address where this backend can be reached. This must be a private IP address.
        :param pulumi.Input[int] config_id: The ID of the NodeBalancerConfig to access.
        :param pulumi.Input[str] label: The label of the Linode NodeBalancer Node. This is for display purposes only.
        :param pulumi.Input[str] mode: Traffic mode for this backend: `accept`, `reject`, `drain` (no new traffic, but pinned connections keep routing here), or `backup`.
        :param pulumi.Input[int] nodebalancer_id: The ID of the NodeBalancer to access.
        :param pulumi.Input[str] status: The current status of this node, based on the configured checks of its NodeBalancer Config. (unknown, UP, DOWN)
        :param pulumi.Input[int] weight: Relative weight (1-255) used when picking a backend for a request that is not yet pinned; higher weights receive more traffic.
        """
        # Every state field is optional; record only the ones that were given.
        for prop, value in (("address", address),
                            ("config_id", config_id),
                            ("label", label),
                            ("mode", mode),
                            ("nodebalancer_id", nodebalancer_id),
                            ("status", status),
                            ("weight", weight)):
            if value is not None:
                pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter
    def address(self) -> Optional[pulumi.Input[str]]:
        """The private IP address where this backend can be reached; must be private."""
        return pulumi.get(self, "address")

    @address.setter
    def address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address", value)

    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> Optional[pulumi.Input[int]]:
        """The ID of the NodeBalancerConfig to access."""
        return pulumi.get(self, "config_id")

    @config_id.setter
    def config_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "config_id", value)

    @property
    @pulumi.getter
    def label(self) -> Optional[pulumi.Input[str]]:
        """Display-only label of the Linode NodeBalancer Node."""
        return pulumi.get(self, "label")

    @label.setter
    def label(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "label", value)

    @property
    @pulumi.getter
    def mode(self) -> Optional[pulumi.Input[str]]:
        """
        Traffic mode for this backend: `accept`, `reject`, `drain` (no new
        traffic, but already-pinned connections keep routing here), or `backup`.
        """
        return pulumi.get(self, "mode")

    @mode.setter
    def mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mode", value)

    @property
    @pulumi.getter(name="nodebalancerId")
    def nodebalancer_id(self) -> Optional[pulumi.Input[int]]:
        """The ID of the NodeBalancer to access."""
        return pulumi.get(self, "nodebalancer_id")

    @nodebalancer_id.setter
    def nodebalancer_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "nodebalancer_id", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The current status of this node, based on the configured checks of its
        NodeBalancer Config. (unknown, UP, DOWN)
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """Relative weight (1-255) for unpinned requests; higher weights get more traffic."""
        return pulumi.get(self, "weight")

    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
class NodeBalancerNode(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 address: Optional[pulumi.Input[str]] = None,
                 config_id: Optional[pulumi.Input[int]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 mode: Optional[pulumi.Input[str]] = None,
                 nodebalancer_id: Optional[pulumi.Input[int]] = None,
                 weight: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        Provides a Linode NodeBalancer Node resource. This can be used to
        create, modify, and delete Linode NodeBalancer Nodes.

        For more information, see
        [Getting Started with NodeBalancers](https://www.linode.com/docs/platform/nodebalancer/getting-started-with-nodebalancers/)
        and the [Linode APIv4 docs](https://developers.linode.com/api/v4#operation/createNodeBalancerNode).

        ## Attributes

        This resource exports the following attributes:

        * `status` - The current status of this node, based on the configured checks of its NodeBalancer Config. (`unknown`, `UP`, `DOWN`).
        * `config_id` - The ID of the NodeBalancerConfig this NodeBalancerNode is attached to.
        * `nodebalancer_id` - The ID of the NodeBalancer this NodeBalancerNode is attached to.

        ## Import

        NodeBalancer Nodes can be imported using the NodeBalancer `nodebalancer_id`,
        the NodeBalancer Config `config_id`, and the NodeBalancer Node `id`,
        separated by commas, e.g.

        ```sh
        $ pulumi import linode:index/nodeBalancerNode:NodeBalancerNode https-foobar-1 1234567,7654321,9999999
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] address: The private IP Address where this backend can be reached. This must be a private IP address.
        :param pulumi.Input[int] config_id: The ID of the NodeBalancerConfig to access.
        :param pulumi.Input[str] label: The label of the Linode NodeBalancer Node. This is for display purposes only.
        :param pulumi.Input[str] mode: Traffic mode for this backend: `accept`, `reject`, `drain` (no new traffic, but pinned connections keep routing here), or `backup`.
        :param pulumi.Input[int] nodebalancer_id: The ID of the NodeBalancer to access.
        :param pulumi.Input[int] weight: Relative weight (1-255) used when picking a backend for a request that is not yet pinned; higher weights receive more traffic.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: NodeBalancerNodeArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Linode NodeBalancer Node resource (args-object form). This
        can be used to create, modify, and delete Linode NodeBalancer Nodes.

        :param str resource_name: The name of the resource.
        :param NodeBalancerNodeArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # NodeBalancerNodeArgs object or individual keyword properties.
        resource_args, opts = _utilities.get_resource_args_opts(NodeBalancerNodeArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is None:
            __self__._internal_init(resource_name, *args, **kwargs)
        else:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       address: Optional[pulumi.Input[str]] = None,
                       config_id: Optional[pulumi.Input[int]] = None,
                       label: Optional[pulumi.Input[str]] = None,
                       mode: Optional[pulumi.Input[str]] = None,
                       nodebalancer_id: Optional[pulumi.Input[int]] = None,
                       weight: Optional[pulumi.Input[int]] = None,
                       __props__=None):
        # Normalize and validate the resource options.
        opts = pulumi.ResourceOptions() if opts is None else opts
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag ourselves.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = NodeBalancerNodeArgs.__new__(NodeBalancerNodeArgs)
            # Required properties must be present unless we are adopting an
            # existing resource via its URN.
            for prop, value, required in (
                ("address", address, True),
                ("config_id", config_id, True),
                ("label", label, True),
                ("mode", mode, False),
                ("nodebalancer_id", nodebalancer_id, True),
                ("weight", weight, False),
            ):
                if required and value is None and not opts.urn:
                    raise TypeError(f"Missing required property '{prop}'")
                __props__.__dict__[prop] = value
            # `status` is an output-only property; the provider fills it in.
            __props__.__dict__["status"] = None
        super(NodeBalancerNode, __self__).__init__(
            'linode:index/nodeBalancerNode:NodeBalancerNode',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            address: Optional[pulumi.Input[str]] = None,
            config_id: Optional[pulumi.Input[int]] = None,
            label: Optional[pulumi.Input[str]] = None,
            mode: Optional[pulumi.Input[str]] = None,
            nodebalancer_id: Optional[pulumi.Input[int]] = None,
            status: Optional[pulumi.Input[str]] = None,
            weight: Optional[pulumi.Input[int]] = None) -> 'NodeBalancerNode':
        """
        Get an existing NodeBalancerNode resource's state with the given name,
        id, and optional extra properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] address: The private IP Address where this backend can be reached. This must be a private IP address.
        :param pulumi.Input[int] config_id: The ID of the NodeBalancerConfig to access.
        :param pulumi.Input[str] label: The label of the Linode NodeBalancer Node. This is for display purposes only.
        :param pulumi.Input[str] mode: Traffic mode for this backend: `accept`, `reject`, `drain` (no new traffic, but pinned connections keep routing here), or `backup`.
        :param pulumi.Input[int] nodebalancer_id: The ID of the NodeBalancer to access.
        :param pulumi.Input[str] status: The current status of this node, based on the configured checks of its NodeBalancer Config. (unknown, UP, DOWN)
        :param pulumi.Input[int] weight: Relative weight (1-255) used when picking a backend for a request that is not yet pinned; higher weights receive more traffic.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _NodeBalancerNodeState.__new__(_NodeBalancerNodeState)
        # Seed the state bag with any hints the caller provided.
        __props__.__dict__.update(
            address=address,
            config_id=config_id,
            label=label,
            mode=mode,
            nodebalancer_id=nodebalancer_id,
            status=status,
            weight=weight)
        return NodeBalancerNode(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def address(self) -> pulumi.Output[str]:
        """The private IP address where this backend can be reached; must be private."""
        return pulumi.get(self, "address")

    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> pulumi.Output[int]:
        """The ID of the NodeBalancerConfig to access."""
        return pulumi.get(self, "config_id")

    @property
    @pulumi.getter
    def label(self) -> pulumi.Output[str]:
        """Display-only label of the Linode NodeBalancer Node."""
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def mode(self) -> pulumi.Output[str]:
        """
        Traffic mode for this backend: `accept`, `reject`, `drain` (no new
        traffic, but already-pinned connections keep routing here), or `backup`.
        """
        return pulumi.get(self, "mode")

    @property
    @pulumi.getter(name="nodebalancerId")
    def nodebalancer_id(self) -> pulumi.Output[int]:
        """The ID of the NodeBalancer to access."""
        return pulumi.get(self, "nodebalancer_id")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[str]:
        """
        The current status of this node, based on the configured checks of its
        NodeBalancer Config. (unknown, UP, DOWN)
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def weight(self) -> pulumi.Output[int]:
        """Relative weight (1-255) for unpinned requests; higher weights get more traffic."""
        return pulumi.get(self, "weight")
| 47.814471 | 398 | 0.644886 | 3,182 | 25,772 | 5.088938 | 0.091766 | 0.059779 | 0.040635 | 0.033965 | 0.879207 | 0.864695 | 0.845304 | 0.828691 | 0.810227 | 0.80578 | 0 | 0.006315 | 0.25648 | 25,772 | 538 | 399 | 47.903346 | 0.838743 | 0.505937 | 0 | 0.654762 | 1 | 0 | 0.082987 | 0.004154 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15873 | false | 0.003968 | 0.019841 | 0 | 0.27381 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0cb618687613160a4f53c16b47f229613bad8b3 | 21,370 | py | Python | fhir/resources/tests/test_searchparameter.py | mmabey/fhir.resources | cc73718e9762c04726cd7de240c8f2dd5313cbe1 | [
"BSD-3-Clause"
] | null | null | null | fhir/resources/tests/test_searchparameter.py | mmabey/fhir.resources | cc73718e9762c04726cd7de240c8f2dd5313cbe1 | [
"BSD-3-Clause"
] | null | null | null | fhir/resources/tests/test_searchparameter.py | mmabey/fhir.resources | cc73718e9762c04726cd7de240c8f2dd5313cbe1 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/SearchParameter
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import io
import json
import os
import sys
import unittest
import pytest
from .. import searchparameter
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class SearchParameterTests(unittest.TestCase):
def instantiate_from(self, filename):
    """Load *filename* from the FHIR unittest data dir and build a SearchParameter.

    :param filename: JSON fixture file name, resolved against the
        ``FHIR_UNITTEST_DATADIR`` environment variable (or the CWD).
    :returns: a ``searchparameter.SearchParameter`` built from the fixture.
    :raises ValueError: if the fixture lacks the required ``base`` attribute.
    """
    datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
    with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
        js = json.load(handle)
    self.assertEqual("SearchParameter", js["resourceType"])
    if "base" not in js:
        # Fixed the ungrammatical message ("doesnt found! ") kept by the
        # original generated code.
        raise ValueError("Required `base` attribute not found!")
    return searchparameter.SearchParameter(js)
def testSearchParameter1(self):
    """Round-trip the ValueSet workflow fixture through JSON and re-check it."""
    try:
        inst = self.instantiate_from("valueset-extensions-ValueSet-workflow.json")
    except ValueError as exc:
        sys.stderr.write(str(exc) + "\n")
        return 1
    self.assertIsNotNone(inst, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter1(inst)
    payload = inst.as_json()
    self.assertEqual("SearchParameter", payload["resourceType"])
    self.implSearchParameter1(searchparameter.SearchParameter(payload))
def implSearchParameter1(self, inst):
    """Field-level checks for the ValueSet workflow search parameter fixture."""
    self.assertEqual(force_bytes(inst.code), force_bytes("workflow"))
    self.assertEqual(force_bytes(inst.description), force_bytes("Optional Extensions Element"))
    self.assertTrue(inst.experimental)
    checks = (
        (inst.id, "valueset-extensions-ValueSet-workflow"),
        (inst.name, "workflow"),
        (inst.status, "draft"),
        (inst.type, "token"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/valueset-extensions-ValueSet-workflow"),
        (inst.version, "4.0.1"),
        (inst.xpath, "f:ValueSet/f:extension[@url='http://hl7.org/fhir/StructureDefinition/valueset-workflowStatus'] | /f:#workflowStatus"),
        (inst.xpathUsage, "normal"),
    )
    for got, want in checks:
        self.assertEqual(force_bytes(got), force_bytes(want))
def testSearchParameter2(self):
    """Round-trip the CodeSystem author fixture through JSON and re-check it."""
    try:
        inst = self.instantiate_from("codesystem-extensions-CodeSystem-author.json")
    except ValueError as exc:
        sys.stderr.write(str(exc) + "\n")
        return 1
    self.assertIsNotNone(inst, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter2(inst)
    payload = inst.as_json()
    self.assertEqual("SearchParameter", payload["resourceType"])
    self.implSearchParameter2(searchparameter.SearchParameter(payload))
def implSearchParameter2(self, inst):
    """Field-level checks for the CodeSystem author search parameter fixture."""
    self.assertEqual(force_bytes(inst.code), force_bytes("author"))
    self.assertEqual(force_bytes(inst.description), force_bytes("Optional Extensions Element"))
    self.assertTrue(inst.experimental)
    checks = (
        (inst.id, "codesystem-extensions-CodeSystem-author"),
        (inst.name, "author"),
        (inst.status, "draft"),
        (inst.type, "string"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/codesystem-extensions-CodeSystem-author"),
        (inst.version, "4.0.1"),
        (inst.xpath, "f:CodeSystem/f:extension[@url='http://hl7.org/fhir/StructureDefinition/codesystem-author'] | /f:#author"),
        (inst.xpathUsage, "normal"),
    )
    for got, want in checks:
        self.assertEqual(force_bytes(got), force_bytes(want))
def testSearchParameter3(self):
    """Round-trip the example-extension fixture through JSON and re-check it."""
    inst = self.instantiate_from("searchparameter-example-extension.json")
    self.assertIsNotNone(inst, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter3(inst)
    payload = inst.as_json()
    self.assertEqual("SearchParameter", payload["resourceType"])
    self.implSearchParameter3(searchparameter.SearchParameter(payload))
def implSearchParameter3(self, inst):
    """Field-level checks for the example-extension search parameter fixture."""
    head = (
        (inst.base[0], "Patient"),
        (inst.code, "part-agree"),
        (inst.contact[0].telecom[0].system, "url"),
        (inst.contact[0].telecom[0].value, "http://hl7.org/fhir"),
        (inst.description, "Search by url for a participation agreement, which is stored in a DocumentReference"),
    )
    for got, want in head:
        self.assertEqual(force_bytes(got), force_bytes(want))
    self.assertTrue(inst.experimental)
    tail = (
        (inst.expression, "DocumentReference.extension('http://example.org/fhir/StructureDefinition/participation-agreement')"),
        (inst.id, "example-extension"),
        (inst.name, "Example Search Parameter on an extension"),
        (inst.publisher, "Health Level Seven International (FHIR Infrastructure)"),
        (inst.status, "draft"),
        (inst.target[0], "DocumentReference"),
        (inst.text.status, "generated"),
        (inst.type, "reference"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/example-extension"),
        (inst.xpath, "f:DocumentReference/f:extension[@url='http://example.org/fhir/StructureDefinition/participation-agreement']"),
        (inst.xpathUsage, "normal"),
    )
    for got, want in tail:
        self.assertEqual(force_bytes(got), force_bytes(want))
def testSearchParameter4(self):
    """Round-trip the QuestionnaireResponse item-subject fixture through JSON."""
    inst = self.instantiate_from(
        "questionnaireresponse-extensions-QuestionnaireResponse-item-subject.json"
    )
    self.assertIsNotNone(inst, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter4(inst)
    payload = inst.as_json()
    self.assertEqual("SearchParameter", payload["resourceType"])
    self.implSearchParameter4(searchparameter.SearchParameter(payload))
def implSearchParameter4(self, inst):
    """Field-level checks for the QuestionnaireResponse item-subject fixture."""
    head = (
        (inst.base[0], "QuestionnaireResponse"),
        (inst.code, "item-subject"),
        (inst.description, "Allows searching for QuestionnaireResponses by item value where the item has isSubject=true"),
    )
    for got, want in head:
        self.assertEqual(force_bytes(got), force_bytes(want))
    self.assertTrue(inst.experimental)
    tail = (
        (inst.expression, "QuestionnaireResponse.item.where(hasExtension('http://hl7.org/fhir/StructureDefinition/questionnaireresponse-isSubject')).answer.value.ofType(Reference)"),
        (inst.id, "questionnaireresponse-extensions-QuestionnaireResponse-item-subject"),
        (inst.name, "item-subject"),
        (inst.status, "draft"),
        (inst.type, "reference"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/questionnaireresponse-extensions-QuestionnaireResponse-item-subject"),
        (inst.version, "4.0.1"),
        (inst.xpathUsage, "normal"),
    )
    for got, want in tail:
        self.assertEqual(force_bytes(got), force_bytes(want))
def testSearchParameter5(self):
    """Round-trip the _filter fixture through JSON and re-check it."""
    inst = self.instantiate_from("searchparameter-filter.json")
    self.assertIsNotNone(inst, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter5(inst)
    payload = inst.as_json()
    self.assertEqual("SearchParameter", payload["resourceType"])
    self.implSearchParameter5(searchparameter.SearchParameter(payload))
def implSearchParameter5(self, inst):
    """Field-level checks for the _filter search parameter fixture."""
    head = (
        (inst.base[0], "Resource"),
        (inst.code, "_filter"),
        (inst.contact[0].name, "FHIR Project"),
        (inst.contact[0].telecom[0].system, "url"),
        (inst.contact[0].telecom[0].value, "http://hl7.org/fhir"),
    )
    for got, want in head:
        self.assertEqual(force_bytes(got), force_bytes(want))
    # Date fields are compared as FHIRDate objects, not byte strings.
    self.assertEqual(inst.date.date, FHIRDate("2018-07-26").date)
    self.assertEqual(inst.date.as_json(), "2018-07-26")
    self.assertEqual(
        force_bytes(inst.description),
        force_bytes(
            "This is the formal declaration for the _filter parameter, documented at [http://hl7.org/fhir/search_filter.html](http://hl7.org/fhir/search_filter.html)"
        ),
    )
    self.assertFalse(inst.experimental)
    tail = (
        (inst.id, "filter"),
        (inst.name, "FilterSearchParameter"),
        (inst.publisher, "Health Level Seven International (FHIR Infrastructure)"),
        (inst.purpose, "Support combination searches when the simple name=value basis of search cannot express what is required"),
        (inst.status, "draft"),
        (inst.text.status, "generated"),
        (inst.type, "special"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/filter"),
        (inst.version, "1"),
    )
    for got, want in tail:
        self.assertEqual(force_bytes(got), force_bytes(want))
def testSearchParameter6(self):
    """Round-trip the searchparameter-example-reference example through as_json()."""
    instance = self.instantiate_from("searchparameter-example-reference.json")
    self.assertIsNotNone(instance, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter6(instance)
    serialized = instance.as_json()
    self.assertEqual("SearchParameter", serialized["resourceType"])
    # Rebuild from the serialized form; the same field assertions must hold.
    self.implSearchParameter6(searchparameter.SearchParameter(serialized))
def implSearchParameter6(self, inst):
    """Assert the expected field values of the example-reference example."""
    # Date fields and the boolean flag are checked directly (no byte
    # normalization applies to them).
    self.assertEqual(inst.date.date, FHIRDate("2013-10-23").date)
    self.assertEqual(inst.date.as_json(), "2013-10-23")
    self.assertTrue(inst.experimental)
    # (actual, expected) pairs; both sides are normalized with force_bytes.
    byte_checks = [
        (inst.base[0], "Condition"),
        (inst.chain[0], "name"),
        (inst.chain[1], "identifier"),
        (inst.code, "subject"),
        (inst.contact[0].name, "[string]"),
        (inst.contact[0].telecom[0].system, "url"),
        (inst.contact[0].telecom[0].value, "http://hl7.org/fhir"),
        (inst.description, "Search by condition subject"),
        (inst.expression, "Condition.subject"),
        (inst.id, "example-reference"),
        (inst.modifier[0], "missing"),
        (inst.name, "Example Search Parameter"),
        (inst.publisher, "Health Level Seven International (FHIR Infrastructure)"),
        (inst.purpose, "Need to search Condition by subject"),
        (inst.status, "draft"),
        (inst.target[0], "Organization"),
        (inst.text.status, "generated"),
        (inst.type, "reference"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/example-reference"),
        (inst.xpathUsage, "normal"),
    ]
    for actual, expected in byte_checks:
        self.assertEqual(force_bytes(actual), force_bytes(expected))
def testSearchParameter7(self):
    """Round-trip the DiagnosticReport assessed-condition example through as_json()."""
    instance = self.instantiate_from(
        "diagnosticreport-genetic-DiagnosticReport-assessed-condition.json"
    )
    self.assertIsNotNone(instance, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter7(instance)
    serialized = instance.as_json()
    self.assertEqual("SearchParameter", serialized["resourceType"])
    # Rebuild from the serialized form; the same field assertions must hold.
    self.implSearchParameter7(searchparameter.SearchParameter(serialized))
def implSearchParameter7(self, inst):
    """Assert the expected field values of the assessed-condition example."""
    self.assertTrue(inst.experimental)
    # (actual, expected) pairs; both sides are normalized with force_bytes.
    byte_checks = [
        (inst.base[0], "DiagnosticReport"),
        (inst.code, "assessed-condition"),
        (inst.description, "Condition assessed by genetic test"),
        (inst.expression, "DiagnosticReport.extension('http://hl7.org/fhir/StructureDefinition/DiagnosticReport-geneticsAssessedCondition')"),
        (inst.id, "diagnosticreport-genetic-DiagnosticReport-assessed-condition"),
        (inst.name, "assessed-condition"),
        (inst.status, "draft"),
        (inst.type, "reference"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/diagnosticreport-genetic-DiagnosticReport-assessed-condition"),
        (inst.version, "4.0.1"),
        (inst.xpathUsage, "normal"),
    ]
    for actual, expected in byte_checks:
        self.assertEqual(force_bytes(actual), force_bytes(expected))
def testSearchParameter8(self):
    """Round-trip the Device din extension example through as_json()."""
    instance = self.instantiate_from("device-extensions-Device-din.json")
    self.assertIsNotNone(instance, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter8(instance)
    serialized = instance.as_json()
    self.assertEqual("SearchParameter", serialized["resourceType"])
    # Rebuild from the serialized form; the same field assertions must hold.
    self.implSearchParameter8(searchparameter.SearchParameter(serialized))
def implSearchParameter8(self, inst):
    """Assert the expected field values of the Device din example."""
    self.assertTrue(inst.experimental)
    # (actual, expected) pairs; both sides are normalized with force_bytes.
    byte_checks = [
        (inst.base[0], "Device"),
        (inst.code, "din"),
        (inst.description, "The donation identification number (DIN)"),
        (inst.expression, "Device.extension('http://hl7.org/fhir/SearchParameter/device-extensions-Device-din')"),
        (inst.id, "device-extensions-Device-din"),
        (inst.name, "din"),
        (inst.status, "draft"),
        (inst.type, "token"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/device-extensions-Device-din"),
        (inst.version, "4.0.1"),
        (inst.xpathUsage, "normal"),
    ]
    for actual, expected in byte_checks:
        self.assertEqual(force_bytes(actual), force_bytes(expected))
def testSearchParameter9(self):
    """Round-trip the Observation gene-identifier example through as_json()."""
    instance = self.instantiate_from(
        "observation-genetic-Observation-gene-identifier.json"
    )
    self.assertIsNotNone(instance, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter9(instance)
    serialized = instance.as_json()
    self.assertEqual("SearchParameter", serialized["resourceType"])
    # Rebuild from the serialized form; the same field assertions must hold.
    self.implSearchParameter9(searchparameter.SearchParameter(serialized))
def implSearchParameter9(self, inst):
    """Assert the expected field values of the gene-identifier example."""
    self.assertTrue(inst.experimental)
    # (actual, expected) pairs; both sides are normalized with force_bytes.
    byte_checks = [
        (inst.base[0], "Observation"),
        (inst.code, "gene-identifier"),
        (inst.description, "HGNC gene symbol and identifier"),
        (inst.expression, "Observation.extension('http://hl7.org/fhir/StructureDefinition/observation-geneticsGene')"),
        (inst.id, "observation-genetic-Observation-gene-identifier"),
        (inst.name, "gene-identifier"),
        (inst.status, "draft"),
        (inst.type, "token"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/observation-genetic-Observation-gene-identifier"),
        (inst.version, "4.0.1"),
        (inst.xpathUsage, "normal"),
    ]
    for actual, expected in byte_checks:
        self.assertEqual(force_bytes(actual), force_bytes(expected))
def testSearchParameter10(self):
    """Round-trip the CodeSystem workflow extension example through as_json().

    If the example file fails to parse, the error is reported on stderr and
    the test is abandoned (matching the original best-effort behavior)
    rather than failed.
    """
    try:
        inst = self.instantiate_from(
            "codesystem-extensions-CodeSystem-workflow.json"
        )
    except ValueError as exc:
        sys.stderr.write(str(exc) + "\n")
        # Fix: the original did `return 1`. Returning a non-None value from
        # a unittest test method triggers a DeprecationWarning on Python
        # 3.11+ (slated to become an error); a bare return keeps the
        # skip-on-parse-failure semantics without the warning.
        return
    self.assertIsNotNone(inst, "Must have instantiated a SearchParameter instance")
    self.implSearchParameter10(inst)
    js = inst.as_json()
    self.assertEqual("SearchParameter", js["resourceType"])
    inst2 = searchparameter.SearchParameter(js)
    self.implSearchParameter10(inst2)
def implSearchParameter10(self, inst):
    """Assert the expected field values of the CodeSystem workflow example."""
    self.assertTrue(inst.experimental)
    # (actual, expected) pairs; both sides are normalized with force_bytes.
    byte_checks = [
        (inst.code, "workflow"),
        (inst.description, "Optional Extensions Element"),
        (inst.id, "codesystem-extensions-CodeSystem-workflow"),
        (inst.name, "workflow"),
        (inst.status, "draft"),
        (inst.type, "token"),
        (inst.url, "http://hl7.org/fhir/SearchParameter/codesystem-extensions-CodeSystem-workflow"),
        (inst.version, "4.0.1"),
        (inst.xpath, "f:CodeSystem/f:extension[@url='http://hl7.org/fhir/StructureDefinition/codesystem-workflowStatus'] | /f:#workflowStatus"),
        (inst.xpathUsage, "normal"),
    ]
    for actual, expected in byte_checks:
        self.assertEqual(force_bytes(actual), force_bytes(expected))
| 43.434959 | 170 | 0.647356 | 2,181 | 21,370 | 6.214122 | 0.10729 | 0.185199 | 0.184461 | 0.230576 | 0.8179 | 0.789198 | 0.768022 | 0.746698 | 0.688704 | 0.644212 | 0 | 0.012465 | 0.23416 | 21,370 | 491 | 171 | 43.523422 | 0.815654 | 0.008376 | 0 | 0.553571 | 0 | 0.015625 | 0.235377 | 0.040646 | 0 | 0 | 0 | 0 | 0.357143 | 1 | 0.046875 | false | 0 | 0.020089 | 0 | 0.078125 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ce0104a2b3dbe0a066e70b3f968f24704b5056d | 6,718 | py | Python | get_dev.py | mogolola/sequence_tagging | f18c7dd0590d263cce55cebee3cafdee3c1c6698 | [
"Apache-2.0"
] | null | null | null | get_dev.py | mogolola/sequence_tagging | f18c7dd0590d263cce55cebee3cafdee3c1c6698 | [
"Apache-2.0"
] | null | null | null | get_dev.py | mogolola/sequence_tagging | f18c7dd0590d263cce55cebee3cafdee3c1c6698 | [
"Apache-2.0"
] | null | null | null | import random
#number_per_file = 100
# Directory containing the generated per-intent / per-variant dev files
# read by every save_dev_file* function below.
# NOTE(review): the name shadows the builtin dir(); kept unchanged since
# all functions in this module reference it.
dir = 'data/generation-projet-dev/'
def save_dev_file():
    """Merge all nine intent/variant dev files, shuffle, and write data/dev.iob.

    Reads '<intent>-<variant>-dev.bio' for every combination of the three
    intents and three phrasing/vocabulary variants, concatenates their
    blank-line-separated examples, shuffles them, prints per-file counts,
    and writes the result to 'data/dev.iob'.

    Fixes vs. the original: files are opened with context managers (no
    handle leak if a read/write raises), the output is assembled with
    str.join instead of quadratic string concatenation, the nine
    copy-pasted read blocks are collapsed into loops, and the "enamples"
    typo in the count messages is corrected.
    """
    intents = ['ask_for_recipe', 'give_cat-ingredients', 'give_ingredients']
    variants = ['newP-newV', 'newP-oldV', 'oldP-newV']

    text = []
    counts = []  # (label, number of examples) per source file, for reporting
    for intent in intents:
        for variant in variants:
            with open(dir + intent + '-' + variant + '-dev.bio', 'r') as f:
                examples = f.read().split('\n\n')
            text.extend(examples)
            counts.append((intent + '_' + variant.replace('-', '_'), len(examples)))

    random.shuffle(text)

    print('Total Number of examples in dev_file: ' + str(len(text)))
    for label, count in counts:
        print("Number of examples of type '" + label + "': " + str(count))
    print("saved to 'data/dev.iob'")

    # Each example is followed by a blank-line separator (trailing separator
    # included), matching the original output format.
    with open('data/dev.iob', 'w') as f:
        f.write('\n\n'.join(text) + '\n\n')
def save_dev_file_newP_newV():
    """Merge the newP-newV dev files for all three intents into data/dev.iob.

    Fixes vs. the original: context-managed file handles, str.join instead
    of quadratic concatenation, read blocks collapsed into a loop, and the
    "enamples" typo in the count messages corrected.
    """
    variant = 'newP-newV'
    intents = ['ask_for_recipe', 'give_cat-ingredients', 'give_ingredients']

    text = []
    counts = []  # (label, number of examples) per source file
    for intent in intents:
        with open(dir + intent + '-' + variant + '-dev.bio', 'r') as f:
            examples = f.read().split('\n\n')
        text.extend(examples)
        counts.append((intent + '_' + variant.replace('-', '_'), len(examples)))

    random.shuffle(text)

    print('Total Number of examples in dev_file: ' + str(len(text)))
    for label, count in counts:
        print("Number of examples of type '" + label + "': " + str(count))
    print("saved to 'data/dev.iob'")

    with open('data/dev.iob', 'w') as f:
        f.write('\n\n'.join(text) + '\n\n')
def save_dev_file_newP_oldV():
    """Merge the newP-oldV dev files for all three intents into data/dev.iob.

    Fixes vs. the original: context-managed file handles, str.join instead
    of quadratic concatenation, read blocks collapsed into a loop, and the
    "enamples" typo in the count messages corrected.
    """
    variant = 'newP-oldV'
    intents = ['ask_for_recipe', 'give_cat-ingredients', 'give_ingredients']

    text = []
    counts = []  # (label, number of examples) per source file
    for intent in intents:
        with open(dir + intent + '-' + variant + '-dev.bio', 'r') as f:
            examples = f.read().split('\n\n')
        text.extend(examples)
        counts.append((intent + '_' + variant.replace('-', '_'), len(examples)))

    random.shuffle(text)

    print('Total Number of examples in dev_file: ' + str(len(text)))
    for label, count in counts:
        print("Number of examples of type '" + label + "': " + str(count))
    print("saved to 'data/dev.iob'")

    with open('data/dev.iob', 'w') as f:
        f.write('\n\n'.join(text) + '\n\n')
def save_dev_file_oldP_newV():
    """Merge the oldP-newV dev files for all three intents into data/dev.iob.

    Fixes vs. the original: context-managed file handles, str.join instead
    of quadratic concatenation, read blocks collapsed into a loop, and the
    "enamples" typo in the count messages corrected.
    """
    variant = 'oldP-newV'
    intents = ['ask_for_recipe', 'give_cat-ingredients', 'give_ingredients']

    text = []
    counts = []  # (label, number of examples) per source file
    for intent in intents:
        with open(dir + intent + '-' + variant + '-dev.bio', 'r') as f:
            examples = f.read().split('\n\n')
        text.extend(examples)
        counts.append((intent + '_' + variant.replace('-', '_'), len(examples)))

    random.shuffle(text)

    print('Total Number of examples in dev_file: ' + str(len(text)))
    for label, count in counts:
        print("Number of examples of type '" + label + "': " + str(count))
    print("saved to 'data/dev.iob'")

    with open('data/dev.iob', 'w') as f:
        f.write('\n\n'.join(text) + '\n\n')
def save_dev_file_little():
    """Merge the give_cat-ingredients dev files (all three variants) into
    data/dev.iob.

    Fixes vs. the original: context-managed file handles, str.join instead
    of quadratic concatenation, read blocks collapsed into a loop, and the
    "enamples" typo in the count messages corrected. (The original prints
    no "saved to" message here, unlike its siblings; that is preserved.)
    """
    intent = 'give_cat-ingredients'
    variants = ['newP-newV', 'newP-oldV', 'oldP-newV']

    text = []
    counts = []  # (label, number of examples) per source file
    for variant in variants:
        with open(dir + intent + '-' + variant + '-dev.bio', 'r') as f:
            examples = f.read().split('\n\n')
        text.extend(examples)
        counts.append((intent + '_' + variant.replace('-', '_'), len(examples)))

    random.shuffle(text)

    print('Total Number of examples in dev_file: ' + str(len(text)))
    for label, count in counts:
        print("Number of examples of type '" + label + "': " + str(count))

    with open('data/dev.iob', 'w') as f:
        f.write('\n\n'.join(text) + '\n\n')
e812b8931c621a631a0a11e3eca1dcae70c89673 | 18,679 | py | Python | tests/test_flow/test_persistence.py | LevyForchh/bionic | f7f9486ac22c928bfbf12c40abf4bfd81751d77e | [
"Apache-2.0"
] | null | null | null | tests/test_flow/test_persistence.py | LevyForchh/bionic | f7f9486ac22c928bfbf12c40abf4bfd81751d77e | [
"Apache-2.0"
] | 1 | 2020-05-19T01:01:27.000Z | 2020-05-19T01:01:27.000Z | tests/test_flow/test_persistence.py | LevyForchh/bionic | f7f9486ac22c928bfbf12c40abf4bfd81751d77e | [
"Apache-2.0"
] | null | null | null | import pytest
import math
from ..helpers import count_calls, ResettingCounter, RoundingProtocol
from bionic.exception import CodeVersioningError
import bionic as bn
class ReadCountingProtocol(bn.protocols.PicklableProtocol):
    """Pickle-based protocol that counts invocations of read().

    Tests use ``times_read_called`` to verify whether a value was loaded
    from the persistent cache or not.
    """

    def __init__(self):
        self.times_read_called = 0
        super().__init__()

    def read(self, path, extension):
        # Record the read before delegating to the real deserialization.
        self.times_read_called += 1
        return super().read(path, extension)
# It would be nice to move the builder setup into fixtures, but since we need
# to access the bound functions as well (to check the number of times they were
# called), it's easiest to just have one long test.
def test_caching_and_invalidation(builder):
    """End-to-end check of persistent caching.

    Verifies that persisted values survive a flow rebuild, that an entity
    declared with @bn.persist(False) is recomputed on each rebuild, and that
    changing an input invalidates exactly the downstream entities depending
    on it (including with multi-valued inputs).
    """
    # Set up the builder with singleton values.
    builder.assign('x', 2)
    builder.assign('y', 3)
    builder.assign('z', 4)

    @builder
    @count_calls
    def xy(x, y):
        return x * y

    @builder
    @bn.persist(False)
    @count_calls
    def yz(y, z):
        return y * z

    @builder
    @count_calls
    def xy_plus_yz(xy, yz):
        return xy + yz

    # Access the downstream values.
    flow = builder.build()

    # Each entity is computed exactly once even when fetched twice.
    assert flow.get('xy') == 6
    assert flow.get('xy') == 6
    assert xy.times_called() == 1

    assert flow.get('yz') == 12
    assert flow.get('yz') == 12
    assert yz.times_called() == 1

    assert flow.get('xy_plus_yz') == 18
    assert flow.get('xy_plus_yz') == 18
    assert xy_plus_yz.times_called() == 1

    # Rebuild the flow (resetting the in-memory cache) and confirm that
    # xy and xy_plus_yz are still cached.
    flow = builder.build()

    assert flow.get('xy') == 6
    assert xy.times_called() == 0

    assert flow.get('yz') == 12
    # Note yz is not cached.
    assert yz.times_called() == 1

    assert flow.get('xy_plus_yz') == 18
    assert xy_plus_yz.times_called() == 0

    # Change the value of z, and confirm that yz and xy_plus_yz are recomputed.
    flow = flow.setting('z', -4)

    assert flow.get('xy') == 6
    assert xy.times_called() == 0

    assert flow.get('yz') == -12
    assert yz.times_called() == 1

    assert flow.get('xy_plus_yz') == -6
    assert flow.get('xy_plus_yz') == -6
    assert xy_plus_yz.times_called() == 1

    # Update x and y to have multiple values, and confirm that xy and
    # xy_plus_yz are recomputed.
    flow = builder.build()\
        .setting('x', values=[2, -2])\
        .setting('y', values=[3, 6])

    assert flow.get('xy', set) == {-2*6, -2*3, 2*3, 2*6}  # noqa: E226
    # Note that we only call xy 3 times, because one value was already cached.
    assert xy.times_called() == 3

    assert flow.get('yz', set) == {3*4, 6*4}  # noqa: E226
    assert yz.times_called() == 2

    assert flow.get('xy_plus_yz', set) == {
        -2*3+3*4, -2*6+6*4, 2*3+3*4, 2*6+6*4}  # noqa: E226
    assert xy.times_called() == 0
    assert yz.times_called() == 0
    assert xy_plus_yz.times_called() == 3

    flow = builder.build()\
        .setting('x', values=[2, -2])\
        .setting('y', values=[3, 6])

    # All xy / xy_plus_yz values are now cached; only yz is recomputed.
    assert flow.get('xy', set) == {-12, -6, 6, 12}
    assert xy.times_called() == 0

    assert flow.get('yz', set) == {3*4, 6*4}  # noqa: E226
    assert yz.times_called() == 2

    assert flow.get('xy_plus_yz', set) == {
        -2*3+3*4, -2*6+6*4, 2*3+3*4, 2*6+6*4}  # noqa: E226
    assert xy.times_called() == 0
    assert yz.times_called() == 0
    assert xy_plus_yz.times_called() == 0

    # Update y to have a different, overlapped set of values, and check that
    # the minimal set of recomputations are performed.
    flow = flow.setting('y', values=[6, 9])

    assert flow.get('xy', set) == {-2*6, -2*9, 2*6, 2*9}  # noqa: E226
    assert xy.times_called() == 2

    assert flow.get('yz', set) == {6*4, 9*4}  # noqa: E226
    assert yz.times_called() == 2

    assert flow.get('xy_plus_yz', set) == {
        -2*6+6*4, -2*9+9*4, 2*6+6*4, 2*9+9*4}  # noqa: E226
    assert xy_plus_yz.times_called() == 2

    # This is mainly just to check that the cache wrapper returns a sane set of
    # case keys.
    key_names = flow.get('xy_plus_yz', 'series').index.names
    for name in ['x', 'y']:
        assert name in key_names
def test_versioning(builder):
    """Versioning in the default mode.

    Changing a function's body without bumping @bn.version reuses the stale
    cached value; bumping the major version forces recomputation; bumping
    only the minor version (or making a cosmetic change under the same
    version) keeps the cache.
    """
    call_counter = ResettingCounter()

    builder.assign('x', 2)
    builder.assign('y', 3)

    @builder
    def f(x, y):
        call_counter.mark()
        return x + y

    assert builder.build().get('f') == 5
    assert builder.build().get('f') == 5
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Body changed but no version bump: the stale cached value (5) is reused.
    @builder  # noqa: F811
    def f(x, y):
        call_counter.mark()
        return x * y

    assert builder.build().get('f') == 5
    assert call_counter.times_called() == 0

    builder.delete('f')

    # Major version added: recomputed with the new body.
    @builder  # noqa: F811
    @bn.version(1)
    def f(x, y):
        call_counter.mark()
        return x * y

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Cosmetic change under the same version: cache is kept.
    @builder  # noqa: F811
    @bn.version(1)
    def f(x, y):
        call_counter.mark()
        return y * x

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 0

    builder.delete('f')

    # Minor version bump alone does not invalidate the cache.
    @builder  # noqa: F811
    @bn.version(major=1, minor=1)
    def f(x, y):
        call_counter.mark()
        return y * x

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 0

    # Functional change without a major bump: stale value still returned.
    @builder  # noqa: F811
    @bn.version(major=1, minor=1)
    def f(x, y):
        call_counter.mark()
        return x ** y

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 0

    builder.delete('f')

    # Major bump picks up the new behavior.
    @builder  # noqa: F811
    @bn.version(major=2)
    def f(x, y):
        call_counter.mark()
        return x ** y

    assert builder.build().get('f') == 8
    assert call_counter.times_called() == 1
def test_indirect_versioning(builder):
    """Version changes of an upstream entity propagate to its consumers.

    f depends on y; y's cached value is only refreshed when y's version is
    bumped, and only then is f recomputed.
    """
    y_call_counter = ResettingCounter()
    f_call_counter = ResettingCounter()

    builder.assign('x', 2)

    @builder
    def y():
        y_call_counter.mark()
        return 3

    @builder
    def f(x, y):
        f_call_counter.mark()
        return x + y

    assert builder.build().get('f') == 5
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # y's body changed without a version bump: nothing is recomputed.
    @builder  # noqa: F811
    def y():
        y_call_counter.mark()
        return 4

    assert builder.build().get('f') == 5
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0

    # Bumping y's version recomputes y and, transitively, f.
    @builder  # noqa: F811
    @bn.version(1)
    def y():
        y_call_counter.mark()
        return 4

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # Cosmetic change to y under the same version: cache kept throughout.
    @builder  # noqa: F811
    @bn.version(1)
    def y():
        y_call_counter.mark()
        return len('xxxx')

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0

    # Minor bump alone does not invalidate anything.
    @builder  # noqa: F811
    @bn.version(1, minor=1)
    def y():
        y_call_counter.mark()
        return len('xxxx')

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0

    # Changing the input x recomputes f but not y (y takes no inputs).
    builder.set('x', 5)
    assert builder.build().get('f') == 9
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 1

    # Reverting x hits the earlier cached f value.
    builder.set('x', 2)
    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0
def test_versioning_assist(builder):
    """Versioning in 'assist' mode.

    Same scenario as test_versioning, but here any bytecode change made
    without a corresponding version bump raises CodeVersioningError instead
    of silently returning the stale cached value.
    """
    call_counter = ResettingCounter()

    builder.set('core__versioning_mode', 'assist')

    builder.assign('x', 2)
    builder.assign('y', 3)

    @builder
    def f(x, y):
        call_counter.mark()
        return x + y

    assert builder.build().get('f') == 5
    assert builder.build().get('f') == 5
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Body changed without a version bump: assist mode flags it.
    @builder  # noqa: F811
    def f(x, y):
        call_counter.mark()
        return x * y

    with pytest.raises(CodeVersioningError):
        builder.build().get('f')

    builder.delete('f')

    @builder  # noqa: F811
    @bn.version(1)
    def f(x, y):
        call_counter.mark()
        return x * y

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Even a cosmetic change is flagged if the version is unchanged.
    @builder  # noqa: F811
    @bn.version(1)
    def f(x, y):
        call_counter.mark()
        return y * x

    with pytest.raises(CodeVersioningError):
        builder.build().get('f')

    builder.delete('f')

    # A minor bump acknowledges a cosmetic change; the cache is kept.
    @builder  # noqa: F811
    @bn.version(major=1, minor=1)
    def f(x, y):
        call_counter.mark()
        return y * x

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 0

    # Functional change under the same version pair: flagged again.
    @builder  # noqa: F811
    @bn.version(major=1, minor=1)
    def f(x, y):
        call_counter.mark()
        return x ** y

    with pytest.raises(CodeVersioningError):
        builder.build().get('f')

    builder.delete('f')

    @builder  # noqa: F811
    @bn.version(major=2)
    def f(x, y):
        call_counter.mark()
        return x ** y

    assert builder.build().get('f') == 8
    assert call_counter.times_called() == 1
def test_indirect_versioning_assist(builder):
    """Indirect versioning in 'assist' mode.

    An unversioned change to the upstream entity y causes
    CodeVersioningError when the downstream f is requested.
    """
    y_call_counter = ResettingCounter()
    f_call_counter = ResettingCounter()

    builder.set('core__versioning_mode', 'assist')

    builder.assign('x', 2)

    @builder
    def y():
        y_call_counter.mark()
        return 3

    @builder
    def f(x, y):
        f_call_counter.mark()
        return x + y

    assert builder.build().get('f') == 5
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # y changed without a version bump: flagged when f is requested.
    @builder  # noqa: F811
    def y():
        y_call_counter.mark()
        return 4

    with pytest.raises(CodeVersioningError):
        builder.build().get('f')

    @builder  # noqa: F811
    @bn.version(1)
    def y():
        y_call_counter.mark()
        return 4

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # Cosmetic change to y under the same version: flagged again.
    @builder  # noqa: F811
    @bn.version(1)
    def y():
        y_call_counter.mark()
        return len('xxxx')

    with pytest.raises(CodeVersioningError):
        builder.build().get('f')

    # Minor bump acknowledges the cosmetic change; caches are kept.
    @builder  # noqa: F811
    @bn.version(1, minor=1)
    def y():
        y_call_counter.mark()
        return len('xxxx')

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0

    # Input changes still work normally in assist mode.
    builder.set('x', 5)
    assert builder.build().get('f') == 9
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 1

    builder.set('x', 2)
    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0
def test_versioning_auto(builder):
    """Versioning in 'auto' mode.

    Any bytecode change is detected and triggers recomputation, even with
    no @bn.version annotation; only a minor-version-style cosmetic change
    under the same major version pair keeps the cache... except that in
    auto mode the bytecode itself is compared, so each distinct body below
    recomputes unless it is bytecode-identical modulo the minor bump.
    """
    call_counter = ResettingCounter()

    builder.set('core__versioning_mode', 'auto')

    builder.assign('x', 2)
    builder.assign('y', 3)

    @builder
    def f(x, y):
        call_counter.mark()
        return x + y

    assert builder.build().get('f') == 5
    assert builder.build().get('f') == 5
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Body changed, no version annotation: auto mode recomputes anyway.
    @builder  # noqa: F811
    def f(x, y):
        call_counter.mark()
        return x * y

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Adding a version annotation changes the entity's identity: recomputed.
    @builder  # noqa: F811
    @bn.version(1)
    def f(x, y):
        call_counter.mark()
        return x * y

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Even a cosmetic bytecode change recomputes in auto mode.
    @builder  # noqa: F811
    @bn.version(1)
    def f(x, y):
        call_counter.mark()
        return y * x

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 1

    builder.delete('f')

    # Same bytecode with a minor bump: cache is kept.
    @builder  # noqa: F811
    @bn.version(major=1, minor=1)
    def f(x, y):
        call_counter.mark()
        return y * x

    assert builder.build().get('f') == 6
    assert call_counter.times_called() == 0

    # Functional change: recomputed with the new result.
    @builder  # noqa: F811
    @bn.version(major=1, minor=1)
    def f(x, y):
        call_counter.mark()
        return x ** y

    assert builder.build().get('f') == 8
    assert call_counter.times_called() == 1

    builder.delete('f')

    @builder  # noqa: F811
    @bn.version(major=2)
    def f(x, y):
        call_counter.mark()
        return x ** y

    assert builder.build().get('f') == 8
    assert call_counter.times_called() == 1
def test_indirect_versioning_auto(builder):
    """Indirect versioning in 'auto' mode.

    Bytecode changes to the upstream y are detected automatically and
    propagate to the downstream f without any manual version bump.
    """
    y_call_counter = ResettingCounter()
    f_call_counter = ResettingCounter()

    builder.set('core__versioning_mode', 'auto')

    builder.assign('x', 2)

    @builder
    def y():
        y_call_counter.mark()
        return 3

    @builder
    def f(x, y):
        f_call_counter.mark()
        return x + y

    assert builder.build().get('f') == 5
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # y changed with no version annotation: both recomputed automatically.
    @builder  # noqa: F811
    def y():
        y_call_counter.mark()
        return 4

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # Adding a version annotation changes y's identity: recomputed.
    @builder  # noqa: F811
    @bn.version(1)
    def y():
        y_call_counter.mark()
        return 4

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # Cosmetic bytecode change also recomputes in auto mode.
    @builder  # noqa: F811
    @bn.version(1)
    def y():
        y_call_counter.mark()
        return len('xxxx')

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 1
    assert f_call_counter.times_called() == 1

    # Same bytecode with a minor bump: caches are kept.
    @builder  # noqa: F811
    @bn.version(1, minor=1)
    def y():
        y_call_counter.mark()
        return len('xxxx')

    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0

    # Input changes behave as usual.
    builder.set('x', 5)
    assert builder.build().get('f') == 9
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 1

    builder.set('x', 2)
    assert builder.build().get('f') == 6
    assert y_call_counter.times_called() == 0
    assert f_call_counter.times_called() == 0
def test_all_returned_results_are_deserialized(builder):
    """Every get() result must come from the serialized form.

    The protocol rounds on the round trip (get('pi') == 3 != math.pi per
    the asserts below), so if the raw in-memory value from the single
    compute call leaked through, the first assertion would fail.
    """
    @builder
    @RoundingProtocol()
    @count_calls
    def pi():
        return math.pi

    assert builder.build().get('pi') == 3
    assert builder.build().get('pi') == 3
    assert builder.build().get('pi') != math.pi
    assert pi.times_called() == 1
def test_deps_of_cached_values_not_needed(builder):
    """Loading a cached value must not load its dependencies.

    After z has been persisted, a fresh flow can serve z with one read of
    z's artifact and zero additional reads of y's.
    """
    y_protocol = ReadCountingProtocol()
    z_protocol = ReadCountingProtocol()

    builder.assign('x', 2)

    @builder
    @y_protocol
    def y(x):
        return x + 1

    @builder
    @z_protocol
    def z(y):
        return y + 1

    flow = builder.build()
    assert flow.get('x') == 2
    assert flow.get('y') == 3
    assert flow.get('z') == 4

    # Repeated gets are served from memory: no extra reads.
    assert flow.get('x') == 2
    assert flow.get('y') == 3
    assert flow.get('z') == 4

    assert y_protocol.times_read_called == 1
    assert z_protocol.times_read_called == 1

    # A fresh flow reads z's artifact once more, but never touches y's.
    flow = builder.build()
    assert flow.get('z') == 4

    assert y_protocol.times_read_called == 1
    assert z_protocol.times_read_called == 2
def test_gather_cache_invalidation(builder):
    """Cache invalidation for a bn.gather-ed dependency.

    z gathers all values of x into a frame, so changing x's value set
    recomputes every z instance, while changing one y value recomputes
    only the affected instance.
    """
    builder.assign('x', values=[1, 2])
    builder.assign('y', values=[2, 3])

    @builder
    @bn.gather('x', 'x', 'df')
    @count_calls
    def z(df, y):
        return df['x'].sum() + y

    assert builder.build().get('z', set) == {5, 6}
    assert z.times_called() == 2
    assert builder.build().get('z', set) == {5, 6}
    assert z.times_called() == 0

    # A new x value set invalidates both z instances.
    assert builder.build().setting('x', values=[2, 3]).get('z', set) == {7, 8}
    assert z.times_called() == 2

    # y=3 was already computed; only the y=4 instance is new.
    builder.set('y', values=[3, 4])
    assert builder.build().get('z', set) == {6, 7}
    assert z.times_called() == 1
def test_gather_cache_invalidation_with_over_vars(builder):
    """Cache invalidation when the gathered frame also carries the over-var y."""
    builder.assign('x', values=[1, 2])
    builder.assign('y', values=[2, 3])

    @builder
    @bn.gather('x', 'y', 'df')
    @count_calls
    def z(df):
        return df.sum().sum()

    assert builder.build().get('z', set) == {7, 9}
    assert z.times_called() == 2
    assert builder.build().get('z', set) == {7, 9}
    assert z.times_called() == 0

    # If we change one of the values of `x`, both values of `z` should change
    # (because each instance depends on both values of `x`).
    assert builder.build().setting('x', values=[2, 3]).get('z', set) == {9, 11}
    assert z.times_called() == 2

    # If we change one of the values of `y`, only one value of `z` should
    # change.
    assert builder.build().setting('y', values=[3, 4]).get('z', set) == {9, 11}
    assert z.times_called() == 1
class Point(object):
    """Minimal 2-D point; used as a non-primitive flow input in tests."""

    def __init__(self, x, y):
        self.x, self.y = x, y
def test_complex_input_type(builder):
    """A non-primitive (Point) input can be assigned, cached, and multi-valued."""
    builder.assign('point', Point(2, 3))

    @builder
    def x(point):
        return point.x

    @builder
    def y(point):
        return point.y

    @builder
    @count_calls
    def x_plus_y(x, y):
        return x + y

    flow = builder.build()
    assert flow.get('x_plus_y') == 5
    assert x_plus_y.times_called() == 1
    assert flow.get('x_plus_y') == 5
    assert x_plus_y.times_called() == 0

    # Re-setting with two Point values: the Point(2, 3) case is already
    # cached, so only the Point(4, 5) case is computed.
    builder = flow.to_builder()
    builder.set('point', values=(Point(2, 3), Point(4, 5)))
    flow = builder.build()
    assert flow.get('x_plus_y', set) == {5, 9}
    assert x_plus_y.times_called() == 1
    assert flow.get('x_plus_y', set) == {5, 9}
    assert x_plus_y.times_called() == 0
def test_persisting_none(builder):
    """A None value round-trips through the persistent cache (the second
    flow serves it from cache; the entity is computed only once)."""
    @builder
    @count_calls
    def none():
        return None

    assert builder.build().get('none') is None
    assert builder.build().get('none') is None
    assert none.times_called() == 1
def test_disable_memory_caching(builder):
    """@bn.memoize(False) disables in-memory caching: each get() re-reads
    the persisted artifact. Combining memoize(False) with persist(False)
    is rejected with a ValueError (there would be nowhere to cache)."""
    x_protocol = ReadCountingProtocol()

    @builder
    @x_protocol
    @bn.memoize(False)
    def x():
        return 1

    flow = builder.build()
    assert flow.get('x') == 1
    assert flow.get('x') == 1
    # Without memoization, each get() hits the protocol's read().
    assert x_protocol.times_read_called == 2

    with pytest.raises(ValueError):
        @builder
        @x_protocol
        @bn.persist(False)
        @bn.memoize(False)
        def y():
            return 1

        flow = builder.build()
        assert flow.get('y') == 1
e82618267489f588a09c4deabfba73220e04a678 | 16,391 | py | Python | sdk/python/pulumi_yandex/compute_disk_placement_group.py | pulumi/pulumi-yandex | 559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e | [
"ECL-2.0",
"Apache-2.0"
] | 9 | 2021-04-20T15:39:41.000Z | 2022-02-20T09:14:39.000Z | sdk/python/pulumi_yandex/compute_disk_placement_group.py | pulumi/pulumi-yandex | 559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e | [
"ECL-2.0",
"Apache-2.0"
] | 56 | 2021-04-20T11:31:03.000Z | 2022-03-31T15:53:06.000Z | sdk/python/pulumi_yandex/compute_disk_placement_group.py | pulumi/pulumi-yandex | 559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ComputeDiskPlacementGroupArgs', 'ComputeDiskPlacementGroup']
@pulumi.input_type
class ComputeDiskPlacementGroupArgs:
def __init__(__self__, *,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ComputeDiskPlacementGroup resource.
:param pulumi.Input[str] description: A description of the Disk Placement Group.
:param pulumi.Input[str] folder_id: Folder that the resource belongs to. If value is omitted, the default provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the Disk Placement Group.
:param pulumi.Input[str] name: The name of the Disk Placement Group.
:param pulumi.Input[str] zone: ID of the zone where the Disk Placement Group resides.
"""
if description is not None:
pulumi.set(__self__, "description", description)
if folder_id is not None:
pulumi.set(__self__, "folder_id", folder_id)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if name is not None:
pulumi.set(__self__, "name", name)
if zone is not None:
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description of the Disk Placement Group.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> Optional[pulumi.Input[str]]:
"""
Folder that the resource belongs to. If value is omitted, the default provider folder is used.
"""
return pulumi.get(self, "folder_id")
@folder_id.setter
def folder_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "folder_id", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A set of key/value label pairs to assign to the Disk Placement Group.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Disk Placement Group.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def zone(self) -> Optional[pulumi.Input[str]]:
"""
ID of the zone where the Disk Placement Group resides.
"""
return pulumi.get(self, "zone")
@zone.setter
def zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone", value)
@pulumi.input_type
class _ComputeDiskPlacementGroupState:
def __init__(__self__, *,
created_at: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering ComputeDiskPlacementGroup resources.
:param pulumi.Input[str] description: A description of the Disk Placement Group.
:param pulumi.Input[str] folder_id: Folder that the resource belongs to. If value is omitted, the default provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the Disk Placement Group.
:param pulumi.Input[str] name: The name of the Disk Placement Group.
:param pulumi.Input[str] status: Status of the Disk Placement Group.
:param pulumi.Input[str] zone: ID of the zone where the Disk Placement Group resides.
"""
if created_at is not None:
pulumi.set(__self__, "created_at", created_at)
if description is not None:
pulumi.set(__self__, "description", description)
if folder_id is not None:
pulumi.set(__self__, "folder_id", folder_id)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if name is not None:
pulumi.set(__self__, "name", name)
if status is not None:
pulumi.set(__self__, "status", status)
if zone is not None:
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "created_at")
@created_at.setter
def created_at(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "created_at", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description of the Disk Placement Group.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> Optional[pulumi.Input[str]]:
"""
Folder that the resource belongs to. If value is omitted, the default provider folder is used.
"""
return pulumi.get(self, "folder_id")
@folder_id.setter
def folder_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "folder_id", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A set of key/value label pairs to assign to the Disk Placement Group.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Disk Placement Group.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
Status of the Disk Placement Group.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def zone(self) -> Optional[pulumi.Input[str]]:
"""
ID of the zone where the Disk Placement Group resides.
"""
return pulumi.get(self, "zone")
@zone.setter
def zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone", value)
class ComputeDiskPlacementGroup(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
A Disk Placement Group resource. For more information, see
[the official documentation](https://cloud.yandex.com/docs/compute/concepts/disk#nr-disks).
## Example Usage
```python
import pulumi
import pulumi_yandex as yandex
group1 = yandex.ComputeDiskPlacementGroup("group1",
description="my description",
folder_id="abc*********123")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: A description of the Disk Placement Group.
:param pulumi.Input[str] folder_id: Folder that the resource belongs to. If value is omitted, the default provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the Disk Placement Group.
:param pulumi.Input[str] name: The name of the Disk Placement Group.
:param pulumi.Input[str] zone: ID of the zone where the Disk Placement Group resides.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[ComputeDiskPlacementGroupArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A Disk Placement Group resource. For more information, see
[the official documentation](https://cloud.yandex.com/docs/compute/concepts/disk#nr-disks).
## Example Usage
```python
import pulumi
import pulumi_yandex as yandex
group1 = yandex.ComputeDiskPlacementGroup("group1",
description="my description",
folder_id="abc*********123")
```
:param str resource_name: The name of the resource.
:param ComputeDiskPlacementGroupArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ComputeDiskPlacementGroupArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ComputeDiskPlacementGroupArgs.__new__(ComputeDiskPlacementGroupArgs)
__props__.__dict__["description"] = description
__props__.__dict__["folder_id"] = folder_id
__props__.__dict__["labels"] = labels
__props__.__dict__["name"] = name
__props__.__dict__["zone"] = zone
__props__.__dict__["created_at"] = None
__props__.__dict__["status"] = None
super(ComputeDiskPlacementGroup, __self__).__init__(
'yandex:index/computeDiskPlacementGroup:ComputeDiskPlacementGroup',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
created_at: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
folder_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None) -> 'ComputeDiskPlacementGroup':
"""
Get an existing ComputeDiskPlacementGroup resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: A description of the Disk Placement Group.
:param pulumi.Input[str] folder_id: Folder that the resource belongs to. If value is omitted, the default provider folder is used.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: A set of key/value label pairs to assign to the Disk Placement Group.
:param pulumi.Input[str] name: The name of the Disk Placement Group.
:param pulumi.Input[str] status: Status of the Disk Placement Group.
:param pulumi.Input[str] zone: ID of the zone where the Disk Placement Group resides.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ComputeDiskPlacementGroupState.__new__(_ComputeDiskPlacementGroupState)
__props__.__dict__["created_at"] = created_at
__props__.__dict__["description"] = description
__props__.__dict__["folder_id"] = folder_id
__props__.__dict__["labels"] = labels
__props__.__dict__["name"] = name
__props__.__dict__["status"] = status
__props__.__dict__["zone"] = zone
return ComputeDiskPlacementGroup(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> pulumi.Output[str]:
return pulumi.get(self, "created_at")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
A description of the Disk Placement Group.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> pulumi.Output[str]:
"""
Folder that the resource belongs to. If value is omitted, the default provider folder is used.
"""
return pulumi.get(self, "folder_id")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A set of key/value label pairs to assign to the Disk Placement Group.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the Disk Placement Group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
"""
Status of the Disk Placement Group.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def zone(self) -> pulumi.Output[Optional[str]]:
"""
ID of the zone where the Disk Placement Group resides.
"""
return pulumi.get(self, "zone")
| 40.17402 | 138 | 0.630102 | 1,915 | 16,391 | 5.207833 | 0.08564 | 0.101474 | 0.108092 | 0.097062 | 0.788629 | 0.762759 | 0.736689 | 0.722551 | 0.717437 | 0.696079 | 0 | 0.000909 | 0.261851 | 16,391 | 407 | 139 | 40.272727 | 0.823374 | 0.294796 | 0 | 0.733051 | 1 | 0 | 0.069813 | 0.013418 | 0 | 0 | 0 | 0 | 0 | 1 | 0.161017 | false | 0.004237 | 0.021186 | 0.008475 | 0.279661 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1c3671bfc6332f3424ddea5eaeaa6013b8fc4c0b | 18,139 | py | Python | config/qtile/Managers/ScreenManager.py | dat-adi/Dotfiles | 7a541aba2bbdd88736bebc9e82f6921ab4a3e03b | [
"Apache-2.0"
] | 2 | 2021-05-06T15:58:29.000Z | 2021-10-02T14:12:08.000Z | config/qtile/Managers/ScreenManager.py | dat-adi/dotfiles | 7a541aba2bbdd88736bebc9e82f6921ab4a3e03b | [
"Apache-2.0"
] | null | null | null | config/qtile/Managers/ScreenManager.py | dat-adi/dotfiles | 7a541aba2bbdd88736bebc9e82f6921ab4a3e03b | [
"Apache-2.0"
] | null | null | null | # -*- coding:utf-8 -*-
import os
from libqtile import bar, layout, widget, hook, qtile
from libqtile.config import Click, Drag, Group, Key, KeyChord, Match, Screen
def get_two_screens(colors):
groupbox_defaults = dict(
margin_y=3,
margin_x=0,
padding_y=7,
padding_x=7,
borderwidth=3,
active=colors[2],
inactive=colors[7],
rounded=False,
highlight_color=colors[1],
highlight_method="line",
this_current_screen_border=colors[6],
this_screen_border=colors[4],
other_current_screen_border=colors[6],
other_screen_border=colors[4],
foreground=colors[2],
background=colors[0],
)
screens = [
Screen(
bottom=bar.Bar(
[
widget.CurrentLayoutIcon(
custom_icon_paths=[os.path.expanduser("~/.config/qtile/icons")],
foreground=colors[0],
background=colors[1],
padding=0,
scale=0.7,
),
widget.Sep(
linewidth=0,
padding=6,
foreground=colors[2],
background=colors[0],
),
widget.GroupBox(
visible_groups=["SYS"],
font="FiraCode Nerd Font", # ? using the font is vital for loading the icon
fontsize=15,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DEV"],
font="FiraCode Nerd Font",
fontsize=17,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["WWW"],
font="Font Awesome 5 Free",
fontsize=25,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DIS"],
font="FiraCode Nerd Font",
fontsize=16,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["TEAMS"],
font="FiraCode Nerd Font",
fontsize=17,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DOC"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["MUS"],
font="Font Awesome 5 Free",
fontsize=27,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["VID"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["VBOX"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.Spacer(10),
# WindowName
widget.WindowName(
format=" {name}",
max_chars=80,
background=colors[0],
foreground=colors[6],
),
widget.Chord(
chords_colors={
"launch": ("#0000ff", "#ffffff"),
},
name_transform=lambda name: name.upper(),
),
widget.Systray(icon_size=16, background=colors[0], padding=5),
widget.Spacer(10),
# Backlight
widget.TextBox(
text="", padding=8, foreground=colors[3], fontsize=25
),
widget.Backlight(
foreground=colors[3],
change_command="light -S {0}",
backlight_name="intel_backlight",
),
widget.Spacer(10),
widget.CheckUpdates(
update_interval=1800,
distro="Arch",
display_format="{updates} Updates",
mouse_callbacks={
"Button1": lambda: qtile.cmd_spawn(
"alacritty -e sudo pacman -Syu"
)
},
foreground=colors[3],
),
widget.Spacer(10),
# Volume
widget.TextBox(
text="墳", foreground=colors[3], padding=6, fontsize=23
),
widget.Volume(foreground=colors[3]),
widget.Spacer(10),
# Time
widget.TextBox(
text="", fontsize=21, padding=6, foreground=colors[3]
),
widget.Clock(foreground=colors[3], format="%d-%m-%Y | %a %I:%M %p"),
widget.Spacer(10),
# CPU
widget.TextBox(
text="", fontsize=23, padding=8, foreground=colors[3]
),
widget.CPU(format="{load_percent}%", foreground=colors[3]),
widget.Spacer(10),
# Battery
widget.TextBox(text="", fontsize=14, foreground=colors[3]),
widget.Battery(
foreground=colors[3],
low_foreground="d08770",
format="{percent:2.0%}",
),
widget.Spacer(10),
],
24,
background=colors[0],
),
),
Screen(
bottom=bar.Bar(
[
widget.CurrentLayoutIcon(
custom_icon_paths=[os.path.expanduser("~/.config/qtile/icons")],
foreground=colors[0],
background=colors[1],
padding=0,
scale=0.7,
),
widget.Sep(
linewidth=0,
padding=6,
foreground=colors[2],
background=colors[0],
),
widget.GroupBox(
visible_groups=["SYS"],
font="FiraCode Nerd Font", # ? using the font is vital for loading the icon
fontsize=15,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DEV"],
font="FiraCode Nerd Font",
fontsize=17,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["WWW"],
font="Font Awesome 5 Free",
fontsize=25,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DIS"],
font="FiraCode Nerd Font",
fontsize=16,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["TEAMS"],
font="FiraCode Nerd Font",
fontsize=17,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DOC"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["MUS"],
font="Font Awesome 5 Free",
fontsize=27,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["VID"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["VBOX"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.Spacer(10),
# WindowName
widget.WindowName(
format=" {name}",
max_chars=80,
background=colors[0],
foreground=colors[6],
),
widget.Chord(
chords_colors={
"launch": ("#0000ff", "#ffffff"),
},
name_transform=lambda name: name.upper(),
),
widget.Systray(icon_size=16, background=colors[0], padding=5),
widget.Spacer(10),
# Backlight
widget.TextBox(
text="", padding=8, foreground=colors[3], fontsize=25
),
widget.Backlight(
foreground=colors[3],
change_command="light -S {0}",
backlight_name="intel_backlight",
),
widget.Spacer(10),
# Volume
widget.TextBox(
text="墳", foreground=colors[3], padding=6, fontsize=23
),
widget.Volume(foreground=colors[3]),
widget.Spacer(10),
# Time
widget.TextBox(
text="", fontsize=21, padding=6, foreground=colors[3]
),
widget.Clock(foreground=colors[3], format="%d-%m-%Y | %a %I:%M %p"),
widget.Spacer(10),
# CPU
widget.TextBox(
text="", fontsize=23, padding=8, foreground=colors[3]
),
widget.CPU(format="{load_percent}%", foreground=colors[3]),
widget.Spacer(10),
# Battery
widget.TextBox(text="", fontsize=14, foreground=colors[3]),
widget.Battery(
foreground=colors[3],
low_foreground="d08770",
format="{percent:2.0%}",
),
widget.Spacer(10),
],
24,
background=colors[0],
),
),
]
return screens
def get_one_screens(colors):
groupbox_defaults = dict(
margin_y=3,
margin_x=0,
padding_y=5,
padding_x=7,
borderwidth=3,
active=colors[2],
inactive=colors[7],
rounded=False,
highlight_color=colors[1],
highlight_method="line",
this_current_screen_border=colors[6],
this_screen_border=colors[4],
other_current_screen_border=colors[6],
other_screen_border=colors[4],
foreground=colors[2],
background=colors[0],
)
screens = [
Screen(
top=bar.Bar(
[
widget.CurrentLayoutIcon(
custom_icon_paths=[os.path.expanduser("~/.config/qtile/icons")],
foreground=colors[0],
background=colors[1],
padding=0,
scale=0.7,
),
widget.Sep(
linewidth=0,
padding=6,
foreground=colors[2],
background=colors[0],
),
widget.GroupBox(
visible_groups=["SYS"],
font="FiraCode Nerd Font", # ? using the font is vital for loading the icon
fontsize=15,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DEV"],
font="FiraCode Nerd Font",
fontsize=17,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["WWW"],
font="Font Awesome 5 Free",
fontsize=25,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DIS"],
font="FiraCode Nerd Font",
fontsize=16,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["TEAMS"],
font="FiraCode Nerd Font",
fontsize=17,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["DOC"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["MUS"],
font="Font Awesome 5 Free",
fontsize=27,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["VID"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.GroupBox(
visible_groups=["VBOX"],
font="Font Awesome 5 Free",
fontsize=20,
**groupbox_defaults
),
widget.Spacer(500),
# WindowName
widget.WindowName(
format="{name}",
font="Ubuntu Mono Bold",
fontsize=10,
max_chars=80,
background=colors[0],
foreground=colors[6],
),
widget.Chord(
chords_colors={
"launch": ("#0000ff", "#ffffff"),
},
name_transform=lambda name: name.upper(),
),
widget.Systray(icon_size=16, background=colors[0], padding=5),
widget.Spacer(10),
# Backlight
widget.TextBox(
text="", padding=8, foreground=colors[3], fontsize=25
),
widget.Backlight(
foreground=colors[3],
change_command="light -S {0}",
backlight_name="intel_backlight",
),
widget.Spacer(10),
widget.CheckUpdates(
update_interval=1800,
distro="Arch",
display_format="{updates} Updates",
mouse_callbacks={
"Button1": lambda: qtile.cmd_spawn(
"alacritty -e sudo pacman -Syu"
)
},
foreground=colors[3],
),
widget.Spacer(10),
# Volume
widget.TextBox(
text="墳", foreground=colors[3], padding=6, fontsize=23
),
widget.Volume(foreground=colors[3]),
widget.Spacer(10),
# Time
widget.TextBox(
text="", fontsize=21, padding=6, foreground=colors[3]
),
widget.Clock(foreground=colors[3], format="%d-%m-%Y | %a %I:%M %p"),
widget.Spacer(10),
# CPU
widget.TextBox(
text="", fontsize=23, padding=8, foreground=colors[3]
),
widget.CPU(format="{load_percent}%", foreground=colors[3]),
widget.Spacer(10),
# Battery
widget.TextBox(text="", fontsize=14, foreground=colors[3]),
widget.Battery(
foreground=colors[3],
low_foreground="d08770",
format="{percent:2.0%}",
),
widget.Spacer(10),
],
24,
background=colors[0],
),
),
]
return screens
| 39.177106 | 100 | 0.364077 | 1,249 | 18,139 | 5.192154 | 0.136109 | 0.106091 | 0.083886 | 0.112413 | 0.967618 | 0.967618 | 0.967618 | 0.967618 | 0.967618 | 0.967618 | 0 | 0.039531 | 0.543966 | 18,139 | 462 | 101 | 39.261905 | 0.742747 | 0.016318 | 0 | 0.940367 | 0 | 0 | 0.064029 | 0.003535 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004587 | false | 0 | 0.006881 | 0 | 0.016055 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c7465697c0153b004a072d7810f8a4473af2e2b9 | 34,779 | py | Python | test/test_transform.py | jormacmoo/ah | dde2f179d1ad70ededa491ba0472b38ca9ef758e | [
"RSA-MD"
] | null | null | null | test/test_transform.py | jormacmoo/ah | dde2f179d1ad70ededa491ba0472b38ca9ef758e | [
"RSA-MD"
] | 13 | 2021-07-07T13:06:58.000Z | 2021-07-30T04:17:18.000Z | test/test_transform.py | smith-tinkerlab/repytah | 12b5cbf4ef97fcdd36e6de355b570b5af400ca55 | [
"RSA-MD"
] | 3 | 2019-07-26T19:28:39.000Z | 2019-11-20T00:24:39.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Unit tests for Aligned Hierarchies, transform.py
"""
import unittest
import numpy as np
from repytah.transform import remove_overlaps
from repytah.transform import __create_anno_remove_overlaps \
as create_anno_remove_overlaps
from repytah.transform import __separate_anno_markers \
as separate_anno_markers
class TestTransform(unittest.TestCase):
def test_create_anno_remove_overlaps_single_row_input(self):
"""
Tests if __create_anno_remove_overlaps works with a single-row matrix.
"""
input_mat = np.array([2, 2, 4, 4, 1, 1])
song_length = 10
band_width = 1
expect_pattern_row = np.array([0, 1, 0, 1, 0, 0, 0, 0, 0, 0])
expect_k_lst_out = np.array([[2, 2, 4, 4, 1, 1]])
expect_overlaps_lst = np.array([])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_create_anno_remove_overlaps_small_input_overlaps_only(self):
"""
Tests if __create_anno_remove_overlaps works with a small matrix
containing only overlaps.
"""
input_mat = np.array([[1, 4, 11, 14, 4, 1],
[4, 7, 14, 17, 4, 1]])
song_length = 20
band_width = 4
expect_pattern_row = np.array(
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
)
expect_k_lst_out = np.array([])
expect_overlaps_lst = np.array([[1, 4, 11, 14, 4, 1],
[4, 7, 14, 17, 4, 2]])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_create_anno_remove_overlaps_large_input_no_overlaps(self):
"""
Tests if __create_anno_remove_overlaps works with a large matrix
containing no overlaps and having bandwidth larger than 1.
"""
input_mat = np.array([[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[8, 9, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 2],
[3, 4, 16, 17, 2, 2],
[9, 10, 16, 17, 2, 2],
[4, 5, 10, 11, 2, 3],
[4, 5, 17, 18, 2, 3],
[10, 11, 17, 18, 2, 3],
[7, 8, 14, 15, 2, 4],
[11, 12, 18, 19, 2, 5]])
song_length = 19
band_width = 2
expect_pattern_row = np.array(
[0, 1, 2, 3, 0, 0, 4, 1, 2, 3, 5, 0, 0, 4, 1, 2, 3, 5, 0]
)
expect_k_lst_out = np.array([[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 2],
[3, 4, 16, 17, 2, 2],
[4, 5, 10, 11, 2, 3],
[4, 5, 17, 18, 2, 3],
[7, 8, 14, 15, 2, 4],
[8, 9, 15, 16, 2, 1],
[9, 10, 16, 17, 2, 2],
[10, 11, 17, 18, 2, 3],
[11, 12, 18, 19, 2, 5]])
expect_overlaps_lst = np.array([])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_create_anno_remove_overlaps_large_input_no_overlaps_bw_1(self):
"""
Tests if __create_anno_remove_overlaps works with a large matrix
containing no overlaps and having bandwidth equal to 1.
"""
input_mat = np.array([[8, 8, 14, 14, 1, 1],
[8, 8, 56, 56, 1, 1],
[8, 8, 62, 62, 1, 1],
[8, 8, 104, 104, 1, 1],
[8, 8, 110, 110, 1, 1],
[14, 14, 56, 56, 1, 1],
[14, 14, 62, 62, 1, 1],
[14, 14, 104, 104, 1, 1],
[14, 14, 110, 110, 1, 1],
[56, 56, 62, 62, 1, 1],
[56, 56, 104, 104, 1, 1],
[56, 56, 110, 110, 1, 1],
[62, 62, 104, 104, 1, 1],
[62, 62, 110, 110, 1, 1],
[104, 104, 110, 110, 1, 1]])
song_length = 119
band_width = 1
expect_pattern_row = np.array([
0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0
])
expect_k_lst_out = np.array([[8, 8, 14, 14, 1, 1],
[8, 8, 56, 56, 1, 1],
[8, 8, 62, 62, 1, 1],
[8, 8, 104, 104, 1, 1],
[8, 8, 110, 110, 1, 1],
[14, 14, 56, 56, 1, 1],
[14, 14, 62, 62, 1, 1],
[14, 14, 104, 104, 1, 1],
[14, 14, 110, 110, 1, 1],
[56, 56, 62, 62, 1, 1],
[56, 56, 104, 104, 1, 1],
[56, 56, 110, 110, 1, 1],
[62, 62, 104, 104, 1, 1],
[62, 62, 110, 110, 1, 1],
[104, 104, 110, 110, 1, 1]])
expect_overlaps_lst = np.array([])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_create_anno_remove_overlaps_wrong_bandwidth(self):
"""
Tests if __create_anno_remove_overlaps works with a matrix
when a non-existing bandwidth is given.
"""
input_mat = np.array([[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[8, 9, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 2],
[3, 4, 16, 17, 2, 2],
[9, 10, 16, 17, 2, 2],
[4, 5, 10, 11, 2, 3],
[4, 5, 17, 18, 2, 3],
[10, 11, 17, 18, 2, 3],
[7, 8, 14, 15, 2, 4],
[11, 12, 18, 19, 2, 5]])
song_length = 19
band_width = 3
expect_pattern_row = np.array(
[0, 1, 2, 3, 0, 0, 4, 1, 2, 3, 5, 0, 0, 4, 1, 2, 3, 5, 0]
)
expect_k_lst_out = np.array([[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 2],
[3, 4, 16, 17, 2, 2],
[4, 5, 10, 11, 2, 3],
[4, 5, 17, 18, 2, 3],
[7, 8, 14, 15, 2, 4],
[8, 9, 15, 16, 2, 1],
[9, 10, 16, 17, 2, 2],
[10, 11, 17, 18, 2, 3],
[11, 12, 18, 19, 2, 5]])
expect_overlaps_lst = np.array([])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_create_anno_remove_overlaps_some_overlaps(self):
"""
Tests if __create_anno_remove_overlaps works with a matrix
containing both overlapping and non-overlapping repeats.
"""
input_mat = np.array([[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[8, 9, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 1],
[3, 4, 16, 17, 2, 1],
[9, 10, 16, 17, 2, 1],
[4, 5, 10, 11, 2, 2],
[4, 5, 17, 18, 2, 2],
[10, 11, 17, 18, 2, 2],
[7, 8, 14, 15, 2, 3],
[11, 12, 18, 19, 2, 4]])
song_length = 19
band_width = 2
expect_pattern_row = np.array(
[0, 0, 0, 1, 0, 0, 2, 0, 0, 1, 3, 0, 0, 2, 0, 0, 1, 3, 0]
)
expect_k_lst_out = np.array([[4, 5, 10, 11, 2, 1],
[4, 5, 17, 18, 2, 1],
[7, 8, 14, 15, 2, 2],
[10, 11, 17, 18, 2, 1],
[11, 12, 18, 19, 2, 3]])
expect_overlaps_lst = np.array([[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[8, 9, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 2],
[3, 4, 16, 17, 2, 2],
[9, 10, 16, 17, 2, 2]])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_create_anno_remove_overlaps_skipped_anno_small_input(self):
"""
Tests that step 2 of __create_anno_remove_overlaps is able to check
whether the annotation has a repeat associated to it for a small
matrix.
"""
input_mat = np.array([[2, 2, 8, 8, 1, 0],
[2, 2, 10, 10, 1, 1],
[3, 3, 4, 4, 1, 2],
[3, 3, 6, 6, 1, 2]])
song_length = 10
band_width = 1
expect_pattern_row = np.array([0, 1, 2, 2, 0, 2, 0, 0, 0, 1])
expect_k_lst_out = np.array([[2, 2, 8, 8, 1, 0],
[2, 2, 10, 10, 1, 1],
[3, 3, 4, 4, 1, 2],
[3, 3, 6, 6, 1, 2]])
expect_overlaps_lst = np.array([])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_create_anno_remove_overlaps_skipped_anno_large_input(self):
"""
Tests if step 2 of __create_anno_remove_overlaps is able to check
whether the annotation has a repeat associated to it for a large
matrix.
"""
input_mat = np.array([[2, 2, 8, 8, 1, 1],
[2, 2, 15, 15, 1, 1],
[8, 8, 15, 15, 1, 1],
[3, 3, 5, 5, 1, 2],
[3, 3, 9, 9, 1, 2],
[5, 5, 9, 9, 1, 2],
[3, 3, 11, 11, 1, 2],
[5, 5, 11, 11, 1, 2],
[9, 9, 11, 11, 1, 2],
[3, 3, 16, 16, 1, 2],
[5, 5, 16, 16, 1, 2],
[9, 9, 16, 16, 1, 2],
[11, 11, 16, 16, 1, 2],
[3, 3, 18, 18, 1, 2],
[5, 5, 18, 18, 1, 2],
[9, 9, 18, 18, 1, 2],
[11, 11, 18, 18, 1, 2],
[16, 16, 18, 18, 1, 2],
[4, 4, 10, 10, 1, 3],
[4, 4, 17, 17, 1, 3],
[10, 10, 17, 17, 1, 3],
[7, 7, 14, 14, 1, 4],
[12, 12, 19, 19, 1, 6],
[2, 2, 12, 12, 1, 6]])
song_length = 19
band_width = 1
expect_pattern_row = np.array(
[0, 5, 2, 3, 2, 0, 4, 1, 2, 3, 2, 5, 0, 4, 1, 2, 3, 2, 5]
)
expect_k_lst_out = np.array([[2, 2, 8, 8, 1, 1],
[2, 2, 12, 12, 1, 5],
[2, 2, 15, 15, 1, 1],
[3, 3, 5, 5, 1, 2],
[3, 3, 9, 9, 1, 2],
[3, 3, 11, 11, 1, 2],
[3, 3, 16, 16, 1, 2],
[3, 3, 18, 18, 1, 2],
[4, 4, 10, 10, 1, 3],
[4, 4, 17, 17, 1, 3],
[5, 5, 9, 9, 1, 2],
[5, 5, 11, 11, 1, 2],
[5, 5, 16, 16, 1, 2],
[5, 5, 18, 18, 1, 2],
[7, 7, 14, 14, 1, 4],
[8, 8, 15, 15, 1, 1],
[9, 9, 11, 11, 1, 2],
[9, 9, 16, 16, 1, 2],
[9, 9, 18, 18, 1, 2],
[10, 10, 17, 17, 1, 3],
[11, 11, 16, 16, 1, 2],
[11, 11, 18, 18, 1, 2],
[12, 12, 19, 19, 1, 5],
[16, 16, 18, 18, 1, 2]])
expect_overlaps_lst = np.array([])
output_tuple = create_anno_remove_overlaps(input_mat, song_length,
band_width)
self.assertTrue((output_tuple[0] == expect_pattern_row).all())
self.assertTrue((output_tuple[1] == expect_k_lst_out).all())
self.assertTrue((output_tuple[2] == expect_overlaps_lst).all())
def test_separate_anno_markers_single_row_input(self):
"""
Tests if __separate_anno_markers works with a single-row matrix.
"""
k_mat = np.array([[7, 12, 14, 19, 6, 1]])
song_length = 19
band_width = 6
pattern_row = np.array(
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
)
expect_pattern_mat = np.array(
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
)
expect_pattern_key = np.array([6])
expect_anno_id_lst = np.array([[1]])
output_tuple = separate_anno_markers(k_mat, song_length, band_width,
pattern_row)
self.assertTrue((output_tuple[0] == expect_pattern_mat).all())
self.assertTrue((output_tuple[1] == expect_pattern_key).all())
self.assertTrue((output_tuple[2] == expect_anno_id_lst).all())
def test_separate_anno_markers_small_input(self):
"""
Tests if __separate_anno_markers works with a small matrix.
"""
k_mat = np.array([[3, 3, 9, 9, 1, 1],
[3, 3, 15, 15, 1, 1],
[5, 5, 12, 12, 1, 2]])
song_length = 19
band_width = 1
pattern_row = np.array(
[0, 0, 1, 0, 2, 0, 0, 0, 1, 0, 0, 2, 0, 0, 1, 0, 0, 0, 0]
)
expect_pattern_mat = np.array([
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]
])
expect_pattern_key = np.array([[1],
[1]])
expect_anno_id_lst = np.array([[1],
[2]])
output_tuple = separate_anno_markers(k_mat, song_length, band_width,
pattern_row)
self.assertTrue((output_tuple[0] == expect_pattern_mat).all())
self.assertTrue((output_tuple[1] == expect_pattern_key).all())
self.assertTrue((output_tuple[2] == expect_anno_id_lst).all())
def test_separate_anno_markers_large_input(self):
"""
Tests if __separate_anno_markers works with a large matrix.
"""
k_mat = np.array([[2, 2, 8, 8, 1, 1],
[2, 2, 15, 15, 1, 1],
[3, 3, 5, 5, 1, 2],
[3, 3, 9, 9, 1, 2],
[3, 3, 11, 11, 1, 2],
[3, 3, 16, 16, 1, 2],
[3, 3, 18, 18, 1, 2],
[4, 4, 10, 10, 1, 3],
[4, 4, 17, 17, 1, 3],
[5, 5, 9, 9, 1, 2],
[5, 5, 11, 11, 1, 2],
[5, 5, 16, 16, 1, 2],
[5, 5, 18, 18, 1, 2],
[7, 7, 14, 14, 1, 4],
[8, 8, 15, 15, 1, 1],
[9, 9, 11, 11, 1, 2],
[9, 9, 16, 16, 1, 2],
[9, 9, 18, 18, 1, 2],
[10, 10, 17, 17, 1, 3],
[11, 11, 16, 16, 1, 2],
[11, 11, 18, 18, 1, 2],
[12, 12, 19, 19, 1, 5],
[16, 16, 18, 18, 1, 2]])
song_length = 19
band_width = 1
pattern_row = np.array(
[0, 1, 2, 3, 2, 0, 4, 1, 2, 3, 2, 5, 0, 4, 1, 2, 3, 2, 5]
)
expect_pattern_mat = np.array([
[0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1]
])
expect_pattern_key = np.array([[1],
[1],
[1],
[1],
[1]])
expect_anno_id_lst = np.array([[1],
[2],
[3],
[4],
[5]])
output_tuple = separate_anno_markers(k_mat, song_length, band_width,
pattern_row)
self.assertTrue((output_tuple[0] == expect_pattern_mat).all())
self.assertTrue((output_tuple[1] == expect_pattern_key).all())
self.assertTrue((output_tuple[2] == expect_anno_id_lst).all())
def test_remove_overlaps_small_input_with_overlaps(self):
"""
Tests if remove_overlaps works with a small matrix containing
overlaps.
"""
input_lst = np.array([[1, 4, 11, 14, 4, 1],
[4, 7, 14, 17, 4, 1],
[2, 3, 12, 13, 2, 1]])
song_length = 20
expect_lst_no_overlaps = np.array([[2, 3, 12, 13, 2, 1]])
expect_matrix_no_overlaps = np.array([
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]
])
expect_key_no_overlaps = np.array([2])
expect_annotations_no_overlaps = np.array([1])
expect_all_overlap_lst = np.array([[1, 4, 11, 14, 4, 1],
[4, 7, 14, 17, 4, 2]])
output_tuple = remove_overlaps(input_lst, song_length)
self.assertTrue((output_tuple[0] == expect_lst_no_overlaps).all())
self.assertTrue((output_tuple[1] == expect_matrix_no_overlaps).all())
self.assertTrue((output_tuple[2] == expect_key_no_overlaps).all())
self.assertTrue((output_tuple[3] ==
expect_annotations_no_overlaps).all())
self.assertTrue((output_tuple[4] == expect_all_overlap_lst).all())
def test_remove_overlaps_small_input_without_overlaps(self):
"""
Tests if remove_overlaps works with a small matrix containing
no overlaps.
"""
input_lst = np.array([[1, 1, 10, 10, 1, 1],
[7, 7, 13, 13, 1, 1],
[3, 4, 17, 18, 2, 1]])
song_length = 20
expect_lst_no_overlaps = np.array([[1, 1, 10, 10, 1, 1],
[7, 7, 13, 13, 1, 1],
[3, 4, 17, 18, 2, 1]])
expect_matrix_no_overlaps = np.array([
[1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
])
expect_key_no_overlaps = np.array([1, 2])
expect_annotations_no_overlaps = np.array([1, 1])
expect_all_overlap_lst = np.empty([0, 6])
output_tuple = remove_overlaps(input_lst, song_length)
self.assertTrue((output_tuple[0] == expect_lst_no_overlaps).all())
self.assertTrue((output_tuple[1] == expect_matrix_no_overlaps).all())
self.assertTrue((output_tuple[2] == expect_key_no_overlaps).all())
self.assertTrue((output_tuple[3] ==
expect_annotations_no_overlaps).all())
self.assertTrue((output_tuple[4] == expect_all_overlap_lst).all())
def test_remove_overlaps_large_input_with_overlaps(self):
"""
Tests if remove_overlaps works with a large matrix containing
overlaps.
"""
input_lst = np.array([[1, 2, 8, 9, 2, 1],
[2, 3, 9, 10, 2, 1],
[1, 2, 2, 3, 2, 1],
[16, 17, 18, 19, 2, 1],
[16, 17, 9, 10, 2, 1],
[3, 4, 10, 11, 2, 2],
[4, 6, 11, 13, 3, 1],
[5, 7, 13, 15, 3, 1],
[5, 7, 11, 13, 3, 1]])
song_length = 20
expect_lst_no_overlaps = np.array([[3, 4, 10, 11, 2, 1]])
expect_matrix_no_overlaps = np.array([
[0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
])
expect_key_no_overlaps = np.array([2])
expect_annotations_no_overlaps = np.array([1])
expect_all_overlap_lst = np.array([[4, 6, 11, 13, 3, 1],
[5, 7, 11, 13, 3, 1],
[5, 7, 13, 15, 3, 1],
[1, 2, 2, 3, 2, 1],
[1, 2, 8, 9, 2, 1],
[2, 3, 9, 10, 2, 1],
[16, 17, 9, 10, 2, 1],
[16, 17, 18, 19, 2, 1]])
output_tuple = remove_overlaps(input_lst, song_length)
self.assertTrue((output_tuple[0] == expect_lst_no_overlaps).all())
self.assertTrue((output_tuple[1] == expect_matrix_no_overlaps).all())
self.assertTrue((output_tuple[2] == expect_key_no_overlaps).all())
self.assertTrue((output_tuple[3] ==
expect_annotations_no_overlaps).all())
self.assertTrue((output_tuple[4] == expect_all_overlap_lst).all())
def test_remove_overlaps_large_input_without_overlaps(self):
"""
Tests if remove_overlaps works with a large matrix containing
no overlaps.
"""
input_lst = np.array([[2, 2, 8, 8, 1, 1],
[2, 2, 15, 15, 1, 1],
[3, 3, 5, 5, 1, 2],
[3, 3, 9, 9, 1, 2],
[3, 3, 11, 11, 1, 2],
[3, 3, 16, 16, 1, 2],
[3, 3, 18, 18, 1, 2],
[4, 4, 10, 10, 1, 3],
[4, 4, 17, 17, 1, 3],
[5, 5, 9, 9, 1, 2],
[5, 5, 11, 11, 1, 2],
[5, 5, 16, 16, 1, 2],
[5, 5, 18, 18, 1, 2],
[7, 7, 14, 14, 1, 4],
[8, 8, 15, 15, 1, 1],
[9, 9, 11, 11, 1, 2],
[9, 9, 16, 16, 1, 2],
[9, 9, 18, 18, 1, 2],
[10, 10, 17, 17, 1, 3],
[11, 11, 16, 16, 1, 2],
[11, 11, 18, 18, 1, 2],
[12, 12, 19, 19, 1, 5],
[16, 16, 18, 18, 1, 2],
[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 2],
[3, 4, 16, 17, 2, 2],
[4, 5, 10, 11, 2, 3],
[4, 5, 17, 18, 2, 3],
[7, 8, 14, 15, 2, 4],
[8, 9, 15, 16, 2, 1],
[9, 10, 16, 17, 2, 2],
[10, 11, 17, 18, 2, 3],
[11, 12, 18, 19, 2, 5],
[2, 4, 8, 10, 3, 1],
[2, 4, 15, 17, 3, 1],
[3, 5, 9, 11, 3, 2],
[3, 5, 16, 18, 3, 2],
[7, 9, 14, 16, 3, 3],
[8, 10, 15, 17, 3, 1],
[9, 11, 16, 18, 3, 2],
[10, 12, 17, 19, 3, 4],
[2, 5, 8, 11, 4, 1],
[2, 5, 15, 18, 4, 1],
[7, 10, 14, 17, 4, 2],
[8, 11, 15, 18, 4, 1],
[9, 12, 16, 19, 4, 3],
[7, 11, 14, 18, 5, 1],
[8, 12, 15, 19, 5, 2],
[7, 12, 14, 19, 6, 1]])
song_length = 20
expect_lst_no_overlaps = np.array([[2, 2, 8, 8, 1, 1],
[2, 2, 15, 15, 1, 1],
[3, 3, 5, 5, 1, 2],
[3, 3, 9, 9, 1, 2],
[3, 3, 11, 11, 1, 2],
[3, 3, 16, 16, 1, 2],
[3, 3, 18, 18, 1, 2],
[4, 4, 10, 10, 1, 3],
[4, 4, 17, 17, 1, 3],
[5, 5, 9, 9, 1, 2],
[5, 5, 11, 11, 1, 2],
[5, 5, 16, 16, 1, 2],
[5, 5, 18, 18, 1, 2],
[7, 7, 14, 14, 1, 4],
[8, 8, 15, 15, 1, 1],
[9, 9, 11, 11, 1, 2],
[9, 9, 16, 16, 1, 2],
[9, 9, 18, 18, 1, 2],
[10, 10, 17, 17, 1, 3],
[11, 11, 16, 16, 1, 2],
[11, 11, 18, 18, 1, 2],
[12, 12, 19, 19, 1, 5],
[16, 16, 18, 18, 1, 2],
[2, 3, 8, 9, 2, 1],
[2, 3, 15, 16, 2, 1],
[3, 4, 9, 10, 2, 2],
[3, 4, 16, 17, 2, 2],
[4, 5, 10, 11, 2, 3],
[4, 5, 17, 18, 2, 3],
[7, 8, 14, 15, 2, 4],
[8, 9, 15, 16, 2, 1],
[9, 10, 16, 17, 2, 2],
[10, 11, 17, 18, 2, 3],
[11, 12, 18, 19, 2, 5],
[2, 4, 8, 10, 3, 1],
[2, 4, 15, 17, 3, 1],
[3, 5, 9, 11, 3, 2],
[3, 5, 16, 18, 3, 2],
[7, 9, 14, 16, 3, 3],
[8, 10, 15, 17, 3, 1],
[9, 11, 16, 18, 3, 2],
[10, 12, 17, 19, 3, 4],
[2, 5, 8, 11, 4, 1],
[2, 5, 15, 18, 4, 1],
[7, 10, 14, 17, 4, 2],
[8, 11, 15, 18, 4, 1],
[9, 12, 16, 19, 4, 3],
[7, 11, 14, 18, 5, 1],
[8, 12, 15, 19, 5, 2],
[7, 12, 14, 19, 6, 1]])
expect_matrix_no_overlaps = np.array([
[0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0]
])
expect_key_no_overlaps = np.array(
[1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 5, 5, 6]
)
expect_annotations_no_overlaps = np.array(
[1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 1, 2, 3, 1, 2, 1]
)
expect_all_overlap_lst = np.empty([0, 6])
output_tuple = remove_overlaps(input_lst, song_length)
self.assertTrue((output_tuple[0] == expect_lst_no_overlaps).all())
self.assertTrue((output_tuple[1] == expect_matrix_no_overlaps).all())
self.assertTrue((output_tuple[2] == expect_key_no_overlaps).all())
self.assertTrue((output_tuple[3] ==
expect_annotations_no_overlaps).all())
self.assertTrue((output_tuple[4] == expect_all_overlap_lst).all())
if __name__ == '__main__':
unittest.main()
| 48.237171 | 78 | 0.334541 | 4,437 | 34,779 | 2.477124 | 0.028172 | 0.113002 | 0.140024 | 0.153216 | 0.951051 | 0.93158 | 0.909926 | 0.892094 | 0.862251 | 0.854426 | 0 | 0.224649 | 0.524771 | 34,779 | 720 | 79 | 48.304167 | 0.440344 | 0.042353 | 0 | 0.75485 | 0 | 0 | 0.000244 | 0 | 0 | 0 | 0 | 0 | 0.093474 | 1 | 0.026455 | false | 0 | 0.008818 | 0 | 0.037037 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
c78e80717509757cb183d46c91cf45ad0f57357d | 6,587 | py | Python | src/waldur_azure/tests/test_virtual_machines.py | opennode/nodeconductor-azure | de335758facf67465433be277af4ac3068f1f36b | [
"MIT"
] | 2 | 2017-08-16T07:25:23.000Z | 2017-12-01T18:11:53.000Z | src/waldur_azure/tests/test_virtual_machines.py | opennode/nodeconductor-azure | de335758facf67465433be277af4ac3068f1f36b | [
"MIT"
] | null | null | null | src/waldur_azure/tests/test_virtual_machines.py | opennode/nodeconductor-azure | de335758facf67465433be277af4ac3068f1f36b | [
"MIT"
] | 2 | 2017-09-24T03:14:03.000Z | 2018-08-12T07:44:25.000Z | from ddt import data, ddt
from rest_framework import test, status
from libcloud.compute.types import NodeState
import mock
from . import fixtures, factories
from .. import models
@ddt
class VirtualMachineRDPTest(test.APITransactionTestCase):
    """Tests for the virtual machine 'rdp' action (RDP connection file download)."""

    def setUp(self):
        self.fixture = fixtures.AzureFixture()

    def test_rdp_returns_xrdp_file_as_attachment(self):
        """A VM with an RDP endpoint serves '<name>.rdp' as an attachment."""
        self.client.force_authenticate(self.fixture.owner)
        vm = self.fixture.virtual_machine
        factories.InstanceEndpoint(instance=vm, name=models.InstanceEndpoint.Name.RDP)
        url = factories.VirtualMachineFactory.get_url(vm, 'rdp')

        response = self.client.get(url)

        # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Public header lookup instead of the private HttpResponse._headers dict
        # (removed in Django 3.2); same header value is compared.
        self.assertIn('%s.rdp' % vm.name, response['content-disposition'])

    def test_rdp_is_not_available_if_backend_vm_does_not_have_a_port(self):
        """Without an RDP endpoint the action responds with 404."""
        self.client.force_authenticate(self.fixture.owner)
        vm = self.fixture.virtual_machine
        url = factories.VirtualMachineFactory.get_url(vm, 'rdp')

        response = self.client.get(url)

        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_rdp_action_is_not_available_only_to_erred_or_instances(self):
        """An ERRED VM cannot serve the RDP action (409 conflict)."""
        self.client.force_authenticate(self.fixture.owner)
        vm = self.fixture.virtual_machine
        vm.state = vm.States.ERRED
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'rdp')

        response = self.client.get(url)

        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)

    @data(NodeState.STOPPED, NodeState.UNKNOWN, NodeState.ERROR, NodeState.STARTING)
    def test_rdp_action_is_allowed_only_for_running_instances(self, runtime_state):
        """Any non-running backend runtime state is rejected with 409 conflict."""
        self.client.force_authenticate(self.fixture.owner)
        vm = self.fixture.virtual_machine
        vm.runtime_state = runtime_state
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'rdp')

        response = self.client.get(url)

        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)
@ddt
class VirtualMachineStartTest(test.APITransactionTestCase):
    """Tests for the virtual machine 'start' action state guards."""

    def setUp(self):
        self.fixture = fixtures.AzureFixture()
        self.client.force_authenticate(self.fixture.owner)

    @mock.patch('waldur_azure.executors.VirtualMachineStartExecutor.execute')
    def test_stopped_machine_can_be_started(self, start_executor_mock):
        """A stopped VM accepts the start action (202)."""
        vm = self.fixture.virtual_machine
        vm.runtime_state = NodeState.STOPPED
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'start')

        response = self.client.post(url)

        # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

    @mock.patch('waldur_azure.executors.VirtualMachineStartExecutor.execute')
    def test_machine_in_ok_state_can_be_started(self, start_executor_mock):
        """A VM in OK model state with stopped runtime state accepts the start action."""
        vm = self.fixture.virtual_machine
        vm.state = vm.States.OK
        vm.runtime_state = NodeState.STOPPED
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'start')

        response = self.client.post(url)

        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

    @data(NodeState.RUNNING, NodeState.UNKNOWN, NodeState.ERROR, NodeState.STARTING)
    @mock.patch('waldur_azure.executors.VirtualMachineStartExecutor.execute')
    def test_only_stopped_machine_can_be_started(self, runtime_state, start_executor_mock):
        """Any non-stopped runtime state rejects the start action (409)."""
        vm = self.fixture.virtual_machine
        vm.runtime_state = runtime_state
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'start')

        response = self.client.post(url)

        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)

    @data(models.VirtualMachine.States.DELETING, models.VirtualMachine.States.UPDATING,
          models.VirtualMachine.States.DELETION_SCHEDULED, models.VirtualMachine.States.ERRED)
    @mock.patch('waldur_azure.executors.VirtualMachineStartExecutor.execute')
    def test_only_machine_in_ok_state_can_be_started(self, state, start_executor_mock):
        """Any transitional or erred model state rejects the start action (409)."""
        vm = self.fixture.virtual_machine
        vm.state = state
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'start')

        response = self.client.post(url)

        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)
@ddt
class VirtualMachineStopTest(test.APITransactionTestCase):
    """Tests for the virtual machine 'stop' action state guards."""

    def setUp(self):
        self.fixture = fixtures.AzureFixture()
        self.client.force_authenticate(self.fixture.owner)

    @mock.patch('waldur_azure.executors.VirtualMachineStopExecutor.execute')
    def test_running_machine_can_be_stopped(self, stop_executor_mock):
        """A running VM accepts the stop action (202)."""
        vm = self.fixture.virtual_machine
        vm.runtime_state = NodeState.RUNNING
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'stop')

        response = self.client.post(url)

        # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

    @mock.patch('waldur_azure.executors.VirtualMachineStopExecutor.execute')
    def test_machine_in_ok_state_can_be_stopped(self, stop_executor_mock):
        """A VM in OK model state accepts the stop action.

        NOTE(review): unlike the start-action analog, runtime_state is left
        at the fixture default here — presumably RUNNING; confirm against
        AzureFixture before relying on this test.
        """
        vm = self.fixture.virtual_machine
        vm.state = vm.States.OK
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'stop')

        response = self.client.post(url)

        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

    @data(NodeState.STOPPED, NodeState.UNKNOWN, NodeState.ERROR, NodeState.STARTING)
    @mock.patch('waldur_azure.executors.VirtualMachineStopExecutor.execute')
    def test_only_running_machine_can_be_stopped(self, runtime_state, stop_executor_mock):
        """Any non-running runtime state rejects the stop action (409)."""
        vm = self.fixture.virtual_machine
        vm.runtime_state = runtime_state
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'stop')

        response = self.client.post(url)

        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)

    @data(models.VirtualMachine.States.DELETING, models.VirtualMachine.States.UPDATING,
          models.VirtualMachine.States.DELETION_SCHEDULED, models.VirtualMachine.States.ERRED)
    @mock.patch('waldur_azure.executors.VirtualMachineStopExecutor.execute')
    def test_only_machine_in_ok_state_can_be_stopped(self, state, stop_executor_mock):
        """Any transitional or erred model state rejects the stop action (409)."""
        vm = self.fixture.virtual_machine
        vm.state = state
        vm.save()
        url = factories.VirtualMachineFactory.get_url(vm, 'stop')

        response = self.client.post(url)

        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)
| 38.976331 | 94 | 0.73387 | 783 | 6,587 | 5.924649 | 0.139208 | 0.049795 | 0.033628 | 0.051735 | 0.879284 | 0.871524 | 0.852554 | 0.852554 | 0.847812 | 0.773227 | 0 | 0.006782 | 0.171702 | 6,587 | 168 | 95 | 39.208333 | 0.843475 | 0 | 0 | 0.770492 | 0 | 0 | 0.080917 | 0.069835 | 0 | 0 | 0 | 0 | 0.106557 | 1 | 0.122951 | false | 0 | 0.04918 | 0 | 0.196721 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c78fc1fe4b7d319ceabc75600bbe6a9546de6cc6 | 5,001 | py | Python | tests/TotalHostCalcGUITest.py | dwitt006/SubnetCalculator | 22099c73d829b63d0afcd4c78e4d72fa2f24359e | [
"MIT"
] | 1 | 2017-06-05T15:52:10.000Z | 2017-06-05T15:52:10.000Z | tests/TotalHostCalcGUITest.py | dwitt006/SubnetCalculator | 22099c73d829b63d0afcd4c78e4d72fa2f24359e | [
"MIT"
] | null | null | null | tests/TotalHostCalcGUITest.py | dwitt006/SubnetCalculator | 22099c73d829b63d0afcd4c78e4d72fa2f24359e | [
"MIT"
] | null | null | null | # Title: Subnet Calculator: Total Host Count Calculator GUI Testing
# Date: 04-28-17
import unittest
from SubnetCalculator.TotalHostCalcGUI import TotalHostCalcGUI
from SubnetCalculator.TotalHostCalcGUI import exponent
class TotalHostCalcGUITest(unittest.TestCase):
# Test exponent
def test_exponent(self):
self.assertEqual('2' + exponent('123'), '2¹²³')
# Test clearing entries and learning steps
def test_clear_and_reset(self):
test_gui = TotalHostCalcGUI()
test_gui.cidr_entry.insert(0, '152.2.136.0/26')
test_gui.calculate.invoke()
test_gui.clear_and_reset()
self.assertEqual(test_gui.learning_steps.winfo_children(), [])
self.assertEqual(test_gui.cidr_entry.get(), '')
# Test clearing entries and learning steps from separate button
def test_clear_and_reset_calculate(self):
test_gui = TotalHostCalcGUI()
test_gui.cidr_entry.insert(0, '152.2.136.0/26')
test_gui.calculate.invoke()
test_gui.calculate2.invoke()
self.assertEqual(test_gui.learning_steps.winfo_children(), [])
self.assertEqual(test_gui.cidr_entry.get(), '')
# Test total host count calculator using cidr: get_count_from_cidr(self)
def test_count_cidr(self):
test_gui = TotalHostCalcGUI()
test_gui.cidr_entry.insert(0, '152.2.136.0/26')
test_gui.calculate.invoke()
self.assertEqual(test_gui.learning_steps.winfo_children()[0].cget('text'), 'Step 1:')
self.assertEqual(test_gui.learning_steps.winfo_children()[1].cget('text'), 'CIDR Address: 152.2.136.0/26')
self.assertEqual(test_gui.learning_steps.winfo_children()[2].cget('text'), 'Step 2:')
self.assertEqual(test_gui.learning_steps.winfo_children()[3].cget('text'), 'CIDR: 26')
self.assertEqual(test_gui.learning_steps.winfo_children()[4].cget('text'), 'Step 3:')
self.assertEqual(test_gui.learning_steps.winfo_children()[5].cget('text'), 'Host Bits: 32 - 26 = 6')
self.assertEqual(test_gui.learning_steps.winfo_children()[6].cget('text'), 'Step 4:')
self.assertEqual(test_gui.learning_steps.winfo_children()[7].cget('text'),
'Total Host Count: 2⁶ = 64')
# Test total host count calculator using netmask: get_count_from_netmask(self)
def test_count_netmask(self):
test_gui = TotalHostCalcGUI()
test_gui.netmask_entry.insert(0, '255.255.255.192')
test_gui.calculate2.invoke()
self.assertEqual(test_gui.learning_steps.winfo_children()[0].cget('text'), 'Step 1:')
self.assertEqual(test_gui.learning_steps.winfo_children()[1].cget('text'), 'Netmask: 255.255.255.192')
self.assertEqual(test_gui.learning_steps.winfo_children()[2].cget('text'), 'Step 2:')
self.assertEqual(test_gui.learning_steps.winfo_children()[3].cget('text'),
'Netmask Binary: 11111111.11111111.11111111.11000000')
self.assertEqual(test_gui.learning_steps.winfo_children()[4].cget('text'), 'Step 3:')
self.assertEqual(test_gui.learning_steps.winfo_children()[5].cget('text'), 'Host Bits: 6')
self.assertEqual(test_gui.learning_steps.winfo_children()[6].cget('text'), 'Step 4:')
self.assertEqual(test_gui.learning_steps.winfo_children()[7].cget('text'),
'Total Host Count: 2⁶ = 64')
# Test total host count calculator using IP range: get_count_from_ip_range(self)
def test_count_ip_range(self):
test_gui = TotalHostCalcGUI()
test_gui.ip_range_entry.insert(0, '152.2.136.1-152.2.136.62')
test_gui.calculate3.invoke()
self.assertEqual(test_gui.learning_steps.winfo_children()[0].cget('text'), 'Step 1:')
self.assertEqual(test_gui.learning_steps.winfo_children()[1].cget('text'),
'Assignable IP Range: 152.2.136.1-152.2.136.62')
self.assertEqual(test_gui.learning_steps.winfo_children()[2].cget('text'), 'Step 2:')
self.assertEqual(test_gui.learning_steps.winfo_children()[3].cget('text'),
'Front: 10011000.00000010.10001000.00000001')
self.assertEqual(test_gui.learning_steps.winfo_children()[4].cget('text'),
' Back: 10011000.00000010.10001000.00111110')
self.assertEqual(test_gui.learning_steps.winfo_children()[5].cget('text'), 'Step 3:')
self.assertEqual(test_gui.learning_steps.winfo_children()[6].cget('text'),
'Comparison: 11111111.11111111.11111111.11000000')
self.assertEqual(test_gui.learning_steps.winfo_children()[7].cget('text'), 'Step 4:')
self.assertEqual(test_gui.learning_steps.winfo_children()[8].cget('text'), 'Host Bits: 6')
self.assertEqual(test_gui.learning_steps.winfo_children()[9].cget('text'), 'Step 5:')
self.assertEqual(test_gui.learning_steps.winfo_children()[10].cget('text'), 'Total Host Count: 2⁶ = 64')
# Allow running this test module directly (outside a test runner discovery).
if __name__ == '__main__':
    unittest.main()
| 56.191011 | 114 | 0.679664 | 655 | 5,001 | 4.961832 | 0.135878 | 0.103385 | 0.181231 | 0.209846 | 0.780615 | 0.765846 | 0.712 | 0.704 | 0.678769 | 0.677538 | 0 | 0.075206 | 0.175765 | 5,001 | 88 | 115 | 56.829545 | 0.713246 | 0.086583 | 0 | 0.485294 | 0 | 0 | 0.15442 | 0.041237 | 0 | 0 | 0 | 0 | 0.470588 | 1 | 0.088235 | false | 0 | 0.044118 | 0 | 0.147059 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c7b02a0fac24b3917acc853bf9010be797af5eb9 | 26,724 | py | Python | tests/unit/test_endpoint.py | nordic-institute/X-Road-Security-Server-toolkit | 1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb | [
"MIT"
] | 7 | 2020-11-01T19:50:11.000Z | 2022-01-18T17:45:19.000Z | tests/unit/test_endpoint.py | nordic-institute/X-Road-Security-Server-toolkit | 1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb | [
"MIT"
] | 24 | 2020-11-09T08:09:10.000Z | 2021-06-16T07:22:14.000Z | tests/unit/test_endpoint.py | nordic-institute/X-Road-Security-Server-toolkit | 1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb | [
"MIT"
] | 1 | 2021-04-27T14:39:48.000Z | 2021-04-27T14:39:48.000Z | import copy
import sys
import unittest
from argparse import Namespace
from datetime import datetime
from unittest import mock
import pytest
from tests.util.test_util import StatusTestData
from xrdsst.controllers.endpoint import EndpointController, EndpointListMapper, EndpointAccessListMapper
from xrdsst.main import XRDSSTTest
from xrdsst.models import Client, ConnectionType, ClientStatus, ServiceDescription, ServiceType, ServiceClient, ServiceClientType, Service, Endpoint
from xrdsst.rest.rest import ApiException
class EndpointTestData:
    """Canned REST API response fixtures shared by the endpoint tests."""

    # Service description returned by the get_client_service_descriptions /
    # add_endpoint mocks: one OPENAPI3 description whose single 'Petstore'
    # service already carries a 'POST /testPath' endpoint.
    add_description_response = ServiceDescription(
        id='DEV:GOV:9876:SUB1',
        url='https://openapi3',
        type=ServiceType.OPENAPI3,
        disabled=True,
        disabled_notice='',
        refreshed_at='2021-01-01T09:10:00',
        services=[Service(id='DEV:GOV:9876:SUB1:Petstore',
                          full_service_code='DEV:GOV:9876:SUB1:Petstore',
                          service_code='Petstore',
                          timeout=60,
                          title='title',
                          ssl_auth=False,
                          subjects_count=0,
                          url='url',
                          endpoints=[Endpoint(id="1", service_code='Petstore', path='/testPath', method='POST')])],
        client_id='DEV:GOV:9876:SUB1'
    )

    # Service-client list returned by the add_endpoint_service_clients mock
    # when access is granted to an endpoint.
    add_access_response = [ServiceClient(id='DEV:security-server-owners',
                                         name='Security server owners',
                                         local_group_code=None,
                                         service_client_type='GLOBALGROUP',
                                         rights_given_at=None)]

    # A single non-generated endpoint, as returned by the endpoints API.
    get_endpoint = Endpoint(
        id='1',
        service_code="Test",
        method="PUT",
        path="/testPath",
        generated=False
    )
class TestEndpoint(unittest.TestCase):
ss_config = {
'admin_credentials': 'user:pass',
'logging': {'file': '/tmp/xrdsst_test_token_log', 'level': 'INFO'},
'ssh_access': {'user': 'user', 'private_key': 'key'},
'security_server':
[{'name': 'ssX',
'url': 'https://non.existing.url.blah:8999/api/v1',
'api_key': '33333333-3000-4000-b000-939393939393',
'api_key_url': 'https://localhost:4000/api/v1/api-keys',
'clients': [
{
'member_class': 'GOV',
'member_code': '9876',
'subsystem_code': 'SUB1',
'member_name': 'NIIS',
'connection_type': 'HTTP',
'service_descriptions': [{
'url': 'https://openapi3',
'rest_service_code': 'RestService',
'type': 'OPENAPI3',
'access': ['SUB1'],
'url_all': False,
'timeout_all': 60,
'ssl_auth_all': False,
'services': [
{
'service_code': 'Petstore',
'access': ['SUB1'],
'timeout': 120,
'ssl_auth': True,
'url': 'http://petstore.swagger.io/v1'
}
],
'endpoints': [{
'path': '/testPath',
'method': 'POST',
'access': ['DEV:security-server-owners']
}]
}]
}
]}]}
    @pytest.fixture(autouse=True)
    def capsys(self, capsys):
        # Store pytest's capture fixture so test methods can read console output.
        self.capsys = capsys
def test_endpoint_add(self):
with XRDSSTTest() as app:
with mock.patch('xrdsst.api.clients_api.ClientsApi.find_clients', return_value=[Client(
id='DEV:GOV:9876:SUB1',
instance_id='DEV',
member_class='GOV',
member_code='9876',
subsystem_code='SUB1',
connection_type=ConnectionType.HTTP,
status=ClientStatus.REGISTERED,
owner=True,
has_valid_local_sign_cert=True
)]):
with mock.patch('xrdsst.api.clients_api.ClientsApi.get_client_service_descriptions',
return_value=[EndpointTestData.add_description_response]):
with mock.patch(
'xrdsst.api.services_api.ServicesApi.add_endpoint',
return_value=EndpointTestData.add_description_response):
endpoint_controller = EndpointController()
endpoint_controller.app = app
endpoint_controller.load_config = (lambda: self.ss_config)
endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
endpoint_controller.add()
out, err = self.capsys.readouterr()
assert out.count("Added service endpoint") > 0
with self.capsys.disabled():
sys.stdout.write(out)
sys.stderr.write(err)
    @pytest.fixture(autouse=True)
    def capsys(self, capsys):
        # NOTE(review): duplicate of the identical autouse fixture defined
        # earlier in this class; the later definition wins, so this is
        # harmless, but one copy could be removed.
        self.capsys = capsys
def test_endpoint_already_added(self):
    """add() reports 'already added' when the REST API answers with a 409 conflict."""
    class AlreadyEnabledResponse:
        # Minimal stand-in for the HTTP response object wrapped by ApiException.
        status = 409
        data = '{"status":409,"error":{"code":"service_endpoint_already_enabled"}}'
        reason = None

        def getheaders(self): return None

    with XRDSSTTest() as app:
        with mock.patch('xrdsst.api.clients_api.ClientsApi.find_clients', return_value=[Client(
                id='DEV:GOV:9876:SUB1',
                instance_id='DEV',
                member_class='GOV',
                member_code='9876',
                subsystem_code='SUB1',
                connection_type=ConnectionType.HTTP,
                status=ClientStatus.REGISTERED,
                owner=True,
                has_valid_local_sign_cert=True)]):
            with mock.patch('xrdsst.api.clients_api.ClientsApi.get_client_service_descriptions',
                            return_value=[EndpointTestData.add_description_response]):
                # Simulate the 409 by raising ApiException from add_endpoint.
                with mock.patch(
                        'xrdsst.api.services_api.ServicesApi.add_endpoint',
                        side_effect=ApiException(http_resp=AlreadyEnabledResponse())):
                    endpoint_controller = EndpointController()
                    endpoint_controller.app = app
                    endpoint_controller.load_config = (lambda: self.ss_config)
                    endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                    endpoint_controller.add()

                    out, err = self.capsys.readouterr()
                    assert out.count("already added") > 0
                    with self.capsys.disabled():
                        sys.stdout.write(out)
                        sys.stderr.write(err)
def test_endpoint_add_access(self):
    """add_access() grants endpoint access rights when a valid candidate exists."""
    with XRDSSTTest() as app:
        with mock.patch('xrdsst.api.clients_api.ClientsApi.find_clients', return_value=[Client(
                id='DEV:GOV:9876:SUB1',
                instance_id='DEV',
                member_class='GOV',
                member_code='9876',
                subsystem_code='SUB1',
                connection_type=ConnectionType.HTTP,
                status=ClientStatus.REGISTERED,
                owner=True,
                has_valid_local_sign_cert=True)]):
            with mock.patch('xrdsst.api.clients_api.ClientsApi.get_client_service_descriptions',
                            return_value=[EndpointTestData.add_description_response]):
                # The configured access subject resolves to this global group.
                with mock.patch('xrdsst.api.clients_api.ClientsApi.find_service_client_candidates',
                                return_value=[ServiceClient(
                                    id='DEV:security-server-owners',
                                    name='Security server owners',
                                    local_group_code=None,
                                    service_client_type=ServiceClientType.GLOBALGROUP,
                                    rights_given_at=datetime.now().isoformat())]):
                    with mock.patch(
                            'xrdsst.api.endpoints_api.EndpointsApi.add_endpoint_service_clients',
                            return_value=EndpointTestData.add_access_response):
                        endpoint_controller = EndpointController()
                        endpoint_controller.app = app
                        endpoint_controller.load_config = (lambda: self.ss_config)
                        endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                        endpoint_controller.add_access()

                        out, err = self.capsys.readouterr()
                        assert out.count("Added client access rights") > 0
                        with self.capsys.disabled():
                            sys.stdout.write(out)
                            sys.stderr.write(err)
def test_endpoint_add_access_not_valid_candidate(self):
    """add_access() reports when no service-client candidate matches the config."""
    with XRDSSTTest() as app:
        with mock.patch('xrdsst.api.clients_api.ClientsApi.find_clients', return_value=[Client(
                id='DEV:GOV:9876:SUB1',
                instance_id='DEV',
                member_class='GOV',
                member_code='9876',
                subsystem_code='SUB1',
                connection_type=ConnectionType.HTTP,
                status=ClientStatus.REGISTERED,
                owner=True,
                has_valid_local_sign_cert=True)]):
            with mock.patch('xrdsst.api.clients_api.ClientsApi.get_client_service_descriptions',
                            return_value=[EndpointTestData.add_description_response]):
                # Empty candidate list: the configured access subject cannot be resolved.
                with mock.patch('xrdsst.api.clients_api.ClientsApi.find_service_client_candidates',
                                return_value=[]):
                    with mock.patch(
                            'xrdsst.api.endpoints_api.EndpointsApi.add_endpoint_service_clients',
                            return_value=EndpointTestData.add_access_response):
                        endpoint_controller = EndpointController()
                        endpoint_controller.app = app
                        endpoint_controller.load_config = (lambda: self.ss_config)
                        endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                        endpoint_controller.add_access()

                        out, err = self.capsys.readouterr()
                        assert out.count("no valid candidate found") > 0
                        with self.capsys.disabled():
                            sys.stdout.write(out)
                            sys.stderr.write(err)
def test_endpoint_add_access_endpoint_not_found(self):
    """add_access() reports when the configured endpoint is absent from the description."""
    with XRDSSTTest() as app:
        with mock.patch('xrdsst.api.clients_api.ClientsApi.find_clients', return_value=[Client(
                id='DEV:GOV:9876:SUB1',
                instance_id='DEV',
                member_class='GOV',
                member_code='9876',
                subsystem_code='SUB1',
                connection_type=ConnectionType.HTTP,
                status=ClientStatus.REGISTERED,
                owner=True,
                has_valid_local_sign_cert=True)]):
            # Deep-copy the shared fixture, then change the endpoint's HTTP method
            # so the configured POST /testPath endpoint no longer matches.
            service_description = copy.deepcopy(EndpointTestData.add_description_response)
            service_description.services[0].endpoints[0].method = 'GET'
            with mock.patch('xrdsst.api.clients_api.ClientsApi.get_client_service_descriptions',
                            return_value=[service_description]):
                with mock.patch('xrdsst.api.clients_api.ClientsApi.find_service_client_candidates',
                                return_value=[ServiceClient(
                                    id='DEV:security-server-owners',
                                    name='Security server owners',
                                    local_group_code=None,
                                    service_client_type=ServiceClientType.GLOBALGROUP,
                                    rights_given_at=datetime.now().isoformat())]):
                    with mock.patch(
                            'xrdsst.api.endpoints_api.EndpointsApi.add_endpoint_service_clients',
                            return_value=EndpointTestData.add_access_response):
                        endpoint_controller = EndpointController()
                        endpoint_controller.app = app
                        endpoint_controller.load_config = (lambda: self.ss_config)
                        endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                        endpoint_controller.add_access()

                        out, err = self.capsys.readouterr()
                        assert out.count("endpoint not found") > 0
                        with self.capsys.disabled():
                            sys.stdout.write(out)
                            sys.stderr.write(err)
def test_endpoint_add_access_already_added(self):
    """add_access() reports 'already added' on a 409 conflict from the REST API."""
    class AlreadyEnabledResponse:
        # Minimal stand-in for the HTTP response object wrapped by ApiException.
        status = 409
        data = '{"status":409,"error":{"code":"service_endpoint_already_enabled"}}'
        reason = None

        def getheaders(self): return None

    with XRDSSTTest() as app:
        with mock.patch('xrdsst.api.clients_api.ClientsApi.find_clients', return_value=[Client(
                id='DEV:GOV:9876:SUB1',
                instance_id='DEV',
                member_class='GOV',
                member_code='9876',
                subsystem_code='SUB1',
                connection_type=ConnectionType.HTTP,
                status=ClientStatus.REGISTERED,
                owner=True,
                has_valid_local_sign_cert=True)]):
            with mock.patch('xrdsst.api.clients_api.ClientsApi.get_client_service_descriptions',
                            return_value=[EndpointTestData.add_description_response]):
                with mock.patch('xrdsst.api.clients_api.ClientsApi.find_service_client_candidates',
                                return_value=[ServiceClient(
                                    id='DEV:security-server-owners',
                                    name='Security server owners',
                                    local_group_code=None,
                                    service_client_type=ServiceClientType.GLOBALGROUP,
                                    rights_given_at=datetime.now().isoformat())]):
                    # Simulate the conflict by raising ApiException from the grant call.
                    with mock.patch(
                            'xrdsst.api.endpoints_api.EndpointsApi.add_endpoint_service_clients',
                            side_effect=ApiException(http_resp=AlreadyEnabledResponse())):
                        endpoint_controller = EndpointController()
                        endpoint_controller.app = app
                        endpoint_controller.load_config = (lambda: self.ss_config)
                        endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                        endpoint_controller.add_access()

                        out, err = self.capsys.readouterr()
                        assert out.count("already added") > 0
                        with self.capsys.disabled():
                            sys.stdout.write(out)
                            sys.stderr.write(err)
def test_endpoint_list(self):
    """list() renders one endpoint row containing every EndpointListMapper header."""
    with XRDSSTTest() as app:
        # CLI args: security server 'ssX', service description id '1'.
        app._parsed_args = Namespace(ss='ssX', description='1')
        with mock.patch('xrdsst.api.service_descriptions_api.ServiceDescriptionsApi.get_service_description',
                        return_value=EndpointTestData.add_description_response):
            endpoint_controller = EndpointController()
            endpoint_controller.app = app
            endpoint_controller.load_config = (lambda: self.ss_config)
            endpoint_controller.list()

            # _last_rendered[0][0] is the header row, [0][1] the first data row.
            for header in EndpointListMapper.headers():
                assert header in endpoint_controller.app._last_rendered[0][0]
            assert endpoint_controller.app._last_rendered[0][1][0] == '1'
            assert endpoint_controller.app._last_rendered[0][1][1] == 'POST'
            assert endpoint_controller.app._last_rendered[0][1][2] == '/testPath'
            assert endpoint_controller.app._last_rendered[0][1][3] == 'Petstore'
            assert endpoint_controller.app._last_rendered[0][1][4] == 'DEV:GOV:9876:SUB1'
            assert endpoint_controller.app._last_rendered[0][1][5] == 'https://openapi3'
            assert endpoint_controller.app._last_rendered[0][1][6] == ServiceType.OPENAPI3
def test_endpoint_update(self):
    """update() edits an existing endpoint and reports success."""
    with XRDSSTTest() as app:
        app._parsed_args = Namespace(ss='ssX',
                                     endpoint='1',
                                     method='POST',
                                     path='/testPath')
        with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint', return_value=EndpointTestData.get_endpoint):
            with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.update_endpoint',
                            return_value={}):
                endpoint_controller = EndpointController()
                endpoint_controller.app = app
                endpoint_controller.load_config = (lambda: self.ss_config)
                endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                endpoint_controller.update()

                out, err = self.capsys.readouterr()
                assert out.count("Updated endpoint ") > 0
                with self.capsys.disabled():
                    sys.stdout.write(out)
                    sys.stderr.write(err)
def test_endpoint_update_endpoint_not_found(self):
    """update() reports a lookup failure when the endpoint id yields a 404."""
    class NotFoundResponse:
        # Minimal stand-in for the HTTP response object wrapped by ApiException.
        status = 404
        data = '{"status":404}'
        reason = None

        def getheaders(self): return None

    with XRDSSTTest() as app:
        app._parsed_args = Namespace(ss='ssX',
                                     endpoint='1',
                                     method='POST',
                                     path='/testPath')
        with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint', side_effect=ApiException(http_resp=NotFoundResponse())):
            with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.update_endpoint',
                            return_value={}):
                endpoint_controller = EndpointController()
                endpoint_controller.app = app
                endpoint_controller.load_config = (lambda: self.ss_config)
                endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                endpoint_controller.update()

                out, err = self.capsys.readouterr()
                assert out.count("Could not find an endpoint with id: '1' for security server: 'ssX'") > 0
                with self.capsys.disabled():
                    sys.stdout.write(out)
                    sys.stderr.write(err)
def test_endpoint_is_generated(self):
    """update() refuses to modify endpoints flagged as auto-generated."""
    with XRDSSTTest() as app:
        app._parsed_args = Namespace(ss='ssX',
                                     endpoint='1',
                                     method='POST',
                                     path='/testPath')
        # generated=True marks the endpoint as machine-created (not user-editable).
        with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint', return_value=Endpoint(
                id=1,
                service_code="Test",
                method="PUT",
                path="/testPath",
                generated=True)):
            with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.update_endpoint',
                            return_value={}):
                endpoint_controller = EndpointController()
                endpoint_controller.app = app
                endpoint_controller.load_config = (lambda: self.ss_config)
                endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                endpoint_controller.update()

                out, err = self.capsys.readouterr()
                assert out.count("could not update generated endpoints") > 0
                with self.capsys.disabled():
                    sys.stdout.write(out)
                    sys.stderr.write(err)
def test_endpoint_delete(self):
    """delete() removes an existing endpoint and reports the deleted id."""
    with XRDSSTTest() as app:
        app._parsed_args = Namespace(ss='ssX',
                                     endpoint='1')
        with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint', return_value=EndpointTestData.get_endpoint):
            with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.delete_endpoint',
                            return_value={}):
                endpoint_controller = EndpointController()
                endpoint_controller.app = app
                endpoint_controller.load_config = (lambda: self.ss_config)
                endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                endpoint_controller.delete()

                out, err = self.capsys.readouterr()
                assert out.count("Deleted endpoint with id") > 0
                with self.capsys.disabled():
                    sys.stdout.write(out)
                    sys.stderr.write(err)
def test_endpoint_delete_endpoint_not_found(self):
    """delete() reports a lookup failure when the endpoint id yields a 404."""
    class NotFoundResponse:
        # Minimal stand-in for the HTTP response object wrapped by ApiException.
        status = 404
        data = '{"status":404}'
        reason = None

        def getheaders(self): return None

    with XRDSSTTest() as app:
        app._parsed_args = Namespace(ss='ssX',
                                     endpoint='1')
        with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint', side_effect=ApiException(http_resp=NotFoundResponse())):
            with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.delete_endpoint',
                            return_value={}):
                endpoint_controller = EndpointController()
                endpoint_controller.app = app
                endpoint_controller.load_config = (lambda: self.ss_config)
                endpoint_controller.get_server_status = (lambda x, y: StatusTestData.server_status_essentials_complete)
                endpoint_controller.delete()

                out, err = self.capsys.readouterr()
                assert out.count("Could not find an endpoint with id: '1' for security server: 'ssX'") > 0
                with self.capsys.disabled():
                    sys.stdout.write(out)
                    sys.stderr.write(err)
def test_endpoint_list_access(self):
    """list_access() renders access-right rows with all EndpointAccessListMapper headers."""
    with XRDSSTTest() as app:
        app._parsed_args = Namespace(ss='ssX', endpoint='1')
        with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint', return_value=EndpointTestData.get_endpoint):
            with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint_service_clients',
                            return_value=EndpointTestData.add_access_response):
                endpoint_controller = EndpointController()
                endpoint_controller.app = app
                endpoint_controller.load_config = (lambda: self.ss_config)
                endpoint_controller.list_access()

                # _last_rendered[0][0] is the header row, [0][1] the first data row.
                for header in EndpointAccessListMapper.headers():
                    assert header in endpoint_controller.app._last_rendered[0][0]
                assert endpoint_controller.app._last_rendered[0][1][0] == '1'
                assert endpoint_controller.app._last_rendered[0][1][1] == 'PUT /testPath'
                assert endpoint_controller.app._last_rendered[0][1][2] == 'Test'
                assert endpoint_controller.app._last_rendered[0][1][3] == 'DEV:security-server-owners'
def test_endpoint_delete_access(self):
    """delete_access() revokes the named access right and reports what it removed."""
    with XRDSSTTest() as app:
        app._parsed_args = Namespace(ss='ssX', endpoint='1', access='DEV:security-server-owners')
        with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint', return_value=EndpointTestData.get_endpoint):
            with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.get_endpoint_service_clients',
                            return_value=EndpointTestData.add_access_response):
                with mock.patch('xrdsst.api.endpoints_api.EndpointsApi.delete_endpoint_service_clients',
                                return_value={}):
                    endpoint_controller = EndpointController()
                    endpoint_controller.app = app
                    endpoint_controller.load_config = (lambda: self.ss_config)
                    endpoint_controller.delete_access()

                    out, err = self.capsys.readouterr()
                    assert out.count("Deleted access rights: '['DEV:security-server-owners']', endpoint id: '1', security server: 'ssX'") > 0
                    with self.capsys.disabled():
                        sys.stdout.write(out)
                        sys.stderr.write(err)
| 51.690522 | 148 | 0.528813 | 2,354 | 26,724 | 5.759558 | 0.091334 | 0.10621 | 0.036436 | 0.053253 | 0.85551 | 0.844004 | 0.840389 | 0.840389 | 0.831317 | 0.809264 | 0 | 0.015358 | 0.383588 | 26,724 | 516 | 149 | 51.790698 | 0.807685 | 0 | 0 | 0.725664 | 0 | 0.002212 | 0.155703 | 0.098825 | 0 | 0 | 0 | 0 | 0.05531 | 1 | 0.044248 | false | 0.002212 | 0.026549 | 0.00885 | 0.09292 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1bdd92c031676d5065731c3bd84ae2678e41f720 | 4,639 | py | Python | tests/models/test_inv_discoveryid.py | xUndero/noc | 9fb34627721149fcf7064860bd63887e38849131 | [
"BSD-3-Clause"
] | 1 | 2019-09-20T09:36:48.000Z | 2019-09-20T09:36:48.000Z | tests/models/test_inv_discoveryid.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | tests/models/test_inv_discoveryid.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# ----------------------------------------------------------------------
# inv.DiscoveryID tests
# ----------------------------------------------------------------------
# Copyright (C) 2007-2018 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Python modules
from __future__ import absolute_import
# Third-party modules
from builtins import str
import pytest
# NOC modules
from noc.core.mac import MAC
from noc.inv.models.discoveryid import DiscoveryID
@pytest.mark.parametrize(
"ranges,additional,result",
[
(
[{"first_chassis_mac": "01:02:03:25:6C:80", "last_chassis_mac": "01:02:03:25:6C:80"}],
[
"01:02:03:25:6C:8D",
"01:02:03:25:6C:8E",
"01:02:03:25:6C:8F",
"01:02:03:25:6C:8A",
"01:02:03:25:6C:8B",
"01:02:03:25:6C:8C",
"01:02:03:25:6C:99",
"01:02:03:25:6C:98",
"01:02:03:25:6C:93",
"01:02:03:25:6C:92",
"01:02:03:25:6C:91",
"01:02:03:25:6C:90",
"01:02:03:25:6C:97",
"01:02:03:25:6C:96",
"01:02:03:25:6C:95",
"01:02:03:25:6C:94",
"01:02:03:25:6C:9C",
"01:02:03:25:6C:9A",
"01:02:03:25:6C:C0",
"01:02:03:25:6C:88",
"01:02:03:25:6C:89",
"01:02:03:25:6C:84",
"01:02:03:25:6C:85",
"01:02:03:25:6C:86",
"01:02:03:25:6C:87",
"01:02:03:25:6C:81",
"01:02:03:25:6C:82",
"01:02:03:25:6C:83",
],
[
"01:02:03:25:6C:80",
"01:02:03:25:6C:81",
"01:02:03:25:6C:82",
"01:02:03:25:6C:83",
"01:02:03:25:6C:84",
"01:02:03:25:6C:85",
"01:02:03:25:6C:86",
"01:02:03:25:6C:87",
"01:02:03:25:6C:88",
"01:02:03:25:6C:89",
"01:02:03:25:6C:8A",
"01:02:03:25:6C:8B",
"01:02:03:25:6C:8C",
"01:02:03:25:6C:8D",
"01:02:03:25:6C:8E",
"01:02:03:25:6C:8F",
"01:02:03:25:6C:90",
"01:02:03:25:6C:91",
"01:02:03:25:6C:92",
"01:02:03:25:6C:93",
"01:02:03:25:6C:94",
"01:02:03:25:6C:95",
"01:02:03:25:6C:96",
"01:02:03:25:6C:97",
"01:02:03:25:6C:98",
"01:02:03:25:6C:99",
"01:02:03:25:6C:9A",
"01:02:03:25:6C:9C",
"01:02:03:25:6C:C0",
],
)
],
)
def test_macs_as_ints(ranges, additional, result):
    """_macs_as_ints merges range-declared and additional MACs into ordered ints."""
    mac_ints = DiscoveryID._macs_as_ints(ranges, additional)
    assert [str(MAC(mac)) for mac in mac_ints] == result
@pytest.mark.parametrize(
"macs,result",
[
(
[
"01:02:03:25:6C:80",
"01:02:03:25:6C:81",
"01:02:03:25:6C:82",
"01:02:03:25:6C:83",
"01:02:03:25:6C:84",
"01:02:03:25:6C:85",
"01:02:03:25:6C:86",
"01:02:03:25:6C:87",
"01:02:03:25:6C:88",
"01:02:03:25:6C:89",
"01:02:03:25:6C:8A",
"01:02:03:25:6C:8B",
"01:02:03:25:6C:8C",
"01:02:03:25:6C:8D",
"01:02:03:25:6C:8E",
"01:02:03:25:6C:8F",
"01:02:03:25:6C:90",
"01:02:03:25:6C:91",
"01:02:03:25:6C:92",
"01:02:03:25:6C:93",
"01:02:03:25:6C:94",
"01:02:03:25:6C:95",
"01:02:03:25:6C:96",
"01:02:03:25:6C:97",
"01:02:03:25:6C:98",
"01:02:03:25:6C:99",
"01:02:03:25:6C:9A",
"01:02:03:25:6C:9C",
"01:02:03:25:6C:C0",
],
[
("01:02:03:25:6C:80", "01:02:03:25:6C:9A"),
("01:02:03:25:6C:9C", "01:02:03:25:6C:9C"),
("01:02:03:25:6C:C0", "01:02:03:25:6C:C0"),
],
)
],
)
def test_macs_to_ranges(macs, result):
    """_macs_to_ranges collapses sorted MAC integers into contiguous ranges."""
    as_ints = [int(MAC(mac)) for mac in macs]
    ranges = DiscoveryID._macs_to_ranges(as_ints)
    assert [(rng.first_mac, rng.last_mac) for rng in ranges] == result
| 32.669014 | 98 | 0.372063 | 686 | 4,639 | 2.482507 | 0.126822 | 0.220787 | 0.33118 | 0.441574 | 0.715208 | 0.687023 | 0.687023 | 0.687023 | 0.668233 | 0.65414 | 0 | 0.359147 | 0.393188 | 4,639 | 141 | 99 | 32.900709 | 0.245826 | 0.079112 | 0 | 0.774194 | 0 | 0 | 0.390988 | 0.005632 | 0 | 0 | 0 | 0 | 0.016129 | 1 | 0.016129 | false | 0 | 0.040323 | 0 | 0.056452 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4010778002edc1cb4a8b39a93b7af83d09502083 | 151 | py | Python | eod/plugins/yolox/models/__init__.py | Helicopt/EOD | b5db36f4ce267bf64d093b8174bde2c4097b4718 | [
"Apache-2.0"
] | 196 | 2021-10-30T05:15:36.000Z | 2022-03-30T18:43:40.000Z | eod/tasks/det/plugins/yolox/models/__init__.py | YZW-explorer/EOD | f10e64de86c0f356ebf5c7e923f4042eec4207b1 | [
"Apache-2.0"
] | 12 | 2021-10-30T11:33:28.000Z | 2022-03-31T14:22:58.000Z | eod/tasks/det/plugins/yolox/models/__init__.py | YZW-explorer/EOD | f10e64de86c0f356ebf5c7e923f4042eec4207b1 | [
"Apache-2.0"
] | 23 | 2021-11-01T07:26:17.000Z | 2022-03-27T05:55:37.000Z | from .backbone import * # noqa
from .head import * # noqa
from .neck import * # noqa
from .preprocess import * # noqa
from .postprocess import * # noqa | 30.2 | 33 | 0.708609 | 20 | 151 | 5.35 | 0.4 | 0.46729 | 0.523364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.192053 | 151 | 5 | 33 | 30.2 | 0.877049 | 0.15894 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
40130f96b9c7c156f16f3d48080d5210cb40effb | 165 | py | Python | Codewars/5kyu/memoized-fibonacci/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codewars/5kyu/memoized-fibonacci/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codewars/5kyu/memoized-fibonacci/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python - 3.4.3
# Check the memoized fibonacci against known Fibonacci values; the Codewars
# framework supplies both `test` and the solution's `fibonacci`.
test.assert_equals(fibonacci(70), 190392490709135)
test.assert_equals(fibonacci(60), 1548008755920)
test.assert_equals(fibonacci(50), 12586269025)
| 27.5 | 50 | 0.8 | 22 | 165 | 5.863636 | 0.590909 | 0.232558 | 0.372093 | 0.581395 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.311688 | 0.066667 | 165 | 5 | 51 | 33 | 0.525974 | 0.084848 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4013fb7c4d67e01e0b5704604f99d6116b4fff09 | 64,253 | py | Python | lrs/tests/StatementManagerTests.py | Sembian/ADL_LRS | 3535dad6371af3f9f5b67f7eabfd0f4a393e0d62 | [
"Apache-2.0"
] | null | null | null | lrs/tests/StatementManagerTests.py | Sembian/ADL_LRS | 3535dad6371af3f9f5b67f7eabfd0f4a393e0d62 | [
"Apache-2.0"
] | null | null | null | lrs/tests/StatementManagerTests.py | Sembian/ADL_LRS | 3535dad6371af3f9f5b67f7eabfd0f4a393e0d62 | [
"Apache-2.0"
] | null | null | null | import uuid
import json
import urllib
import base64
from django.test import TestCase
from lrs import models, views
from lrs.objects.ActivityManager import ActivityManager
from django.core.urlresolvers import reverse
class StatementManagerTests(TestCase):
@classmethod
def setUpClass(cls):
    # Announce the test module once per class run (Python 2 print statement).
    print "\n%s" % __name__
def setUp(self):
    """Register a fresh test user and build the HTTP Basic auth header it uses."""
    self.username = "tester1"
    self.email = "test1@tester.com"
    self.password = "test"
    # Basic auth header reused by every statement request in this class.
    self.auth = "Basic %s" % base64.b64encode("%s:%s" % (self.username, self.password))
    form = {"username":self.username, "email":self.email,"password":self.password,"password2":self.password}
    self.client.post(reverse(views.register),form, X_Experience_API_Version="1.0.0")
def test_minimum_stmt(self):
    """POSTing a minimal statement persists its actor, verb and activity."""
    stmt = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tincan@adlnet.gov"},
        "verb":{"id": "http://adlnet.gov/expapi/verbs/created","display": {"en-US":"created"}},
        "object":{"id":"http://example.adlnet.gov/tincan/example/simplestatement"}})

    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)

    # POST returns a JSON list of statement ids; fetch the stored rows by id.
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    verb = models.Verb.objects.get(id=stmt.verb.id)
    actor = models.Agent.objects.get(id=stmt.actor.id)

    self.assertEqual(activity.activity_id, "http://example.adlnet.gov/tincan/example/simplestatement")
    self.assertEqual(actor.mbox, "mailto:tincan@adlnet.gov")
    self.assertEqual(verb.verb_id, "http://adlnet.gov/expapi/verbs/created")
def test_given_stmtID_stmt(self):
    """PUT with ?statementId stores the statement under the client-supplied UUID."""
    st_id = str(uuid.uuid1())
    stmt = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tincan@adlnet.gov"},
        "verb":{"id": "http://adlnet.gov/expapi/verbs/created","display": {"en-US":"created", "en-GB":"made"}},
        "object":{"id":"http://example.adlnet.gov/tincan/example/simplestatement"}})
    path = "%s?%s" % (reverse(views.statements), urllib.urlencode({"statementId":st_id}))

    response = self.client.put(path, stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    # PUT with a client-supplied id answers 204 No Content.
    self.assertEqual(response.status_code, 204)

    stmt = models.Statement.objects.get(statement_id=st_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    verb = models.Verb.objects.get(id=stmt.verb.id)
    actor = models.Agent.objects.get(id=stmt.actor.id)

    # Both entries of the verb's display language map must survive storage.
    lang_maps = verb.display
    for k, v in lang_maps.iteritems():
        if k == 'en-GB':
            self.assertEqual(v, 'made')
        elif k == 'en-US':
            self.assertEqual(v, 'created')

    self.assertEqual(activity.activity_id, "http://example.adlnet.gov/tincan/example/simplestatement")
    self.assertEqual(actor.mbox, "mailto:tincan@adlnet.gov")
    self.assertEqual(verb.verb_id, "http://adlnet.gov/expapi/verbs/created")

    st = models.Statement.objects.get(statement_id=st_id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.verb.id, verb.id)
def test_stmt_ref_as_object(self):
    """A StatementRef object creates a StatementRef row pointing at the target id."""
    # First store a target statement under a known id.
    st_id = str(uuid.uuid1())
    stmt = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tincan@adlnet.gov"},
        "verb":{"id": "http://adlnet.gov/expapi/verbs/created","display": {"en-US":"created"}},
        "object":{"id":"http://example.adlnet.gov/tincan/example/simplestatement"}})
    path = "%s?%s" % (reverse(views.statements), urllib.urlencode({"statementId":st_id}))
    response = self.client.put(path, stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 204)

    # Then post a second statement whose object references the first one.
    stmt2 = json.dumps({"actor":{"name":"Example Admin", "mbox":"mailto:admin@example.com"},
        'verb': {"id":"http://adlnet.gov/expapi/verbs/attempted"}, 'object': {'objectType':'StatementRef',
        'id': st_id}})
    response = self.client.post(reverse(views.statements), stmt2, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)

    stmts = models.Statement.objects.all()
    stmt_refs = models.StatementRef.objects.filter(ref_id=st_id)
    self.assertEqual(len(stmt_refs), 1)
    self.assertEqual(stmt_refs[0].ref_id, st_id)
    self.assertEqual(len(stmts), 2)
def test_voided_wrong_type(self):
    """objectType 'Statement' (instead of 'StatementRef') must be rejected with 400."""
    stmt = json.dumps({"actor":{"name":"Example Admin", "mbox":"mailto:admin@example.com"},
        'verb': {"id":"http://adlnet.gov/expapi/verbs/voided"}, 'object': {'objectType':'Statement', 'id': "12345678-1234-5678-1234-567812345678"}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, "The objectType in the statement's object is not valid - Statement")
def test_no_verb_stmt(self):
    """A statement missing its verb must be rejected with 400."""
    stmt = json.dumps({"actor":{"objectType":"Agent", "mbox":"mailto:t@t.com"}, "object": {'id':'act:activity2'}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, 'Statement is missing actor, verb, or object')
def test_no_object_stmt(self):
    """A statement missing its object must be rejected with 400."""
    stmt = json.dumps({"actor":{"objectType":"Agent", "mbox":"mailto:t@t.com"}, "verb": {"id":"verb:verb/url"}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, 'Statement is missing actor, verb, or object')
def test_no_actor_stmt(self):
    """A statement missing its actor must be rejected with 400."""
    stmt = json.dumps({"object":{"id":"act:activity_test"}, "verb": {"id":"verb:verb/url"}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, 'Statement is missing actor, verb, or object')
def test_voided_true_stmt(self):
    """A client-supplied 'voided' field is invalid and must be rejected with 400."""
    stmt = json.dumps({'actor':{'objectType':'Agent', 'mbox':'mailto:l@l.com'}, 'verb': {"id":'verb:verb/url/kicked'},'voided': True, 'object': {'id':'act:activity3'}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, 'Invalid field(s) found in Statement - voided')
def test_result_stmt(self):
    """A result's completion, success, response and duration are all stored."""
    time = "P0Y0M0DT1H311M01S"  # ISO 8601 duration literal
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:s@s.com'},
        'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity12'},
        "result": {'completion': True, 'success': True, 'response': 'kicked', 'duration': time}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]

    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    self.assertEqual(stmt.verb.verb_id, "verb:verb/url")
    self.assertEqual(stmt.object_activity.id, activity.id)

    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.result_completion, True)
    self.assertEqual(st.result_success, True)
    self.assertEqual(st.result_response, 'kicked')
    self.assertEqual(st.result_duration, time)
def test_result_ext_stmt(self):
    """Result extensions are stored and retrievable alongside the other result fields."""
    time = "P0Y0M0DT1H311M01S"  # ISO 8601 duration literal
    stmt = json.dumps({"actor":{'name':'jon',
        'mbox':'mailto:jon@example.com'},'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity13'},
        "result": {'completion': True, 'success': True, 'response': 'yes', 'duration': time,
        'extensions':{'ext:key1': 'value1', 'ext:key2':'value2'}}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]

    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    actor = models.Agent.objects.get(id=stmt.actor.id)
    # result_extensions is a mapping; check keys and values independently.
    extKeys = stmt.result_extensions.keys()
    extVals = stmt.result_extensions.values()

    self.assertEqual(stmt.verb.verb_id, "verb:verb/url")
    self.assertEqual(stmt.object_activity.id, activity.id)
    self.assertEqual(stmt.actor.id, actor.id)

    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.actor.id, actor.id)
    self.assertEqual(st.result_completion, True)
    self.assertEqual(st.result_success, True)
    self.assertEqual(st.result_response, 'yes')
    self.assertEqual(st.result_duration, time)

    self.assertEqual(actor.name, 'jon')
    self.assertEqual(actor.mbox, 'mailto:jon@example.com')
    self.assertEqual(actor.objectType, 'Agent')

    self.assertIn('ext:key1', extKeys)
    self.assertIn('ext:key2', extKeys)
    self.assertIn('value1', extVals)
    self.assertIn('value2', extVals)
def test_result_score_scaled_up_good(self):
    """scaled == 1.0, the inclusive upper bound, is accepted with 200."""
    stmt = json.dumps({"actor":{'objectType':'Agent',
        'name':'jon','mbox':'mailto:jon@example.com'},'verb': {"id":"verb:verb/url"},
        "object": {'id':'act:activity14'}, "result": {'score':{'scaled':1.0},'completion': True,
        'success': True, 'response': 'yes'}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
def test_result_score_scaled_down_good(self):
    """scaled == 0.0 (written 00.000) is inside the valid range and accepted with 200."""
    stmt = json.dumps({"actor":{'objectType':'Agent',
        'name':'jon','mbox':'mailto:jon@example.com'},'verb': {"id":"verb:verb/url"},
        "object": {'id':'act:activity14'}, "result": {'score':{'scaled':00.000},'completion': True,
        'success': True, 'response': 'yes'}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
def test_result_score_scaled_up_bad(self):
    """scaled just above 1 (1.01) must be rejected with 400."""
    stmt = json.dumps({"actor":{'objectType':'Agent',
        'name':'jon','mbox':'mailto:jon@example.com'},'verb': {"id":"verb:verb/url"},
        "object": {'id':'act:activity14'}, "result": {'score':{'scaled':1.01},'completion': True,
        'success': True, 'response': 'yes'}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, 'Score scaled value in statement result must be between -1 and 1')
def test_result_score_scaled(self):
    """scaled=-1.00001 is just below the allowed range and must be rejected.

    NOTE(review): despite the generic name, this is the lower-bound
    counterpart of test_result_score_scaled_up_bad.
    """
    payload = {
        "actor": {'objectType': 'Agent', 'name': 'jon', 'mbox': 'mailto:jon@example.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity14'},
        "result": {'score': {'scaled': -1.00001}, 'completion': True,
                   'success': True, 'response': 'yes'},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, 'Score scaled value in statement result must be between -1 and 1')
def test_result_score_raw_up_good(self):
    """A raw score exactly equal to max is accepted."""
    payload = {
        "actor": {'objectType': 'Agent', 'name': 'jon', 'mbox': 'mailto:jon@example.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity14'},
        "result": {'score': {'raw': 1.01, 'min': -2.0, 'max': 1.01},
                   'completion': True, 'success': True, 'response': 'yes'},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 200)
def test_result_score_raw_down_good(self):
    """A raw score exactly equal to min is accepted."""
    payload = {
        "actor": {'objectType': 'Agent', 'name': 'jon', 'mbox': 'mailto:jon@example.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity14'},
        "result": {'score': {'raw': -20.0, 'min': -20.0, 'max': 1.01},
                   'completion': True, 'success': True, 'response': 'yes'},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 200)
def test_result_score_raw_up_bad(self):
    """A raw score just above max must be rejected with a 400."""
    payload = {
        "actor": {'objectType': 'Agent', 'name': 'jon', 'mbox': 'mailto:jon@example.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity14'},
        "result": {'score': {'raw': 1.02, 'min': -2.0, 'max': 1.01},
                   'completion': True, 'success': True, 'response': 'yes'},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, 'Score raw value in statement result must be between minimum and maximum')
def test_result_score_raw_down_bad(self):
    """A raw score just below min must be rejected with a 400."""
    payload = {
        "actor": {'objectType': 'Agent', 'name': 'jon', 'mbox': 'mailto:jon@example.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity14'},
        "result": {'score': {'raw': -2.00001, 'min': -2.0, 'max': 1.01},
                   'completion': True, 'success': True, 'response': 'yes'},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, 'Score raw value in statement result must be between minimum and maximum')
def test_result_score_min_max_bad(self):
    """A score whose min exceeds its max must be rejected with a 400."""
    payload = {
        "actor": {'objectType': 'Agent', 'name': 'jon', 'mbox': 'mailto:jon@example.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity14'},
        "result": {'score': {'raw': 1.5, 'min': 2.0, 'max': 1.01},
                   'completion': True, 'success': True, 'response': 'yes'},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, 'Score minimum in statement result must be less than the maximum')
def test_result_score_stmt(self):
    """POST a statement with a fully populated result (scaled score,
    completion, success, response, ISO 8601 duration, extensions) and
    verify each result field round-trips through the model layer."""
    time = "P0Y0M0DT1H311M01S"  # ISO 8601 duration string, stored verbatim
    stmt = json.dumps({"actor":{'objectType':'Agent','name':'jon','mbox':'mailto:jon@example.com'},
        'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity14'}, "result": {'score':{'scaled':.95},
        'completion': True, 'success': True, 'response': 'yes', 'duration': time,
        'extensions':{'ext:key1': 'value1', 'ext:key2':'value2'}}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    # The response body is a JSON list of stored statement ids.
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    actor = models.Agent.objects.get(id=stmt.actor.id)
    # Result extensions are stored as a dict-like field on the statement.
    extKeys = stmt.result_extensions.keys()
    extVals = stmt.result_extensions.values()
    self.assertEqual(stmt.verb.verb_id, "verb:verb/url")
    self.assertEqual(stmt.object_activity.id, activity.id)
    self.assertEqual(stmt.actor.id, actor.id)
    # Re-fetch by primary key and repeat the checks on the fresh instance.
    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.actor.id, actor.id)
    self.assertEqual(st.result_completion, True)
    self.assertEqual(st.result_success, True)
    self.assertEqual(st.result_response, 'yes')
    self.assertEqual(st.result_duration, time)
    self.assertEqual(st.result_score_scaled, .95)
    self.assertEqual(activity.activity_id, 'act:activity14')
    self.assertEqual(actor.name, 'jon')
    self.assertEqual(actor.mbox, 'mailto:jon@example.com')
    self.assertIn('ext:key1', extKeys)
    self.assertIn('ext:key2', extKeys)
    self.assertIn('value1', extVals)
    self.assertIn('value2', extVals)
def test_no_registration_context_stmt(self):
    """When a context omits 'registration', the LRS assigns one itself."""
    payload = {
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:s@s.com'},
        "verb": {"id": "verb:verb/url"},
        "object": {'id': 'act:activity14'},
        'context': {'contextActivities': {'other': {'id': 'act:NewActivityID'}}},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 200)
    saved = models.Statement.objects.get(statement_id=json.loads(resp.content)[0])
    # The server must have generated a registration uuid for us.
    self.assertIsNotNone(saved.context_registration)
def test_wrong_statement_type_in_context(self):
    """A context 'statement' whose objectType is not StatementRef is a 400."""
    payload = {
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:s@s.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity16'},
        'context': {
            'contextActivities': {'other': {'id': 'act:NewActivityID'}},
            'revision': 'foo', 'platform': 'bar', 'language': 'en-US',
            # Wrong on purpose: context.statement must be a StatementRef.
            'statement': {'objectType': 'Activity', 'id': "act:some/act"},
        },
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, "StatementRef objectType must be set to 'StatementRef'")
def test_invalid_context_registration(self):
    """A context registration that is not a valid UUID is rejected."""
    payload = {
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:s@s.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity15'},
        'context': {
            'registration': "bbb",  # deliberately not a UUID
            'contextActivities': {'other': {'id': 'act:NewActivityID'},
                                  'grouping': {'id': 'act:GroupID'}},
            'revision': 'foo', 'platform': 'bar',
            'language': 'en-US',
        },
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, 'Context registration - bbb is not a valid UUID')
def test_context_stmt(self):
    """POST a statement with a full context (registration, grouping/other
    contextActivities, revision, platform, language) and verify that every
    context field persists on the stored statement."""
    guid = str(uuid.uuid1())
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:s@s.com'},
        'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity15'},
        'context':{'registration': guid, 'contextActivities': {'other': {'id': 'act:NewActivityID'},
        'grouping':{'id':'act:GroupID'}},'revision': 'foo', 'platform':'bar','language': 'en-US'}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    # One StatementContextActivity row per contextActivities key.
    context_activities = stmt.statementcontextactivity_set.all()
    self.assertEqual(stmt.verb.verb_id, "verb:verb/url")
    self.assertEqual(stmt.object_activity.id, activity.id)
    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    # Each row's key ('grouping'/'other') maps to the activity posted
    # under that key in the context.
    for ca in context_activities:
        if ca.key == 'grouping':
            self.assertEqual(ca.context_activity.all()[0].activity_id, 'act:GroupID')
        elif ca.key == 'other':
            self.assertEqual(ca.context_activity.all()[0].activity_id, 'act:NewActivityID')
    self.assertEqual(st.context_registration, guid)
    self.assertEqual(st.context_revision, 'foo')
    self.assertEqual(st.context_platform, 'bar')
    self.assertEqual(st.context_language, 'en-US')
def test_context_activity_list(self):
    """A contextActivities key may hold a LIST of activities; verify that
    two 'other' activities plus one 'grouping' activity are all stored
    under the correct keys."""
    guid = str(uuid.uuid1())
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:s@s.com'},
        'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity15'},
        'context':{'registration': guid,
        'contextActivities': {'other': [{'id': 'act:NewActivityID'},{'id':'act:anotherActID'}],
        'grouping':{'id':'act:GroupID'}},
        'revision': 'foo', 'platform':'bar',
        'language': 'en-US'}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    # Two rows: one for 'other' (holding both activities) and one for 'grouping'.
    context_activities = models.StatementContextActivity.objects.filter(statement=stmt)
    self.assertEqual(len(context_activities), 2)
    context_activity_keys = [ca.key for ca in context_activities]
    self.assertEqual(len(context_activity_keys), 2)
    self.assertIn('grouping', context_activity_keys)
    self.assertIn('other', context_activity_keys)
    # Flatten every stored context activity id across both keys.
    context_activity_activities = []
    for ca in context_activities:
        for c in ca.context_activity.all():
            context_activity_activities.append(c.activity_id)
    self.assertEqual(len(context_activity_activities), 3)
    self.assertIn('act:NewActivityID', context_activity_activities)
    self.assertIn('act:anotherActID', context_activity_activities)
    self.assertIn('act:GroupID', context_activity_activities)
    self.assertEqual(stmt.verb.verb_id, "verb:verb/url")
    self.assertEqual(stmt.object_activity.id, activity.id)
    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.context_registration, guid)
    self.assertEqual(st.context_revision, 'foo')
    self.assertEqual(st.context_platform, 'bar')
    self.assertEqual(st.context_language, 'en-US')
def test_context_ext_stmt(self):
    """Context extensions posted with a statement must be stored and
    retrievable alongside the other context fields."""
    guid = str(uuid.uuid1())
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:s@s.com'},
        'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity16'},
        'context':{'registration': guid, 'contextActivities': {'other': {'id': 'act:NewActivityID'}},
        'revision': 'foo', 'platform':'bar','language': 'en-US', 'extensions':{'ext:k1': 'v1', 'ext:k2': 'v2'}}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    # Context extensions are exposed as a dict-like field.
    extKeys = stmt.context_extensions.keys()
    extVals = stmt.context_extensions.values()
    context_activities = stmt.statementcontextactivity_set.all()
    self.assertEqual(stmt.verb.verb_id, "verb:verb/url")
    self.assertEqual(stmt.object_activity.id, activity.id)
    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.context_registration, guid)
    # Only one context activity key was posted ('other').
    self.assertEqual(context_activities[0].key, 'other')
    self.assertEqual(context_activities[0].context_activity.all()[0].activity_id, 'act:NewActivityID')
    self.assertEqual(st.context_revision, 'foo')
    self.assertEqual(st.context_platform, 'bar')
    self.assertEqual(st.context_language, 'en-US')
    self.assertIn('ext:k1', extKeys)
    self.assertIn('ext:k2', extKeys)
    self.assertIn('v1', extVals)
    self.assertIn('v2', extVals)
def test_stmtref_in_context_stmt(self):
    """A context 'statement' StatementRef pointing at an existing statement
    is accepted; the referenced statement must resolve and the remaining
    context fields must persist.

    Fix: the original built an UNSAVED ``models.StatementRef(ref_id=...)``
    only to read back the ref_id it had just been given and assert that
    tautology (``stmt_ref.ref_id == stmt_guid``).  The referenced statement
    is now looked up directly by the guid, which tests the same thing
    without the no-op model instance.
    """
    # First store a statement (PUT with an explicit statementId) so the
    # context StatementRef has something real to point at.
    stmt_guid = str(uuid.uuid1())
    existing_stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:s@s.com'},
        'verb': {"id":"verb:verb/url/outer"},"object": {'id':'act:activityy16'}})
    path = "%s?%s" % (reverse(views.statements), urllib.urlencode({"statementId":stmt_guid}))
    response = self.client.put(path, existing_stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 204)
    guid = str(uuid.uuid1())
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:s@s.com'},
        'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity16'},
        'context':{'registration': guid, 'contextActivities': {'other': {'id': 'act:NewActivityID'}},
        'revision': 'foo', 'platform':'bar','language': 'en-US',
        'statement': {'objectType': 'StatementRef','id': stmt_guid}}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    # Resolve the referenced statement directly by its guid.
    neststmt = models.Statement.objects.get(statement_id=stmt_guid)
    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.context_registration, guid)
    self.assertEqual(st.context_revision, 'foo')
    self.assertEqual(st.context_platform, 'bar')
    self.assertEqual(st.context_language, 'en-US')
    self.assertEqual(neststmt.verb.verb_id, "verb:verb/url/outer")
def test_substmt_in_context_stmt(self):
    """A SubStatement is not allowed as a context 'statement' — only a
    StatementRef is; the server must answer 400."""
    payload = {
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:s@s.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity16'},
        'context': {
            'contextActivities': {'other': {'id': 'act:NewActivityID'}},
            'revision': 'foo', 'platform': 'bar', 'language': 'en-US',
            # Wrong on purpose: a SubStatement where a StatementRef belongs.
            'statement': {'objectType': 'SubStatement',
                          'actor': {'objectType': 'Agent',
                                    'mbox': 'mailto:sss@sss.com'},
                          'verb': {'id': 'verb:verb/url/nest/nest'},
                          'object': {'id': 'act://activity/url'}},
        },
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, "StatementRef objectType must be set to 'StatementRef'")
def test_instructor_in_context_stmt(self):
    """A context may name an instructor Agent and reference a previously
    stored statement; verify the instructor and all context fields persist."""
    # Store a statement for the context StatementRef to point at.
    stmt_guid = str(uuid.uuid1())
    existing_stmt = json.dumps({'actor':{'objectType':'Agent',
        'mbox':'mailto:s@s.com'},'verb': {"id":"verb:verb/url/outer"},"object": {'id':'act:activityy16'}})
    path = "%s?%s" % (reverse(views.statements), urllib.urlencode({"statementId":stmt_guid}))
    response = self.client.put(path, existing_stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 204)
    guid = str(uuid.uuid1())
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:jon@example.com',
        'name':'jon'},'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity17'},
        'context':{'registration': guid, 'instructor': {'objectType':'Agent','name':'jon',
        'mbox':'mailto:jon@example.com'},'contextActivities': {'other': {'id': 'act:NewActivityID'}},
        'revision': 'foo', 'platform':'bar','language': 'en-US', 'statement': {'id': stmt_guid,
        'objectType':'StatementRef'}}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    # NOTE(review): this StatementRef instance is never saved; it is only
    # a holder for ref_id used in the lookup on the next line.
    stmt_ref = models.StatementRef(ref_id=stmt_guid)
    neststmt = models.Statement.objects.get(statement_id=stmt_ref.ref_id)
    context_activities = stmt.statementcontextactivity_set.all()
    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.context_registration, guid)
    self.assertEqual(context_activities[0].key, 'other')
    self.assertEqual(context_activities[0].context_activity.all()[0].activity_id, 'act:NewActivityID')
    self.assertEqual(st.context_revision, 'foo')
    self.assertEqual(st.context_platform, 'bar')
    self.assertEqual(st.context_language, 'en-US')
    self.assertEqual(neststmt.verb.verb_id, "verb:verb/url/outer")
    # The instructor round-trips as a full Agent.
    self.assertEqual(st.context_instructor.objectType, 'Agent')
    self.assertEqual(st.context_instructor.name, 'jon')
    self.assertEqual(st.context_instructor.mbox, 'mailto:jon@example.com')
def test_actor_with_context_stmt(self):
    """An actor distinct from the context instructor: both agents must be
    stored independently and every context field must persist."""
    # Store a statement for the context StatementRef to point at.
    stmt_guid = str(uuid.uuid1())
    existing_stmt = json.dumps({'actor':{'objectType':'Agent',
        'mbox':'mailto:s@s.com'},'verb': {"id":"verb:verb/url/outer"},"object": {'id':'act:activityy16'}})
    path = "%s?%s" % (reverse(views.statements), urllib.urlencode({"statementId":stmt_guid}))
    response = self.client.put(path, existing_stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 204)
    guid = str(uuid.uuid1())
    # NOTE(review): the actor mbox 'mailto:mailto:s@s.com' has a doubled
    # scheme — presumably intentional to make it a distinct agent from
    # 'mailto:s@s.com' above, but confirm.
    stmt = json.dumps({'actor':{'objectType':'Agent', 'name': 'steve',
        'mbox':'mailto:mailto:s@s.com'},'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity18'},
        'context':{'registration': guid, 'instructor': {'objectType':'Agent','name':'jon',
        'mbox':'mailto:jon@example.com'},'contextActivities': {'other': {'id': 'act:NewActivityID1'}},
        'revision': 'foob', 'platform':'bard','language': 'en-US', 'statement': {'id':stmt_guid,
        "objectType":"StatementRef"}}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    activity = models.Activity.objects.get(id=stmt.object_activity.id)
    # Unsaved StatementRef used only as a ref_id holder for the lookup below.
    stmt_ref = models.StatementRef(ref_id=stmt_guid)
    neststmt = models.Statement.objects.get(statement_id=stmt_ref.ref_id)
    st = models.Statement.objects.get(id=stmt.id)
    context_activities = stmt.statementcontextactivity_set.all()
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.verb.verb_id, "verb:verb/url" )
    self.assertEqual(st.context_registration, guid)
    self.assertEqual(context_activities[0].key, 'other')
    self.assertEqual(context_activities[0].context_activity.all()[0].activity_id, 'act:NewActivityID1')
    self.assertEqual(st.context_revision, 'foob')
    self.assertEqual(st.context_platform, 'bard')
    self.assertEqual(st.context_language, 'en-US')
    self.assertEqual(neststmt.verb.verb_id, "verb:verb/url/outer")
    self.assertEqual(st.context_instructor.objectType, 'Agent')
    self.assertEqual(st.context_instructor.name, 'jon')
    self.assertEqual(st.context_instructor.mbox, 'mailto:jon@example.com')
def test_agent_as_object_with_context_stmt(self):
    """A statement whose object is an Agent may still carry a full context
    (instructor, contextActivities, language, StatementRef); verify the
    context persists."""
    # Store a statement for the context StatementRef to point at.
    stmt_guid = str(uuid.uuid1())
    existing_stmt = json.dumps({'actor':{'objectType':'Agent',
        'mbox':'mailto:mailto:s@s.com'},'verb': {"id":"verb:verb/url/outer"},"object": {'id':'act:activityy16'}})
    path = "%s?%s" % (reverse(views.statements), urllib.urlencode({"statementId":stmt_guid}))
    response = self.client.put(path, existing_stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 204)
    guid = str(uuid.uuid1())
    stmt = json.dumps(
        {'actor':{
            'objectType':'Agent',
            'mbox':'mailto:l@l.com',
            'name':'lou'
            },
        'object':{
            'objectType':'Agent',
            'name': 'lou',
            'mbox':'mailto:l@l.com'
            },
        'verb': {"id":"verb:verb/url"},
        'context':{
            'registration': guid,
            'instructor': {
                'objectType':'Agent',
                'name':'jon',
                'mbox':'mailto:jon@example.com'
                },
            'contextActivities': {
                'other': {'id': 'act:NewActivityID1'}
                },
            'language': 'en-US',
            'statement': {
                'id': stmt_guid,
                'objectType': 'StatementRef'
                }
            }
        }
    )
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    # Unsaved StatementRef used only as a ref_id holder for the lookup below.
    stmt_ref = models.StatementRef(ref_id=stmt_guid)
    neststmt = models.Statement.objects.get(statement_id=stmt_ref.ref_id)
    context_activities = stmt.statementcontextactivity_set.all()
    st = models.Statement.objects.get(id=stmt.id)
    self.assertEqual(st.verb.verb_id, "verb:verb/url")
    self.assertEqual(st.context_registration, guid)
    self.assertEqual(context_activities[0].key, 'other')
    self.assertEqual(context_activities[0].context_activity.all()[0].activity_id, 'act:NewActivityID1')
    self.assertEqual(st.context_language, 'en-US')
    self.assertEqual(neststmt.verb.verb_id, "verb:verb/url/outer")
    self.assertEqual(st.context_instructor.objectType, 'Agent')
    # Instructor ('jon') must not be conflated with the actor ('lou').
    self.assertEqual(st.context_instructor.name, 'jon')
    self.assertEqual(st.context_instructor.mbox, 'mailto:jon@example.com')
def test_agent_as_object(self):
    """An Agent (identified by openID) may be the statement object and
    its name/openID must persist."""
    payload = {
        'object': {'objectType': 'Agent', 'name': 'lulu', 'openID': 'id:luluid'},
        'verb': {"id": "verb:verb/url"},
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:t@t.com'},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 200)
    saved = models.Statement.objects.get(statement_id=json.loads(resp.content)[0])
    object_agent = models.Agent.objects.get(id=saved.object_agent.id)
    self.assertEqual(object_agent.name, 'lulu')
    self.assertEqual(object_agent.openID, 'id:luluid')
def test_unallowed_substmt_field(self):
    """A SubStatement carrying an 'authority' field (not allowed inside
    SubStatements) must be rejected with a 400."""
    payload = {
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:s@s.com'},
        'verb': {"id": "verb:verb/url"},
        'object': {
            'objectType': 'SubStatement',
            'actor': {'objectType': 'Agent', 'mbox': 'mailto:ss@ss.com'},
            'verb': {"id": "verb:verb/url/nest"},
            'object': {'objectType': 'activity', 'id': 'act:testex.com'},
            # Deliberately invalid: authority is a top-level-only field.
            'authority': {'objectType': 'Agent', 'mbox': 'mailto:s@s.com'},
        },
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, 'Invalid field(s) found in SubStatement - authority')
def test_nested_substatement(self):
    """A SubStatement may not contain another SubStatement as its object;
    the server must answer 400."""
    payload = {
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:s@s.com'},
        'verb': {"id": "verb:verb/url"},
        'object': {
            'objectType': 'SubStatement',
            'actor': {'objectType': 'Agent', 'mbox': 'mailto:ss@ss.com'},
            'verb': {"id": "verb:verb/url/nest"},
            # Deliberately invalid: SubStatement nested inside SubStatement.
            'object': {
                'objectType': 'SubStatement',
                'actor': {'objectType': 'Agent', 'mbox': 'mailto:sss@sss.com'},
                'verb': {'id': 'verb:verb/url/nest/nest'},
                'object': {'id': 'act://activity/url'},
            },
        },
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(resp.content, 'Cannot nest a SubStatement inside of another SubStatement')
def test_substatement_as_object(self):
    """A valid SubStatement (with its own result and context) as the
    statement object: verify both the outer statement and the nested
    SubStatement round-trip to their models."""
    guid = str(uuid.uuid1())
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:s@s.com'},
        'verb': {"id":"verb:verb/url"}, 'object':{'objectType':'SubStatement',
        'actor':{'objectType':'Agent','mbox':'mailto:ss@ss.com'},'verb': {"id":"verb:verb/url/nest"},
        'object': {'objectType':'Activity', 'id':'act:testex.com'}, 'result':{'completion': True, 'success': True,
        'response': 'kicked'}, 'context':{'registration': guid,
        'contextActivities': {'other': {'id': 'act:NewActivityID'}},'revision': 'foo', 'platform':'bar',
        'language': 'en-US', 'extensions':{'ext:k1': 'v1', 'ext:k2': 'v2'}}}})
    response = self.client.post(reverse(views.statements), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt = models.Statement.objects.get(statement_id=stmt_id)
    outer_stmt = models.Statement.objects.get(id=stmt.id)
    # The nested SubStatement is stored as its own model row, linked from
    # the outer statement's object_substatement.
    sub_stmt = models.SubStatement.objects.get(id=outer_stmt.object_substatement.id)
    sub_obj = models.Activity.objects.get(id=sub_stmt.object_activity.id)
    sub_act = models.Agent.objects.get(id=sub_stmt.actor.id)
    self.assertEqual(outer_stmt.verb.verb_id, "verb:verb/url")
    self.assertEqual(outer_stmt.actor.mbox, 'mailto:s@s.com')
    self.assertEqual(sub_stmt.verb.verb_id, "verb:verb/url/nest")
    self.assertEqual(sub_obj.activity_id, 'act:testex.com')
    self.assertEqual(sub_act.mbox, 'mailto:ss@ss.com')
    self.assertEqual(sub_stmt.context_registration, guid)
    self.assertEqual(sub_stmt.result_response, 'kicked')
def test_group_stmt(self):
    """A Group (with members) may act as the statement actor; verify the
    group's identifying fields persist."""
    group_name = "the group SMT"
    group_mbox = "mailto:the.groupSMT@example.com"
    group_actor = {
        "objectType": "Group",
        "name": group_name,
        "mbox": group_mbox,
        "member": [
            {"name": "agentA", "mbox": "mailto:agentA@example.com"},
            {"name": "agentB", "mbox": "mailto:agentB@example.com"},
        ],
    }
    payload = {
        "actor": group_actor,
        'verb': {"id": "verb:verb/url"},
        "object": {"id": "act:activity5", "objectType": "Activity"},
    }
    resp = self.client.post(reverse(views.statements), json.dumps(payload),
                            content_type="application/json",
                            Authorization=self.auth,
                            X_Experience_API_Version="1.0.0")
    self.assertEqual(resp.status_code, 200)
    saved = models.Statement.objects.get(statement_id=json.loads(resp.content)[0])
    stored_activity = models.Activity.objects.get(id=saved.object_activity.id)
    stored_actor = models.Agent.objects.get(id=saved.actor.id)
    self.assertEqual(saved.verb.verb_id, "verb:verb/url")
    self.assertEqual(saved.object_activity.id, stored_activity.id)
    self.assertEqual(saved.actor.id, stored_actor.id)
    refetched = models.Statement.objects.get(id=saved.id)
    self.assertEqual(refetched.object_activity.id, stored_activity.id)
    self.assertEqual(refetched.actor.id, stored_actor.id)
    self.assertEqual(stored_actor.name, group_name)
    self.assertEqual(stored_actor.mbox, group_mbox)
def test_activity_correctresponsepattern(self):
    """Creating two interaction activities through ActivityManager stores
    their correctResponsesPattern answers on each activity."""
    definition_one = {
        'name': {'en-US': 'testname'},
        'description': {'en-US': 'testdesc'},
        'type': 'http://adlnet.gov/expapi/activities/cmi.interaction',
        'interactionType': 'true-false',
        'correctResponsesPattern': ['true'],
        'extensions': {'ext:key1': 'value1'},
    }
    definition_two = {
        'name': {'en-US': 'testname2'},
        'description': {'en-US': 'testdesc2'},
        'type': 'http://adlnet.gov/expapi/activities/cmi.interaction',
        'interactionType': 'true-false',
        'correctResponsesPattern': ['true'],
        'extensions': {'ext2:key1': 'value1'},
    }
    act1 = ActivityManager({'objectType': 'Activity', 'id': 'act:foo',
                            'definition': definition_one})
    act2 = ActivityManager({'objectType': 'Activity', 'id': 'act:baz',
                            'definition': definition_two})
    # Both activities exist, and each recorded the 'true' answer.
    self.assertEqual(len(models.Activity.objects.all()), 2)
    self.assertIn('true', act1.Activity.activity_definition_crpanswers)
    self.assertIn('true', act2.Activity.activity_definition_crpanswers)
# Tests if an act from context already exists in a different stmt, if an act from context is the object in the
# same stmt, and if an act from context doesn't exist anywhere
def test_context_statement_delete(self):
    """Deleting a statement must remove the Statement row only — agents,
    verbs, and activities referenced by it (including via context) survive."""
    guid = str(uuid.uuid1())
    # First statement: establishes 'act:activity' before it is reused in
    # the second statement's contextActivities.
    stmt1 = json.dumps({
        'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
        'verb': {"id":"verb:verb/url"},
        "object": {'id':'act:activity'}})
    response = self.client.post(reverse(views.statements), stmt1, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt1 = models.Statement.objects.get(statement_id=stmt_id)
    st1_id = str(stmt1.statement_id)
    # Second statement: context references the first statement plus a mix
    # of pre-existing ('act:activity'), own-object ('act:activity1'), and
    # brand-new ('act:activity2') activities.
    stmt2 = json.dumps({
        'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
        'verb': {"id":"verb:verb/url"},
        "object": {'id':'act:activity1'},
        'context':{'registration': guid, 'instructor':{'objectType':'Agent', 'mbox':'mailto:inst@inst.com'},
        'team':{'objectType': 'Group', 'name':'mygroup',
        'member':[{"name":"agent_in_group","mbox":"mailto:agentingroup@example.com"}]},
        'contextActivities': {'other': [{'id': 'act:activity'},{'id':'act:activity1'}],
        'grouping':{'id':'act:activity2'}},'revision': 'foo', 'platform':'bar','language': 'en-US',
        'extensions':{'ext:key1': 'value1'},
        'statement':{'objectType': 'StatementRef','id':st1_id}}})
    response = self.client.post(reverse(views.statements), stmt2, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt2 = models.Statement.objects.get(statement_id=stmt_id)
    self.assertEqual(len(models.Statement.objects.all()), 2)
    # Team creates a group object and the agent inside of itself, plus self.auth
    self.assertEqual(len(models.Agent.objects.all()), 5)
    self.assertEqual(len(models.Verb.objects.all()), 1)
    self.assertEqual(len(models.Activity.objects.all()), 3)
    models.Statement.objects.get(id=stmt2.id).delete()
    self.assertEqual(len(models.Statement.objects.all()), 1)
    # Agents/activities/verbs are not deleted
    self.assertEqual(len(models.Agent.objects.all()), 5)
    self.assertEqual(len(models.Verb.objects.all()), 1)
    self.assertEqual(len(models.Activity.objects.all()), 3)
    self.assertIn('act:activity', models.Activity.objects.values_list('activity_id', flat=True))
def test_context_in_another_context_statement_delete(self):
    """Three statements share context activities with each other; deleting
    them one by one must remove only each statement's own
    StatementContextActivity rows (2 apiece), never the shared agents,
    verbs, or activities."""
    stmt1 = json.dumps({
        'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
        'verb': {"id":"verb:verb/url1"},
        "object": {'id':'act:activity1'},
        'context':{'instructor':{'objectType':'Agent', 'mbox':'mailto:inst@inst.com'},
        'team':{'objectType': 'Group', 'name':'mygroup',
        'member':[{"name":"agent_in_group","mbox":"mailto:agentingroup@example.com"}]},
        'contextActivities': {'other': [{'id': 'act:activity1'},{'id':'act:activity2'}],
        'grouping':{'id':'act:activity3'}},'revision': 'foo', 'platform':'bar','language': 'en-US',
        'extensions':{'ext:key1': 'value1'}}})
    response = self.client.post(reverse(views.statements), stmt1, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt1 = models.Statement.objects.get(statement_id=stmt_id)
    # Second statement reuses act:activity2/act:activity3 from stmt1's
    # context and adds act:activity4/act:activity5.
    stmt2 = json.dumps({
        'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
        'verb': {"id":"verb:verb/url2"},
        "object": {'id':'act:activity4'},
        'context':{'instructor':{'objectType':'Agent', 'mbox':'mailto:inst@inst.com'},
        'team':{'objectType': 'Group', 'name':'mygroup',
        'member':[{"name":"agent_in_group","mbox":"mailto:agentingroup@example.com"}]},
        'contextActivities': {'other': [{'id': 'act:activity2'},{'id':'act:activity3'}],
        'grouping':{'id':'act:activity5'}},'revision': 'foo', 'platform':'bar','language': 'en-US'}})
    response = self.client.post(reverse(views.statements), stmt2, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt2 = models.Statement.objects.get(statement_id=stmt_id)
    # Third statement reuses activities from both earlier statements and
    # adds act:activity6 plus a new instructor agent.
    stmt3 = json.dumps({
        'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
        'verb': {"id":"verb:verb/url3"},
        "object": {'id':'act:activity1'},
        'context':{'instructor':{'objectType':'Agent', 'mbox':'mailto:three@inst.com'},
        'team':{'objectType': 'Group', 'name':'mygroup',
        'member':[{"name":"agent_in_group","mbox":"mailto:agentingroup@example.com"}]},
        'contextActivities': {'other': [{'id': 'act:activity6'},{'id':'act:activity5'}],
        'grouping':{'id':'act:activity2'}},'revision': 'three', 'platform':'bar','language': 'en-US'}})
    response = self.client.post(reverse(views.statements), stmt3, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version="1.0.0")
    self.assertEqual(response.status_code, 200)
    stmt_id = json.loads(response.content)[0]
    stmt3 = models.Statement.objects.get(statement_id=stmt_id)
    # Baseline counts after all three posts.
    self.assertEqual(len(models.Activity.objects.all()), 6)
    self.assertEqual(len(models.Agent.objects.all()), 8)
    self.assertEqual(len(models.Verb.objects.all()), 3)
    self.assertEqual(len(models.StatementContextActivity.objects.all()), 6)
    self.assertEqual(len(models.Statement.objects.all()), 3)
    # Each delete removes the statement and its own two context-activity
    # rows ('other' + 'grouping'); shared objects are untouched.
    models.Statement.objects.get(id=stmt3.id).delete()
    # Agents/activities/verbs are not deleted
    self.assertEqual(len(models.Activity.objects.all()), 6)
    self.assertEqual(len(models.Agent.objects.all()), 8)
    self.assertEqual(len(models.Verb.objects.all()), 3)
    self.assertEqual(len(models.StatementContextActivity.objects.all()), 4)
    self.assertEqual(len(models.Statement.objects.all()), 2)
    models.Statement.objects.get(id=stmt2.id).delete()
    self.assertEqual(len(models.Activity.objects.all()), 6)
    self.assertEqual(len(models.Agent.objects.all()), 8)
    self.assertEqual(len(models.Verb.objects.all()), 3)
    self.assertEqual(len(models.StatementContextActivity.objects.all()), 2)
    self.assertEqual(len(models.Statement.objects.all()), 1)
    models.Statement.objects.get(id=stmt1.id).delete()
    self.assertEqual(len(models.Activity.objects.all()), 6)
    self.assertEqual(len(models.Agent.objects.all()), 8)
    self.assertEqual(len(models.Verb.objects.all()), 3)
    self.assertEqual(len(models.StatementContextActivity.objects.all()), 0)
    self.assertEqual(len(models.Statement.objects.all()), 0)
def test_simple_statement_delete(self):
    """Deleting one of two statements must remove only that statement;
    agents, activities and verbs shared with the surviving statement stay."""
    def post_statement(payload):
        # POST the statement to the LRS and return the saved model instance.
        response = self.client.post(reverse(views.statements), json.dumps(payload),
            content_type="application/json",
            Authorization=self.auth, X_Experience_API_Version="1.0.0")
        self.assertEqual(response.status_code, 200)
        saved_id = json.loads(response.content)[0]
        return models.Statement.objects.get(statement_id=saved_id)

    stmt1 = post_statement({
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:a@a.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity1'}})
    stmt2 = post_statement({
        'actor': {'objectType': 'Agent', 'mbox': 'mailto:b@b.com'},
        'verb': {"id": "verb:verb/url"},
        "object": {'id': 'act:activity1'}})
    # Two actors were posted; presumably the third agent is the authenticated
    # user -- confirm against the authority-handling code.
    self.assertEqual(len(models.Agent.objects.all()), 3)
    self.assertEqual(len(models.Activity.objects.all()), 1)
    self.assertEqual(len(models.Verb.objects.all()), 1)
    self.assertEqual(len(models.Statement.objects.all()), 2)
    models.Statement.objects.get(id=stmt2.id).delete()
    # Only the statement itself goes away; shared objects are retained.
    self.assertEqual(len(models.Agent.objects.all()), 3)
    self.assertEqual(len(models.Activity.objects.all()), 1)
    self.assertEqual(len(models.Verb.objects.all()), 1)
    self.assertEqual(len(models.Statement.objects.all()), 1)
    self.assertEqual(models.Statement.objects.all()[0].id, stmt1.id)
def test_more_conacts_delete(self):
"""Deleting a statement whose context references another statement's
activity must leave the shared activity (and all agents/verbs) intact."""
# First statement: plain actor/verb/object targeting act:activity1.
stmt1 = json.dumps({
'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
'verb': {"id":"verb:verb/url"},
"object": {'id':'act:activity1'}})
response = self.client.post(reverse(views.statements), stmt1, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
# Rebind stmt1 from the JSON payload to the saved model instance.
stmt1 = models.Statement.objects.get(statement_id=stmt_id)
# Second statement: same actor/verb, new object, and a context whose
# contextActivities 'other' entry points at act:activity1 from stmt1.
stmt2 = json.dumps({
'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
'verb': {"id":"verb:verb/url"},
"object": {'id':'act:activity2'},
'context':{'instructor':{'objectType':'Agent', 'mbox':'mailto:inst@inst.com'},
'contextActivities': {'other': {'id': 'act:activity1'}},'revision': 'foo', 'platform':'bar',
'language': 'en-US'}})
response = self.client.post(reverse(views.statements), stmt2, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
stmt2 = models.Statement.objects.get(statement_id=stmt_id)
# Baseline counts after both posts.  NOTE(review): 3 agents for 2 distinct
# actors -- presumably the third is the auth user; confirm.
self.assertEqual(len(models.Agent.objects.all()), 3)
self.assertEqual(len(models.Activity.objects.all()), 2)
self.assertEqual(len(models.Verb.objects.all()), 1)
self.assertEqual(len(models.Statement.objects.all()), 2)
# Deleting stmt2 must not cascade to the activity it referenced in context.
models.Statement.objects.get(id=stmt2.id).delete()
self.assertEqual(len(models.Agent.objects.all()), 3)
self.assertEqual(len(models.Activity.objects.all()), 2)
self.assertEqual(len(models.Verb.objects.all()), 1)
self.assertEqual(len(models.Statement.objects.all()), 1)
def test_activity_also_in_conact(self):
"""An activity used both as a statement object and as a context activity
must survive deletion of either referencing statement."""
# stmt1: object is act:activity1, context references act:activity2.
stmt1 = json.dumps({
'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
'verb': {"id":"verb:verb/url"},
"object": {'id':'act:activity1'},
'context':{'instructor':{'objectType':'Agent', 'mbox':'mailto:inst@inst.com'},
'contextActivities': {'other': {'id': 'act:activity2'}},'revision': 'foo', 'platform':'bar',
'language': 'en-US'}})
response = self.client.post(reverse(views.statements), stmt1, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
stmt1 = models.Statement.objects.get(statement_id=stmt_id)
# stmt2: object is act:activity2, the same activity stmt1 used in context.
stmt2 = json.dumps({
'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
'verb': {"id":"verb:verb/url"},
"object": {'id':'act:activity2'}})
response = self.client.post(reverse(views.statements), stmt2, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
stmt2 = models.Statement.objects.get(statement_id=stmt_id)
# Baseline counts after both posts.
self.assertEqual(len(models.Agent.objects.all()), 3)
self.assertEqual(len(models.Activity.objects.all()), 2)
self.assertEqual(len(models.Verb.objects.all()), 1)
self.assertEqual(len(models.Statement.objects.all()), 2)
# Delete the statement that owned act:activity2 as its object ...
models.Statement.objects.get(id=stmt2.id).delete()
# ... the activity must remain because stmt1 still references it in context.
self.assertEqual(len(models.Agent.objects.all()), 3)
self.assertEqual(len(models.Activity.objects.all()), 2)
self.assertEqual(len(models.Verb.objects.all()), 1)
self.assertEqual(len(models.Statement.objects.all()), 1)
# Verify by identity, not just count, that nothing referenced was lost.
agents = models.Agent.objects.values_list('mbox', flat=True)
self.assertIn('mailto:a@a.com', agents)
self.assertIn('mailto:inst@inst.com', agents)
acts = models.Activity.objects.values_list('activity_id', flat=True)
self.assertIn('act:activity1', acts)
self.assertIn('act:activity2', acts)
self.assertEqual(models.Verb.objects.all()[0].verb_id, 'verb:verb/url')
self.assertEqual(models.Statement.objects.all()[0].id, stmt1.id)
def test_sub_delete(self):
"""Deleting statements must cascade correctly through SubStatements,
StatementRefs and (Sub)StatementContextActivities, while never deleting
shared agents, activities or verbs."""
# stmt1: a statement whose object is a SubStatement with its own actor,
# verb, activity, result and context (including a team group).
stmt1 = json.dumps(
{"actor":{"objectType":"Agent","mbox":"mailto:out@out.com"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/1"},
"object":{"objectType":"SubStatement",
"actor":{"objectType":"Agent","mbox":"mailto:sub@sub.com"},
"verb": {"id":"verb:verb/url/nest1"},
"object": {"objectType":"Activity", "id":"act:subactivity1"},
"result":{"completion": True, "success": True,"response": "kicked"},
"context":{"contextActivities": {"other": {"id": "act:subconactivity1"}},
'team':{'objectType': 'Group', 'name':'conteamgroup',
'member':[{"name":"agent_in_conteamgroup","mbox":"mailto:actg@actg.com"}]},"revision": "foo",
"platform":"bar","language": "en-US","extensions":{"ext:k1": "v1", "ext:k2": "v2"}}}})
response = self.client.post(reverse(views.statements), stmt1, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
stmt1 = models.Statement.objects.get(statement_id=stmt_id)
# stmt2: a StatementRef pointing at stmt1.
stmt2 = json.dumps(
{"actor": {"objectType": "Agent", "mbox": "mailto:ref@ref.com"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/2"},
"object":{"objectType": "StatementRef", "id":str(stmt1.statement_id)}})
response = self.client.post(reverse(views.statements), stmt2, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
stmt2 = models.Statement.objects.get(statement_id=stmt_id)
# stmt3: a plain activity statement.
stmt3 = json.dumps(
{"actor": {"objectType": "Agent", "mbox": "mailto:norm@norm.com"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/3"},
"object":{"objectType": "Activity", "id":"act:activity1"}})
response = self.client.post(reverse(views.statements), stmt3, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
stmt3 = models.Statement.objects.get(statement_id=stmt_id)
# stmt4: context with a context activity and a StatementRef to stmt3.
stmt4 = json.dumps({
'actor':{'objectType':'Agent','mbox':'mailto:a@a.com'},
'verb': {"id":"http://adlnet.gov/expapi/verbs/4"},
"object": {'id':'act:activity2'},
'context':{'instructor':{'objectType':'Agent', 'mbox':'mailto:inst@inst.com'},
'contextActivities': {'other': {'id': 'act:conactivity1'}},'revision': 'foo', 'platform':'bar',
'language': 'en-US', 'statement':{'objectType': 'StatementRef',
'id':str(stmt3.statement_id)}}})
response = self.client.post(reverse(views.statements), stmt4, content_type="application/json",
Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(response.status_code, 200)
stmt_id = json.loads(response.content)[0]
stmt4 = models.Statement.objects.get(statement_id=stmt_id)
# Baseline counts after all four posts.
self.assertEqual(len(models.Statement.objects.all()), 4)
self.assertEqual(len(models.Agent.objects.all()), 9)
self.assertEqual(len(models.Activity.objects.all()), 5)
self.assertEqual(len(models.Verb.objects.all()), 5)
self.assertEqual(len(models.SubStatement.objects.all()), 1)
self.assertEqual(len(models.StatementRef.objects.all()), 1)
self.assertEqual(len(models.StatementContextActivity.objects.all()), 1)
self.assertEqual(len(models.SubStatementContextActivity.objects.all()), 1)
# Deleting stmt4 removes its StatementContextActivity only.
models.Statement.objects.get(id=stmt4.id).delete()
self.assertEqual(len(models.Statement.objects.all()), 3)
self.assertEqual(len(models.Agent.objects.all()), 9)
self.assertEqual(len(models.Activity.objects.all()), 5)
self.assertEqual(len(models.Verb.objects.all()), 5)
self.assertEqual(len(models.SubStatement.objects.all()), 1)
self.assertEqual(len(models.StatementRef.objects.all()), 1)
self.assertEqual(len(models.StatementContextActivity.objects.all()), 0)
self.assertEqual(len(models.SubStatementContextActivity.objects.all()), 1)
# Deleting stmt3 (target of stmt2's StatementRef) keeps the ref intact.
models.Statement.objects.get(id=stmt3.id).delete()
self.assertEqual(len(models.Statement.objects.all()), 2)
self.assertEqual(len(models.Agent.objects.all()), 9)
self.assertEqual(len(models.Activity.objects.all()), 5)
self.assertEqual(len(models.Verb.objects.all()), 5)
self.assertEqual(len(models.SubStatement.objects.all()), 1)
self.assertEqual(len(models.StatementRef.objects.all()), 1)
self.assertEqual(len(models.StatementContextActivity.objects.all()), 0)
self.assertEqual(len(models.SubStatementContextActivity.objects.all()), 1)
# Deleting stmt2 removes its StatementRef object.
models.Statement.objects.get(id=stmt2.id).delete()
self.assertEqual(len(models.Statement.objects.all()), 1)
self.assertEqual(len(models.Agent.objects.all()), 9)
self.assertEqual(len(models.Activity.objects.all()), 5)
self.assertEqual(len(models.Verb.objects.all()), 5)
self.assertEqual(len(models.SubStatement.objects.all()), 1)
self.assertEqual(len(models.StatementRef.objects.all()), 0)
self.assertEqual(len(models.StatementContextActivity.objects.all()), 0)
self.assertEqual(len(models.SubStatementContextActivity.objects.all()), 1)
# Deleting stmt1 cascades to the SubStatement and its context activity,
# but agents/activities/verbs are never deleted.
models.Statement.objects.get(id=stmt1.id).delete()
self.assertEqual(len(models.Statement.objects.all()), 0)
self.assertEqual(len(models.Agent.objects.all()), 9)
self.assertEqual(len(models.Activity.objects.all()), 5)
self.assertEqual(len(models.Verb.objects.all()), 5)
self.assertEqual(len(models.SubStatement.objects.all()), 0)
self.assertEqual(len(models.StatementRef.objects.all()), 0)
self.assertEqual(len(models.StatementContextActivity.objects.all()), 0)
self.assertEqual(len(models.SubStatementContextActivity.objects.all()), 0)
| 55.342808 | 172 | 0.631457 | 7,576 | 64,253 | 5.24934 | 0.044879 | 0.111268 | 0.043904 | 0.055521 | 0.894516 | 0.870603 | 0.86326 | 0.857251 | 0.840881 | 0.831225 | 0 | 0.01638 | 0.196162 | 64,253 | 1,160 | 173 | 55.390517 | 0.753606 | 0.006085 | 0 | 0.680585 | 0 | 0 | 0.215914 | 0.016052 | 0 | 0 | 0 | 0 | 0.334029 | 0 | null | null | 0.003132 | 0.008351 | null | null | 0.001044 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
404d5fef89df73f64110ff83c61af9054aa4566a | 19,518 | py | Python | iirnet/filter.py | csteinmetz1/IIRNet | ed82e559ec36afde38f071a95293a5f8c2f3048c | [
"Apache-2.0"
] | 27 | 2021-10-11T08:28:44.000Z | 2022-02-28T10:33:50.000Z | iirnet/filter.py | csteinmetz1/IIRNet | ed82e559ec36afde38f071a95293a5f8c2f3048c | [
"Apache-2.0"
] | null | null | null | iirnet/filter.py | csteinmetz1/IIRNet | ed82e559ec36afde38f071a95293a5f8c2f3048c | [
"Apache-2.0"
] | 3 | 2021-11-10T01:10:11.000Z | 2022-01-25T03:30:07.000Z | import sys
import torch
import numpy as np
import scipy.signal
import scipy.stats as stats
from numpy import linalg as LA
from scipy.stats import loguniform
def generate_pass_filter(num_points=512, max_order=2):
    """Generate a random Butterworth lowpass/highpass filter and its response.

    The filter type (lowpass vs. highpass) is chosen uniformly at random and
    the normalized cutoff is drawn log-uniformly from [1e-3, 1.0) so that low
    cutoffs are sampled as often as high ones.

    Args:
        num_points (int): Number of frequency points to evaluate.
        max_order (int): Order of the generated Butterworth filter.

    Returns:
        mag (ndarray): Magnitude response (linear), length `num_points`.
        phs (ndarray): Unwrapped phase response, length `num_points`.
        real (ndarray): Real part of the complex frequency response.
        imag (ndarray): Imaginary part of the complex frequency response.
        sos (ndarray): Second-order sections of the filter.
    """
    btype = np.random.choice(["lowpass", "highpass"])
    wn = float(loguniform.rvs(1e-3, 1e0))
    sos = scipy.signal.butter(max_order, wn, output="sos", btype=btype)
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
def generate_parametric_eq(num_points, max_order, f_s=48000):
    """Generate a random parametric EQ cascade following
    [Nercessian 2020](https://dafx2020.mdw.ac.at/proceedings/papers/DAFx2020_paper_7.pdf):
    one low shelf, one high shelf, and (max_order // 2 - 2) peaking sections,
    using the standard audio-EQ-cookbook biquad formulas.

    Args:
        num_points (int): Number of frequency points to evaluate.
        max_order (int): Total filter order; fixes the number of peak sections.
        f_s (float): Sample rate in Hz used to map frequencies to radians.

    Returns:
        mag (ndarray): Magnitude response (linear), length `num_points`.
        phs (ndarray): Unwrapped phase response.
        real (ndarray): Real part of the complex response.
        imag (ndarray): Imaginary part of the complex response.
        sos (ndarray): Second-order sections, normalized so a0 == 1.
    """
    sos_holder = []
    f_min = 20
    f_max = 20000
    g_min = -10
    g_max = 10
    q_min = 0.1
    q_max_shelf = 1
    q_max_peak = 3
    bern_shelf = 0.5  # probability a shelf has non-zero dB gain (paper: 0.5)
    bern_peak = 0.33  # probability a peak has non-zero dB gain (paper: 0.333)
    num_peaks = (max_order) // 2 - 2  # number of peaking sections (paper: 10)

    ## Low shelf: corner frequency beta-skewed toward the low end.
    f_low = np.random.beta(0.25, 5) * (f_max - f_min) + f_min
    omega_low = 2 * np.pi * f_low / f_s
    g = np.random.binomial(1, bern_shelf) * (
        np.random.beta(5, 5) * (g_max - g_min) + g_min
    )
    q = np.random.beta(1, 5) * (q_max_shelf - q_min) + q_min
    A = np.power(10, g / 40)
    alpha = np.sin(omega_low) * np.sqrt((A ** 2 + 1) * ((1 / q) - 1) + 2 * A)
    b0 = A * ((A + 1) - (A - 1) * np.cos(omega_low) + alpha)
    b1 = 2 * A * ((A - 1) - (A + 1) * np.cos(omega_low))
    b2 = A * ((A + 1) - (A - 1) * np.cos(omega_low) - alpha)
    a0 = (A + 1) + (A - 1) * np.cos(omega_low) + alpha
    a1 = -2 * A * ((A - 1) + (A + 1) * np.cos(omega_low))
    a2 = (A + 1) + (A - 1) * np.cos(omega_low) - alpha
    sos_holder.append(np.asarray([b0, b1, b2, a0, a1, a2]))

    ## High shelf: corner frequency beta-skewed toward the high end.
    f_high = np.random.beta(4, 5) * (f_max - f_min) + f_min
    omega_high = 2 * np.pi * f_high / f_s
    g = np.random.binomial(1, bern_shelf) * (
        np.random.beta(5, 5) * (g_max - g_min) + g_min
    )
    q = np.random.beta(1, 5) * (q_max_shelf - q_min) + q_min
    A = np.power(10, g / 40)
    alpha = np.sin(omega_high) * np.sqrt((A ** 2 + 1) * ((1 / q) - 1) + 2 * A)
    b0 = A * ((A + 1) + (A - 1) * np.cos(omega_high) + alpha)
    b1 = -2 * A * ((A - 1) + (A + 1) * np.cos(omega_high))
    b2 = A * ((A + 1) + (A - 1) * np.cos(omega_high) - alpha)
    a0 = (A + 1) - (A - 1) * np.cos(omega_high) + alpha
    a1 = 2 * A * ((A - 1) - (A + 1) * np.cos(omega_high))
    a2 = (A + 1) - (A - 1) * np.cos(omega_high) - alpha
    sos_holder.append(np.asarray([b0, b1, b2, a0, a1, a2]))

    ## Peaking sections: centers drawn uniformly between the shelf corners.
    for jj in range(num_peaks):
        f_peak = np.random.uniform(low=f_low, high=f_high)
        omega = 2 * np.pi * f_peak / f_s
        g = np.random.binomial(1, bern_peak) * (
            np.random.beta(5, 5) * (g_max - g_min) + g_min
        )
        q = np.random.beta(1, 5) * (q_max_peak - q_min) + q_min
        alpha = np.sin(omega) / (2 * q)
        A = np.power(10, g / 40)
        b0 = 1 + (alpha * A)
        b1 = -2 * np.cos(omega)
        b2 = 1 - (alpha * A)
        a0 = 1 + (alpha / A)
        a1 = -2 * np.cos(omega)
        a2 = 1 - (alpha / A)
        sos_holder.append(np.asarray([b0, b1, b2, a0, a1, a2]))

    sos = np.vstack(sos_holder)
    sos = sos / sos[:, 3][:, None]  ## sosfreqz requires sos[:,3] == 1
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
def generate_normal_biquad(num_points, max_order, min_order=None, norm=1.0, seed=0):
    """Generate a cascade of biquads with normally distributed coefficients.

    Args:
        num_points (int): Number of frequency points to evaluate.
        max_order (int): Filter order used when `min_order` is None
            (two orders per biquad section).
        min_order (int, optional): If given, the order is drawn uniformly
            from [min_order, max_order).
        norm (float): Standard deviation of the coefficient distribution.
        seed (int): Unused; kept for interface compatibility with the other
            generator functions.

    Returns:
        (mag, phs, real, imag, sos): Linear magnitude, unwrapped phase, the
        real/imaginary parts of the response, and the normalized sos array.
    """
    if min_order is None:
        chosen_ord = max_order
    else:
        chosen_ord = np.random.randint(min_order, max_order)
    sos = np.random.normal(scale=norm, size=(chosen_ord // 2, 6))
    # sosfreqz requires each section's a0 (column 3) to equal 1.
    a0 = sos[:, 3].reshape(-1, 1)
    sos = sos / a0
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
def generate_uniform_parametric_eq(num_points, max_order, f_s=48000, seed=0):
    """Generate a random parametric EQ cascade as in
    [Nercessian 2020](https://dafx2020.mdw.ac.at/proceedings/papers/DAFx2020_paper_7.pdf),
    but with every shelf/peak parameter drawn from a uniform distribution
    (center frequency directly in radians) instead of the paper's
    beta/Bernoulli scheme.

    Args:
        num_points (int): Number of frequency points to evaluate.
        max_order (int): Total filter order; yields (max_order // 2 - 2)
            peaking sections plus one low shelf and one high shelf.
        f_s (int): Unused here (frequencies are drawn in radians); kept for
            interface compatibility with generate_parametric_eq.
        seed (int): Unused; kept for interface compatibility.

    Returns:
        mag (ndarray): Magnitude response (linear), length `num_points`.
        phs (ndarray): Unwrapped phase response.
        real (ndarray): Real part of the complex response.
        imag (ndarray): Imaginary part of the complex response.
        sos (ndarray): Second-order sections, normalized so a0 == 1.
    """
    sos_holder = []
    num_peaks = (max_order) // 2 - 2  # number of peaking sections

    ## Low shelf: omega ~ U(0, pi), gain(dB) ~ U(-10, 10), Q ~ U(0.1, 1).
    omega_low = np.random.uniform(low=0.0, high=np.pi)
    g = np.random.uniform(low=-10.0, high=10.0)
    q = np.random.uniform(low=0.1, high=1.0)
    A = np.power(10, g / 40)
    alpha = np.sin(omega_low) * np.sqrt((A ** 2 + 1) * ((1 / q) - 1) + 2 * A)
    b0 = A * ((A + 1) - (A - 1) * np.cos(omega_low) + alpha)
    b1 = 2 * A * ((A - 1) - (A + 1) * np.cos(omega_low))
    b2 = A * ((A + 1) - (A - 1) * np.cos(omega_low) - alpha)
    a0 = (A + 1) + (A - 1) * np.cos(omega_low) + alpha
    a1 = -2 * A * ((A - 1) + (A + 1) * np.cos(omega_low))
    a2 = (A + 1) + (A - 1) * np.cos(omega_low) - alpha
    sos_holder.append(np.asarray([b0, b1, b2, a0, a1, a2]))

    ## High shelf: omega ~ U(0, pi), gain(dB) ~ U(-10, 10), Q ~ U(0.1, 1).
    omega_high = np.random.uniform(low=0.0, high=np.pi)
    g = np.random.uniform(low=-10.0, high=10.0)
    q = np.random.uniform(low=0.1, high=1.0)
    A = np.power(10, g / 40)
    alpha = np.sin(omega_high) * np.sqrt((A ** 2 + 1) * ((1 / q) - 1) + 2 * A)
    b0 = A * ((A + 1) + (A - 1) * np.cos(omega_high) + alpha)
    b1 = -2 * A * ((A - 1) + (A + 1) * np.cos(omega_high))
    b2 = A * ((A + 1) + (A - 1) * np.cos(omega_high) - alpha)
    a0 = (A + 1) - (A - 1) * np.cos(omega_high) + alpha
    a1 = 2 * A * ((A - 1) - (A + 1) * np.cos(omega_high))
    a2 = (A + 1) - (A - 1) * np.cos(omega_high) - alpha
    sos_holder.append(np.asarray([b0, b1, b2, a0, a1, a2]))

    ## Peaking sections: omega ~ U(0, pi), gain(dB) ~ U(-10, 10), Q ~ U(0.1, 3).
    for jj in range(num_peaks):
        omega = np.random.uniform(low=0.0, high=np.pi)
        g = np.random.uniform(low=-10, high=10)
        q = np.random.uniform(low=0.1, high=3.0)
        alpha = np.sin(omega) / (2 * q)
        A = np.power(10, g / 40)
        b0 = 1 + (alpha * A)
        b1 = -2 * np.cos(omega)
        b2 = 1 - (alpha * A)
        a0 = 1 + (alpha / A)
        a1 = -2 * np.cos(omega)
        a2 = 1 - (alpha / A)
        sos_holder.append(np.asarray([b0, b1, b2, a0, a1, a2]))

    sos = np.vstack(sos_holder)
    sos = sos / sos[:, 3][:, None]  ## sosfreqz requires sos[:,3] == 1
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
def generate_characteristic_poly_filter(
    num_points, max_order, min_order=None, eps=1e-8, seed=0
):
    """Generate a random filter whose zeros/poles are eigenvalues of random
    Gaussian matrices (i.e. roots of random characteristic polynomials).

    Args:
        num_points (int): Number of frequency points to evaluate.
        max_order (int): Filter order used when `min_order` is None.
        min_order (int, optional): If given, the order is drawn uniformly
            from [min_order, max_order).
        eps (float): Unused; kept for interface compatibility.
        seed (int): Unused; kept for interface compatibility.

    Returns:
        (mag, phs, real, imag, sos): Linear magnitude, unwrapped phase, the
        real/imaginary parts of the response, and the normalized sos array.
    """
    norm = 1.0  ##SHOULD BE HYPERPARAMETER
    sos = []
    if min_order is None:
        chosen_ord = max_order
    else:
        chosen_ord = np.random.randint(low=min_order, high=max_order)
    all_num = np.zeros(chosen_ord, dtype=np.cdouble)
    all_den = np.zeros(chosen_ord, dtype=np.cdouble)
    num_char_matrix = np.random.normal(size=(chosen_ord, chosen_ord))
    den_char_matrix = np.random.normal(size=(chosen_ord, chosen_ord))
    num_w, _ = LA.eig(num_char_matrix)
    den_w, _ = LA.eig(den_char_matrix)
    # Sort by |imag| descending so complex-conjugate eigenvalue pairs are
    # adjacent and combine into real-coefficient biquad sections below.
    sort_num = np.argsort(-1 * np.abs(np.imag(num_w)))
    sort_den = np.argsort(-1 * np.abs(np.imag(den_w)))
    # 1/sqrt(order) scaling keeps eigenvalues near the unit disk.
    num_w = norm * (1 / np.sqrt(chosen_ord)) * num_w[sort_num]
    all_num[: len(num_w)] = num_w
    den_w = norm * (1 / np.sqrt(chosen_ord)) * den_w[sort_den]
    all_den[: len(den_w)] = den_w
    for ii in range(chosen_ord // 2):
        num_poly = np.real(
            np.polymul([1, -1 * all_num[2 * ii]], [1, -1 * all_num[2 * ii + 1]])
        )
        den_poly = np.real(
            np.polymul([1, -1 * all_den[2 * ii]], [1, -1 * all_den[2 * ii + 1]])
        )
        sos.append(np.hstack((num_poly, den_poly)))
    if chosen_ord % 2 == 1:
        # Odd order: pair the leftover root with a root at zero so the final
        # section still has real coefficients.
        num_poly = np.real(np.polymul([1, 0], [1, -1 * all_num[-1]]))
        den_poly = np.real(np.polymul([1, 0], [1, -1 * all_den[-1]]))
        sos.append(np.hstack((num_poly, den_poly)))
    sos = np.asarray(sos)
    num_sos = sos.shape[0]
    # Pad with identity sections up to (chosen_ord + 1) // 2 sections total.
    sos_proto = np.tile(np.asarray([1.0, 0, 0, 1.0, 0, 0]), ((chosen_ord + 1) // 2, 1))
    sos_proto[:num_sos, :] = sos
    sos = sos_proto
    sos = sos / sos[:, 3][:, None]  ##sosfreqz requires sos[:,3]=1
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
def generate_uniform_disk_filter(
    num_points,
    max_order,
    min_order=None,
    eps=1e-8,
    min_freq=20.0,
    log=False,
    fs=44100,
):
    """Generate a filter whose conjugate pole/zero pairs have magnitudes and
    angles drawn uniformly (magnitude in (eps, 0.9), angle in (eps, pi)).

    Args:
        num_points (int): Number of frequency points to evaluate.
        num_points / max_order / min_order: as in the other generators;
            when `min_order` is given, the order is drawn from [2, max_order).
        eps (float): Lower bound so magnitudes/angles are never exactly zero.
        min_freq (float): Minimum pole/zero frequency in Hz (log mode only).
        log (bool): If True, draw angles log-uniformly instead of uniformly.
        fs (float): Sample rate in Hz (log mode only).

    Returns:
        (mag, phs, real, imag, sos): Linear magnitude, unwrapped phase, the
        real/imaginary parts of the response, and the normalized sos array.
    """
    # a and b bound the log-uniform angle sampling; the minimum cannot be
    # zero, so min_freq (20 Hz default) sets the lowest pole/zero frequency.
    a = min_freq / (0.5 * fs * np.pi)
    b = np.pi
    norm = 0.9  ##SHOULD BE HYPERPARAMETER
    sos = []
    if min_order is None:
        num_ord = torch.tensor([max_order]).numpy()
        den_ord = torch.tensor([max_order]).numpy()
    else:
        num_ord = torch.randint(2, max_order, [1]).numpy()
        den_ord = num_ord
    chosen_max = np.max((num_ord, den_ord))
    # One (magnitude, angle) draw per conjugate pair of zeros/poles.
    zeros_mags = torch.distributions.uniform.Uniform(eps, norm).sample(num_ord // 2)
    poles_mags = torch.distributions.uniform.Uniform(eps, norm).sample(num_ord // 2)
    if not log:
        zeros_args = torch.distributions.uniform.Uniform(eps, np.pi).sample(
            num_ord // 2
        )
        poles_args = torch.distributions.uniform.Uniform(eps, np.pi).sample(
            num_ord // 2
        )
    else:
        zeros_args = loguniform.rvs(a, b, size=(num_ord) // 2)
        poles_args = loguniform.rvs(a, b, size=(num_ord) // 2)
    for z_mag, z_arg, p_mag, p_arg in zip(
        zeros_mags, zeros_args, poles_mags, poles_args
    ):
        # A conjugate root pair m*e^{+-j*theta} expands to the real biquad
        # 1 - 2*m*cos(theta)*z^-1 + m^2*z^-2.
        num_poly = [1, -2 * z_mag * np.cos(z_arg), z_mag ** 2]
        den_poly = [1, -2 * p_mag * np.cos(p_arg), p_mag ** 2]
        sos.append(np.hstack((num_poly, den_poly)))
    if chosen_max % 2 == 1:  ##IF ODD, add an extra real zero and pole
        zeros_mags = torch.distributions.uniform.Uniform(0.0, 1).sample()
        poles_mags = torch.distributions.uniform.Uniform(0.0, 1).sample()
        num_poly = [1, -1 * zeros_mags, 0]
        den_poly = [1, -1 * poles_mags, 0]
        sos.append(np.hstack((num_poly, den_poly)))
    sos = np.asarray(sos)
    num_sos = sos.shape[0]
    # Pad with identity sections up to (max_order + 1) // 2 sections total.
    sos_proto = np.tile(np.asarray([1.0, 0, 0, 1.0, 0, 0]), ((max_order + 1) // 2, 1))
    sos_proto[:num_sos, :] = sos
    sos = sos_proto
    sos = sos / sos[:, 3][:, None]  ##sosfreqz requires sos[:,3]=1
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
def generate_uniform_mag_disk_filter(
    num_points,
    max_order,
    min_order=None,
    eps=1e-8,
    min_freq=20.0,
    log=False,
    fs=44100,
):
    """Like generate_uniform_disk_filter, but pole/zero magnitudes are the
    square root of a uniform draw, i.e. uniform in squared magnitude, which
    distributes roots uniformly by area over the disk of radius sqrt(0.9).

    Args:
        num_points (int): Number of frequency points to evaluate.
        max_order (int): Maximum filter order (two per biquad section).
        min_order (int, optional): If given, the order is drawn from
            [2, max_order).
        eps (float): Lower bound so magnitudes/angles are never exactly zero.
        min_freq (float): Minimum pole/zero frequency in Hz (log mode only).
        log (bool): If True, draw angles log-uniformly instead of uniformly.
        fs (float): Sample rate in Hz (log mode only).

    Returns:
        (mag, phs, real, imag, sos): Linear magnitude, unwrapped phase, the
        real/imaginary parts of the response, and the normalized sos array.
    """
    # a and b bound the log-uniform angle sampling; the minimum cannot be
    # zero, so min_freq (20 Hz default) sets the lowest pole/zero frequency.
    a = min_freq / (0.5 * fs * np.pi)
    b = np.pi
    norm = 0.9  #
    sos = []
    if min_order is None:
        num_ord = torch.tensor([max_order]).numpy()
        den_ord = torch.tensor([max_order]).numpy()
    else:
        num_ord = torch.randint(2, max_order, [1]).numpy()
        den_ord = num_ord
    chosen_max = np.max((num_ord, den_ord))
    # sqrt of a uniform draw -> magnitudes uniform in |root|^2.
    zeros_mags = torch.sqrt(
        torch.distributions.uniform.Uniform(eps, norm).sample(num_ord // 2)
    )
    poles_mags = torch.sqrt(
        torch.distributions.uniform.Uniform(eps, norm).sample(num_ord // 2)
    )
    if not log:
        zeros_args = torch.distributions.uniform.Uniform(eps, np.pi).sample(
            num_ord // 2
        )
        poles_args = torch.distributions.uniform.Uniform(eps, np.pi).sample(
            num_ord // 2
        )
    else:
        zeros_args = loguniform.rvs(a, b, size=(num_ord) // 2)
        poles_args = loguniform.rvs(a, b, size=(num_ord) // 2)
    for z_mag, z_arg, p_mag, p_arg in zip(
        zeros_mags, zeros_args, poles_mags, poles_args
    ):
        # A conjugate root pair m*e^{+-j*theta} expands to the real biquad
        # 1 - 2*m*cos(theta)*z^-1 + m^2*z^-2.
        num_poly = [1, -2 * z_mag * np.cos(z_arg), z_mag ** 2]
        den_poly = [1, -2 * p_mag * np.cos(p_arg), p_mag ** 2]
        sos.append(np.hstack((num_poly, den_poly)))
    if chosen_max % 2 == 1:  ##IF ODD, add an extra real zero and pole
        zeros_mags = torch.distributions.uniform.Uniform(0.0, 1).sample()
        poles_mags = torch.distributions.uniform.Uniform(0.0, 1).sample()
        num_poly = [1, -1 * zeros_mags, 0]
        den_poly = [1, -1 * poles_mags, 0]
        sos.append(np.hstack((num_poly, den_poly)))
    sos = np.asarray(sos)
    num_sos = sos.shape[0]
    # Pad with identity sections up to (max_order + 1) // 2 sections total.
    sos_proto = np.tile(np.asarray([1.0, 0, 0, 1.0, 0, 0]), ((max_order + 1) // 2, 1))
    sos_proto[:num_sos, :] = sos
    sos = sos_proto
    sos = sos / sos[:, 3][:, None]  ##sosfreqz requires sos[:,3]=1
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
def generate_gaussian_peaks(
    num_points,
    max_order,
    min_order=None,
):
    """Generate a magnitude response that is a sum of random Gaussian bumps.

    No actual filter is constructed: phase/real/imag are all zeros and `sos`
    is an identity prototype, so only `mag` carries information.

    Args:
        num_points (int): Length of the generated responses.
        max_order (int): Number of Gaussian peaks when `min_order` is None.
        min_order (int, optional): If given, the number of peaks is drawn
            uniformly from [1, max_order).

    Returns:
        (mag, phs, real, imag, sos): Random magnitude, zero phase/real/imag,
        and an identity sos array of (max_order + 1) // 2 sections.
    """
    mag = np.zeros(num_points)
    if min_order is None:
        num_peaks = max_order
    else:
        num_peaks = torch.randint(1, max_order, [1])
    for _ in range(num_peaks):
        # Random center (either sign), spread, plotted width and signed gain.
        mu = torch.rand(1) * 20
        mu = -mu if torch.rand(1) > 0.5 else mu
        variance = ((torch.rand(1) * 1) + 0.1).squeeze().numpy()
        sigma = np.sqrt(variance)
        width = (torch.rand(1) * 100).squeeze().numpy()
        gain = torch.rand(1).numpy() * 40
        gain = -gain if torch.rand(1) > 0.5 else gain
        # Evaluate the bump over +-width standard deviations.
        x = np.linspace(-width * sigma, width * sigma, num_points)
        mag += gain * stats.norm.pdf(x, mu, sigma)
    phs = np.zeros(num_points)
    real = np.zeros(num_points)
    imag = np.zeros(num_points)
    sos = np.tile(np.asarray([1.0, 0, 0, 1.0, 0, 0]), ((max_order + 1) // 2, 1))
    return mag, phs, real, imag, sos
def generate_normal_poly_filter(
    num_points,
    max_order,
    min_order=None,
    eps=1e-8,
    seed=42,
):
    """Generate a filter from random-normal polynomial coefficients: draw
    numerator/denominator polynomials with N(0, 1) coefficients, pair their
    roots into real second-order sections, and evaluate the response.

    Args:
        num_points (int): Number of frequency points to evaluate.
        max_order (int): Polynomial order used when `min_order` is None.
        min_order (int, optional): If given, the order is drawn uniformly
            from [min_order, max_order).
        eps (float): Unused; kept for interface compatibility.
        seed (int): Unused; kept for interface compatibility.

    Returns:
        (mag, phs, real, imag, sos): Linear magnitude, unwrapped phase, the
        real/imaginary parts of the response, and the normalized sos array.
    """
    sos = []
    if min_order is None:
        chosen_ord = max_order
    else:
        chosen_ord = np.random.randint(low=min_order, high=max_order)
    num_poly = np.random.normal(size=chosen_ord + 1)
    den_poly = np.random.normal(size=chosen_ord + 1)
    num_w = np.roots(num_poly)
    den_w = np.roots(den_poly)
    # Sort roots by |imag| descending so complex-conjugate pairs are
    # adjacent and combine into real-coefficient biquads.
    sort_num = np.argsort(-1 * np.abs(np.imag(num_w)))
    sort_den = np.argsort(-1 * np.abs(np.imag(den_w)))
    all_num = num_w[sort_num]
    all_den = den_w[sort_den]
    for ii in range(chosen_ord // 2):
        num_poly = np.real(
            np.polymul([1, -1 * all_num[2 * ii]], [1, -1 * all_num[2 * ii + 1]])
        )
        den_poly = np.real(
            np.polymul([1, -1 * all_den[2 * ii]], [1, -1 * all_den[2 * ii + 1]])
        )
        sos.append(np.hstack((num_poly, den_poly)))
    if chosen_ord % 2 == 1:
        # Odd order: pair the leftover (real) root with a root at zero.
        num_poly = np.real(np.polymul([1, 0], [1, -1 * all_num[-1]]))
        den_poly = np.real(np.polymul([1, 0], [1, -1 * all_den[-1]]))
        sos.append(np.hstack((num_poly, den_poly)))
    sos = np.asarray(sos)
    num_sos = sos.shape[0]
    # Pad with identity sections up to (chosen_ord + 1) // 2 sections total.
    sos_proto = np.tile(np.asarray([1.0, 0, 0, 1.0, 0, 0]), ((chosen_ord + 1) // 2, 1))
    sos_proto[:num_sos, :] = sos
    sos = sos_proto
    sos = sos / sos[:, 3][:, None]  ##sosfreqz requires sos[:,3]=1
    w, h = scipy.signal.sosfreqz(sos, worN=num_points)
    mag = np.abs(h)
    phs = np.unwrap(np.angle(h))
    real = np.real(h)
    imag = np.imag(h)
    return mag, phs, real, imag, sos
| 33.137521 | 97 | 0.578082 | 3,265 | 19,518 | 3.291271 | 0.077489 | 0.008934 | 0.026056 | 0.008934 | 0.85697 | 0.845524 | 0.839196 | 0.82654 | 0.81649 | 0.80979 | 0 | 0.048071 | 0.260324 | 19,518 | 588 | 98 | 33.193878 | 0.696267 | 0.136336 | 0 | 0.781532 | 1 | 0 | 0.001078 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02027 | false | 0.004505 | 0.015766 | 0 | 0.056306 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
405389d03791bb64c1515dbf3c8bbb3b0c0ca6d1 | 252,503 | py | Python | build/lib.linux-x86_64-2.7/ims_tests/tests.py | grovesr/django-ims | 2e3046c7dac45831879ff578675f6c5234226f95 | [
"BSD-3-Clause"
] | null | null | null | build/lib.linux-x86_64-2.7/ims_tests/tests.py | grovesr/django-ims | 2e3046c7dac45831879ff578675f6c5234226f95 | [
"BSD-3-Clause"
] | 7 | 2020-06-05T17:04:46.000Z | 2022-03-11T23:12:55.000Z | ims_tests/tests.py | grovesr/django-ims | 2e3046c7dac45831879ff578675f6c5234226f95 | [
"BSD-3-Clause"
] | null | null | null | from django.test import TestCase, RequestFactory
from unittest import skip
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User, Permission
from django.contrib.sessions.middleware import SessionMiddleware
from collections import OrderedDict
from urllib import urlencode
from django.utils import timezone
from datetime import timedelta
import os
import StringIO
import re
import logging
from ims.models import Site, ProductInformation, InventoryItem, ProductCategory
from ims.views import (inventory_delete_all, site_delete_all, product_delete_all,
site_delete, product_delete, product_add, site_add, site_detail,
site_add_inventory, products_add_to_site_inventory, product_detail,
product_select_add_site)
from ims.settings import PAGE_SIZE, APP_DIR
import zipfile
logging.disable(logging.CRITICAL)
# test helper functions
def create_inventory_item_for_site(site=None,
                                   product=None,
                                   quantity=1,
                                   deleted=0,
                                   modifier='none'):
    """Record one inventory change for ``product`` at ``site``.

    When ``site`` or ``product`` is not supplied, a saved default instance
    is created first.  Returns the (site, product, inventoryItem) triple.
    """
    if not site:
        site = Site(name="test site 1", modifier=modifier)
        site.save()
    if not product:
        product = ProductInformation(name="test product 1",
                                     code="pdt1",
                                     modifier=modifier)
        product.save()
    new_item = site.add_inventory(product=product,
                                  quantity=quantity,
                                  deleted=deleted,
                                  modifier=modifier)
    return site, product, new_item
def create_products_with_inventory_items_for_sites(numSites=1,
numProducts=1,
numItems=1,
modifier='none',
uniqueCategories=False):
"""Create numSites sites, each stocked with numProducts products and
numItems inventory changes per product.  Products/categories are shared
across sites via get_or_create.  Returns the lists of created sites,
products, inventory items and categories."""
sitesList=[]
productList=[]
inventoryItemList=[]
categoryList=[]
for s in range(numSites):
siteName="test site "+str(s+1)
site=Site(name=siteName,)
site.save()
sitesList.append(site)
for p in range(numProducts):
productName="test product "+str(p+1)
productCode="pdt"+str(p+1)
# Either one category per product, or all products in "category-1".
if uniqueCategories:
categoryName="category-" + str(p+1)
else:
categoryName="category-1"
category, created = ProductCategory.objects.get_or_create(category = categoryName)
# NOTE(review): indentation is mangled in this file; presumably both save
# and append run only when the object was newly created, so the returned
# lists hold no duplicates across sites -- confirm against the repo.
if created:
category.save()
categoryList.append(category)
product, created=ProductInformation.objects.get_or_create(name=productName,
code=productCode,
category=category)
if created:
product.save()
productList.append(product)
for i in range(numItems):
# increment the quantity for each addition of a new item for
# the same product code, so we can distinguish them
site,product,inventoryItem=create_inventory_item_for_site(
site=site,
product=product,
quantity=i+1,
deleted=0,
modifier=modifier)
inventoryItemList.append(inventoryItem)
return sitesList,productList,inventoryItemList,categoryList
def get_announcement_from_response(response=None, cls=None):
    """
    Extract the announcement text rendered with CSS class ``cls`` from a
    test-client response.

    Looks inside the <div id="announcement"> block for a <p class="cls">
    that contains two leading <i> icon elements, and returns the trimmed
    text that follows them (with embedded newlines removed).  Returns ''
    when either argument is missing or no announcement matches.
    """
    if response and cls:
        # raw strings so the \s regex escapes are not (mis)interpreted as
        # string escapes (a DeprecationWarning/SyntaxWarning on Python 3)
        pattern = (r'^.*<div\s*id="announcement".*?<p.*?class="' +
                   cls +
                   r'">\s*<i .*?</i>\s*<i .*?</i>\s*(.*?)\s*</p>.*?</div>')
        # re.S lets '.' span the newlines of the rendered HTML
        m = re.search(pattern, response.content, re.S)
        if m and len(m.groups()) > 0:
            return m.groups()[0].replace('\n', '')
    return ''
def add_session_to_request(request):
    """Annotate a request object with a session"""
    # SessionMiddleware.process_request attaches a session store to
    # request.session (needed for RequestFactory requests, which bypass
    # the middleware stack)
    middleware = SessionMiddleware()
    middleware.process_request(request)
    # save so the session gets a key and is usable by the view under test
    request.session.save()
class SiteMethodTests(TestCase):
    """
    ims_tests for Site instance methods
    """
    # Site inventory ims_tests
    def test_latest_inventory_after_initial_creation(self):
        """
        site.latest_inventory should only return the latest change
        """
        # print() form works identically on Python 2 for a single string
        print('running SiteMethodTests.test_latest_inventory_after_initial_creation... ')
        (createdSites,
         __,
         __,
         __) = create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=1)
        # latest_inventory is a queryset of all the most recent changes to the
        # site's inventory.
        latestInventory = []
        for site in createdSites:
            latestInventory += site.latest_inventory()
        sortedCreatedInventory = []
        for site in createdSites:
            for item in site.inventoryitem_set.all():
                sortedCreatedInventory.append(item.create_key())
        sortedCreatedInventory.sort()
        sortedLatestInventory = []
        for item in latestInventory:
            sortedLatestInventory.append(item.create_key())
        # make sure we return only one thing, since we only added one thing
        self.assertListEqual(
            sortedLatestInventory,
            sortedCreatedInventory,
            "created inventory in database doesn't match created inventory")

    def test_latest_inventory_after_deletion(self):
        """
        site.latest_inventory should only return the latest change, and should
        not return any deleted items
        """
        print('running SiteMethodTests.test_latest_inventory_after_deletion... ')
        (createdSites,
         createdProducts,
         __,
         __) = create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=1)
        # indicate that the just added item is deleted
        create_inventory_item_for_site(site=createdSites[0],
                                       product=createdProducts[0],
                                       deleted=1)
        # latest_inventory is a queryset of all the most recent changes to the
        # site's inventory
        latestInventory = createdSites[0].latest_inventory()
        # Check that a deleted item doesn't show up in inventory
        with self.assertRaises(InventoryItem.DoesNotExist):
            latestInventory.get(information_id=createdProducts[0].pk)

    def test_latest_inventory_after_3_quantity_change(self):
        """
        site.latest_inventory should only return the latest change
        """
        print('running SiteMethodTests.test_latest_inventory_after_3_quantity_change... ')
        (createdSites,
         createdProducts,
         createdInventoryItems,
         createdCategories) = create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=3)
        # latest_inventory is a queryset of all the most recent changes to the
        # site's inventory.
        latestInventory = createdSites[0].latest_inventory()
        # check that the inventoryItem that we just added
        # and then changed several times has the appropriate final quantity
        self.assertEqual(latestInventory.get(
            information_id=createdProducts[0].pk).create_key(),
            createdInventoryItems.pop().create_key())
        self.assertEqual(latestInventory.get(
            information_id=createdProducts[0].pk).information.category.pk,
            createdCategories.pop().pk)

    def test_latest_inventory_after_3_quantity_change_and_deletion(self):
        """
        site.latest_inventory should only return the latest change and not
        return any deleted items.
        """
        print('running SiteMethodTests.test_latest_inventory_after_3_quantity_change_and_deletion... ')
        (createdSites,
         createdProducts,
         __,
         __) = create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=3)
        # indicate that the just added item is deleted
        create_inventory_item_for_site(site=createdSites[0],
                                       product=createdProducts[0],
                                       deleted=1)
        # latest_inventory is a queryset of all the most recent changes to the
        # site's inventory
        latestInventory = createdSites[0].latest_inventory()
        # Check that a deleted InventoryItem doesn't show up in inventory
        with self.assertRaises(InventoryItem.DoesNotExist):
            latestInventory.get(information_id=createdProducts[0].pk)

    def test_inventory_set_after_3_changes(self):
        """
        InventoryItem history of changes should be retained in the database
        """
        print('running SiteMethodTests.test_inventory_set_after_3_changes... ')
        (createdSites,
         __,
         __,
         __) = create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=3)
        # every change is stored as a separate InventoryItem row
        self.assertEqual(createdSites[0].inventoryitem_set.all().count(), 3)

    def test_latest_inventory_after_deletion_and_re_addition(self):
        """
        site.latest_inventory should only return the latest change and not
        return any deleted items. If an item is deleted and then re-added, we
        should always see the last change
        """
        print('running SiteMethodTests.test_latest_inventory_after_deletion_and_re_addition... ')
        (createdSites,
         createdProducts,
         __,
         __) = create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=1)
        # indicate that the just added item is deleted
        create_inventory_item_for_site(site=createdSites[0],
                                       product=createdProducts[0],
                                       deleted=1)
        # re-add the item with a new quantity
        (__,
         __,
         lastItemChange) = create_inventory_item_for_site(
             site=createdSites[0],
             product=createdProducts[0],
             quantity=100)
        # latest_inventory is a queryset of all the most recent changes to the
        # site's inventory.
        latestInventory = createdSites[0].latest_inventory()
        # Check that we still have inventory after a deletion and re-addition
        self.assertEqual(
            latestInventory.get(
                information_id=createdProducts[0].pk).create_key(),
            lastItemChange.create_key())

    def test_latest_inventory_3_products_after_3_changes(self):
        """
        site.latest_inventory should only return the latest changes
        """
        print('running SiteMethodTests.test_latest_inventory_3_products_after_3_changes... ')
        (createdSites,
         createdProducts,
         createdInventoryItems,
         createdCategories) = create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=3,
             numItems=3,
             uniqueCategories=False,
         )
        # latest_inventory is a queryset of all the most recent changes to the
        # site's inventory.
        latestInventory = createdSites[0].latest_inventory()
        # items are created in product-major order, so the last change for
        # 1-based product p sits at index 3*p - 1
        self.assertEqual(
            latestInventory.get(information_id=createdProducts[0].pk).create_key(),
            createdInventoryItems[3 * 1 - 1].create_key())
        self.assertEqual(
            latestInventory.get(information_id=createdProducts[1].pk).create_key(),
            createdInventoryItems[3 * 2 - 1].create_key())
        self.assertEqual(
            latestInventory.get(information_id=createdProducts[2].pk).create_key(),
            createdInventoryItems[3 * 3 - 1].create_key())
        self.assertEqual(
            latestInventory.get(information_id=createdProducts[0].pk).information.category.pk,
            createdCategories.pop().pk)

    def test_parse_sites_from_xls_initial(self):
        """
        import 3 sites from Excel
        """
        print('running SiteMethodTests.test_parse_sites_from_xls_initial... ')
        filename = os.path.join(APP_DIR,
                                'testData/sites_add_site1_site2_site3.xls')
        importedSites, __ = Site.parse_sites_from_xls(filename=filename,
                                                      modifier='none',
                                                      save=True)
        self.assertNotEqual(importedSites,
                            None,
                            'Failure to import sites from excel')
        queriedSites = Site.objects.all()
        # check that we saved 3 sites
        self.assertEqual(
            queriedSites.count(),
            3,
            "Number of imported sites mismatch. Some sites didn't get stored.")
        # check that the site modifiers are correctly stored
        sortedImportedSites = []
        for site in importedSites:
            sortedImportedSites.append(site.create_key())
        sortedImportedSites.sort()
        sortedQueriedSites = []
        for site in queriedSites:
            sortedQueriedSites.append(site.create_key())
        sortedQueriedSites.sort()
        self.assertListEqual(sortedImportedSites,
                             sortedQueriedSites,
                             "Imported sites don't match the stored sites")

    def test_parse_sites_from_xls_with_dups(self):
        """
        import 3 sites from Excel, plus one duplicate site
        """
        print('running SiteMethodTests.test_parse_sites_from_xls_with_dups... ')
        filename = os.path.join(APP_DIR,
                                'testData/sites_add_site1_site2_site3_site3.xls')
        importedSites, __ = Site.parse_sites_from_xls(filename=filename,
                                                      modifier='none',
                                                      save=True)
        self.assertNotEqual(importedSites,
                            None,
                            'Failure to import sites from excel')
        queriedSites = Site.objects.all()
        # check that we only saved 3 sites
        self.assertEqual(
            queriedSites.count(),
            3,
            'You stored a duplicate site as a separate entity.')

    def test_parse_sites_from_xls_with_bad_header(self):
        """
        import 3 sites from Excel but use a file with invalid headers
        """
        print('running SiteMethodTests.test_parse_sites_from_xls_with_bad_header... ')
        filename = os.path.join(APP_DIR,
                                'testData/products_add_prod1_prod2_prod3.xls')
        __, siteMessage = Site.parse_sites_from_xls(filename=filename,
                                                    modifier='none',
                                                    save=True)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(
            'Xlrdutils' in siteMessage,
            ('Failure to recognize a file with bad headers.\nSite.parse_sites_from_xls returned: %s'
             % siteMessage))

    def test_import_parse_from_xls_with_bad_date(self):
        """
        import 3 sites from Excel but use a file with a bad date format
        """
        print('running SiteMethodTests.test_parse_sites_from_xls_with_bad_date... ')
        filename = os.path.join(
            APP_DIR,
            'testData/sites_add_site1_site2_site3_bad_date.xls')
        __, siteMessage = Site.parse_sites_from_xls(filename=filename,
                                                    modifier='none',
                                                    save=True)
        self.assertTrue(
            'Xlrdutils' in siteMessage,
            ('Failure to recognize a file with bad date format.\nSite.parse_sites_from_xls returned: %s'
             % siteMessage))

    def test_parse_sites_from_xls_unicode(self):
        """
        import a site whose fields contain unicode characters
        """
        print('running SiteMethodTests.test_parse_sites_from_xls_unicode... ')
        filename = os.path.join(APP_DIR,
                                'testData/sites_add_site1_unicode.xls')
        try:
            (__,
             siteMessage) = Site.parse_sites_from_xls(filename=filename,
                                                      modifier='none',
                                                      save=True)
        except UnicodeEncodeError as e:
            self.fail("Import of spreadsheet containing unicode caused UnicodeEncodeError: %s" % e)
        self.assertEqual(siteMessage,
                         '',
                         ('Import of spreadsheet containing unicode generated warnings %s'
                          % siteMessage))
class ProductInformationMethodTests(TestCase):
    """
    ProductInformation class method ims_tests
    """
    def test_num_sites_containing_with_3_sites(self):
        """num_sites_containing should count every site holding the product"""
        print('running ProductInformationMethodTests.test_num_sites_containing_with_3_sites... ')
        (__,
         createdProducts,
         __,
         __) = create_products_with_inventory_items_for_sites(
             numSites=3,
             numProducts=1,
             numItems=1)
        product = createdProducts[0]
        self.assertEqual(product.num_sites_containing(), 3)

    def test_num_sites_containing_with_3_sites_after_inventory_change(self):
        """a quantity change must not double-count a site"""
        print('running ProductInformationMethodTests.test_num_sites_containing_with_3_sites_after_inventory_change... ')
        (__,
         createdProducts,
         __,
         __) = create_products_with_inventory_items_for_sites(
             numSites=3,
             numProducts=1,
             numItems=2)
        product = createdProducts[0]
        self.assertEqual(product.num_sites_containing(), 3)

    def test_parse_product_information_from_xls_initial(self):
        """
        import 3 products from Excel
        """
        print('running ProductInformationMethodTests.test_parse_product_information_from_xls_initial... ')
        filename = os.path.join(APP_DIR,
                                'testData/products_add_prod1_prod2_prod3.xls')
        (importedProducts,
         __) = ProductInformation.parse_product_information_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        self.assertNotEqual(importedProducts,
                            None,
                            'Failure to import products from Excel')
        queriedProducts = ProductInformation.objects.all()
        # check that we saved 3 products
        self.assertEqual(queriedProducts.count(),
                         3,
                         "Number of imported products mismatch. Some product didn't get stored.")
        # check that the product modifiers are correctly stored
        sortedImportedProducts = []
        for product in importedProducts:
            sortedImportedProducts.append(product.create_key())
        sortedImportedProducts.sort()
        sortedQueriedProducts = []
        for product in queriedProducts:
            sortedQueriedProducts.append(product.create_key())
        sortedQueriedProducts.sort()
        self.assertListEqual(sortedImportedProducts, sortedQueriedProducts)

    def test_parse_product_information_from_xls_with_dups(self):
        """
        import 3 products from Excel, plus one duplicate product
        """
        print('running ProductInformationMethodTests.test_parse_product_information_from_xls_with_dups... ')
        filename = os.path.join(APP_DIR,
                                'testData/products_add_prod1_prod2_prod3_prod3.xls')
        (importedProducts,
         __) = ProductInformation.parse_product_information_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        self.assertNotEqual(importedProducts,
                            None,
                            'Failure to import products from excel')
        queriedProducts = ProductInformation.objects.all()
        # check that we only saved 3 products
        self.assertTrue(
            queriedProducts.count() < 4,
            'You stored a duplicate product as a separate entity.')

    def test_parse_product_information_from_xls_with_bad_header(self):
        """
        import 3 products from Excel but use a file with invalid headers
        """
        print('running ProductInformationMethodTests.test_parse_product_information_from_xls_with_bad_header... ')
        filename = os.path.join(APP_DIR,
                                'testData/sites_add_site1_site2_site3.xls')
        (__,
         productMessage) = ProductInformation.parse_product_information_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(
            'Xlrdutils' in productMessage,
            ('Failure to recognize a file with bad headers.\nProductInformation.parse_product_information_from_xls returned: %s'
             % productMessage))

    def test_parse_product_information_from_xls_with_bad_date(self):
        """
        import 3 products from Excel but use a file with a bad date format
        """
        print('running ProductInformationMethodTests.test_parse_product_information_from_xls_with_bad_date... ')
        filename = os.path.join(
            APP_DIR,
            'testData/products_add_prod1_prod2_prod3_bad_date.xls')
        (__,
         productMessage) = ProductInformation.parse_product_information_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        self.assertTrue(
            'Xlrdutils' in productMessage,
            ('Failure to recognize a file with bad date format.\nProductInformation.parse_product_information_from_xls returned: %s'
             % productMessage))

    def test_parse_product_information_from_xls_with_unicode(self):
        """import a product whose fields contain unicode characters"""
        print('running ProductInformationMethodTests.test_parse_product_information_from_xls_with_unicode... ')
        filename = os.path.join(
            APP_DIR,
            'testData/products_add_prod1_unicode.xls')
        try:
            (__,
             productMessage) = ProductInformation.parse_product_information_from_xls(
                 filename=filename,
                 modifier='none',
                 save=True)
        except UnicodeEncodeError as e:
            self.fail("Import of spreadsheet containing unicode caused UnicodeEncodeError: %s" % e)
        self.assertEqual(productMessage,
                         '',
                         ('Import of spreadsheet containing unicode generated warnings %s'
                          % productMessage))
class ProductCategoryMethodTests(TestCase):
    """
    ProductCategory class method ims_tests
    """
    def test_parse_product_category_from_xls_initial(self):
        """import 3 product categories from Excel"""
        # log message corrected: it was copy-pasted with the
        # ProductInformationMethodTests class name
        print('running ProductCategoryMethodTests.test_parse_product_category_from_xls_initial... ')
        filename = os.path.join(APP_DIR,
                                'testData/category_add_3.xls')
        (importedCategories,
         __) = ProductCategory.parse_product_categories_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        self.assertNotEqual(importedCategories,
                            None,
                            'Failure to import categories from Excel')
        queriedCategories = ProductCategory.objects.all()
        # check that we saved 3 categories
        self.assertEqual(queriedCategories.count(),
                         3,
                         "Number of imported categories mismatch. Some categories didn't get stored.")

    def test_parse_product_category_from_xls_with_unicode(self):
        """import categories whose fields contain unicode characters"""
        print('running ProductCategoryMethodTests.test_parse_product_category_from_xls_with_unicode... ')
        filename = os.path.join(
            APP_DIR,
            'testData/category_add_3_unicode.xls')
        try:
            (__,
             categoryMessage) = ProductCategory.parse_product_categories_from_xls(
                 filename=filename,
                 modifier='none',
                 save=True)
        except UnicodeEncodeError as e:
            self.fail("Import of spreadsheet containing unicode caused UnicodeEncodeError: %s" % e)
        self.assertEqual(categoryMessage,
                         '',
                         ('Import of spreadsheet containing unicode generated warnings %s'
                          % categoryMessage))
class InventoryItemMethodTests(TestCase):
    """
    InventoryItem class method ims_tests
    """
    def test_parse_inventory_from_xls_initial(self):
        """
        import 3 inventory items to 3 sites from Excel
        """
        print('running InventoryItemMethodTests.test_parse_inventory_from_xls_initial... ')
        for number in range(3):
            # create three sites
            siteName = 'test site %d' % (number + 1)
            siteNumber = number + 1
            site = Site(name=siteName,
                        number=siteNumber,
                        modifier='none')
            site.save()
        for number in range(3):
            # create three products
            productName = "test product %d" % (number + 1)
            productCode = "pdt%d" % (number + 1)
            product = ProductInformation(name=productName,
                                         code=productCode,
                                         modifier='none')
            product.save()
        filename = os.path.join(APP_DIR,
                                'testData/sites_add_site1_site2_site3.xls')
        Site.parse_sites_from_xls(filename=filename,
                                  modifier='none',
                                  save=True)
        filename = os.path.join(APP_DIR,
                                'testData/products_add_prod1_prod2_prod3.xls')
        ProductInformation.parse_product_information_from_xls(filename=filename,
                                                              modifier='none',
                                                              save=True)
        filename = os.path.join(
            APP_DIR,
            'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3.xls')
        (importedInventoryItems,
         __) = InventoryItem.parse_inventory_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        self.assertNotEqual(importedInventoryItems,
                            None,
                            'Failure to import inventory from Excel')
        self.assertEqual(len(importedInventoryItems),
                         9,
                         'Failure to create one or more inventoryItems. Missing associated Site or ProductInformation?')
        queriedInventoryItems = InventoryItem.objects.all()
        # check that we saved 3 items at each of the 3 sites
        self.assertEqual(queriedInventoryItems.count(),
                         3 * 3,
                         "Total inventory mismatch. Some InventoryItems didn't get stored.")
        # check that the inventory IDs are correctly stored
        sortedImportedInventoryItems = []
        for item in importedInventoryItems:
            sortedImportedInventoryItems.append(item.create_key())
        sortedImportedInventoryItems.sort()
        sortedQueriedInventoryItems = []
        for item in queriedInventoryItems:
            sortedQueriedInventoryItems.append(item.create_key())
        sortedQueriedInventoryItems.sort()
        self.assertListEqual(sortedImportedInventoryItems,
                             sortedQueriedInventoryItems,
                             "Imported inventory doesn't match stored inventory")

    def test_parse_inventory_from_xls_with_dups(self):
        """
        import inventory items from an Excel file containing duplicate rows
        """
        # log message corrected: it was copy-pasted from the _initial test
        print('running InventoryItemMethodTests.test_parse_inventory_from_xls_with_dups... ')
        for number in range(3):
            # create three sites
            siteName = 'test site %d' % (number + 1)
            siteNumber = number + 1
            site = Site(name=siteName,
                        number=siteNumber,
                        modifier='none')
            site.save()
        for number in range(3):
            # create three products
            productName = "test product %d" % (number + 1)
            productCode = "pdt%d" % (number + 1)
            product = ProductInformation(name=productName,
                                         code=productCode,
                                         modifier='none')
            product.save()
        filename = os.path.join(
            APP_DIR,
            'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3_dups.xls')
        (importedInventoryItems,
         __) = InventoryItem.parse_inventory_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        self.assertNotEqual(importedInventoryItems,
                            None,
                            'Failure to import inventory from Excel')
        queriedInventory = InventoryItem.objects.all()
        # a duplicate row is stored as an additional change to the same item,
        # so 10 rows are expected in the history
        self.assertEqual(
            queriedInventory.count(), 10,
            "You didn't store all the inventory items")

    def test_parse_inventory_from_xls_with_bad_header(self):
        """
        import 3 inventory items to 3 sites from Excel file with a bad header
        """
        print('running InventoryItemMethodTests.test_parse_inventory_from_xls_with_bad_header... ')
        filename = os.path.join(APP_DIR,
                                'testData/products_add_prod1_prod2_prod3.xls')
        (__,
         inventoryMessage) = InventoryItem.parse_inventory_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(
            'Xlrdutils' in inventoryMessage,
            ('Failure to recognize a file with bad header format.\nInventoryItem.parse_inventory_from_xl returned: %s'
             % inventoryMessage))

    def test_parse_inventory_from_xls_with_bad_date(self):
        """
        import 3 inventory items to 3 sites from Excel file with a bad date
        """
        print('running InventoryItemMethodTests.test_parse_inventory_from_xls_with_bad_date... ')
        filename = os.path.join(
            APP_DIR,
            'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3_bad_date.xls')
        (__,
         inventoryMessage) = InventoryItem.parse_inventory_from_xls(
             filename=filename,
             modifier='none',
             save=True)
        self.assertTrue(
            'Xlrdutils' in inventoryMessage,
            ('Failure to recognize a file with bad date format.\nInventoryItem.parse_inventory_from_xl returned: %s'
             % inventoryMessage))
@skip('No longer using IMS page view')
class HomeViewTests(TestCase):
    """
    ims_tests for Home view
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')

    def test_home_for_latest_changes_1(self):
        """
        The home view should display sites with recently edited inventory with
        the latest changes at the top and latest inventory changes with the
        latest changes at the top as well
        """
        print('running HomeViewTests.test_home_for_latest_changes_1... ')
        self.client.login(username='testUser', password='12345678')
        (createdSites,
         __,
         createdInventoryItems,
         __) = create_products_with_inventory_items_for_sites(
             numSites=20,
             numProducts=5,
             numItems=1)
        response = self.client.get(reverse('ims:home'))
        sitesResponseList = []
        itemsResponseList = []
        for site in response.context['sitesList']:
            sitesResponseList.append(site.create_key())
        for item in response.context['inventoryList']:
            # include the timestamp to ensure uniqueness when comparing
            itemsResponseList.append(item.create_key())
        sortedCreatedSites = []
        for site in createdSites:
            sortedCreatedSites.append(site.create_key())
        # compare the latest changed sites only
        sortedCreatedSites.reverse()
        # just retain the latest inventory changes to compare to the response
        latestInventoryItems = OrderedDict()
        sortedCreatedInventoryItems = []
        createdInventoryItems.reverse()
        for item in createdInventoryItems:
            # dict.has_key was removed in Python 3; the in operator is
            # equivalent on both versions
            if item.information not in latestInventoryItems:
                latestInventoryItems[item.information] = item
        for item in latestInventoryItems.values():
            # include the timestamp to ensure uniqueness when comparing
            sortedCreatedInventoryItems.append(item.create_key())
        self.assertListEqual(sitesResponseList, sortedCreatedSites[:PAGE_SIZE])
        self.assertListEqual(itemsResponseList,
                             sortedCreatedInventoryItems[:PAGE_SIZE])

    def test_home_for_latest_changes_2(self):
        """
        The home view should display sites with recently edited inventory with
        the latest changes at the top and latest inventory changes with the
        latest changes at the top as well
        """
        print('running HomeViewTests.test_home_for_latest_changes_2... ')
        self.client.login(username='testUser', password='12345678')
        (createdSites,
         __,
         createdInventoryItems,
         __) = create_products_with_inventory_items_for_sites(
             numSites=20,
             numProducts=5,
             numItems=1)
        response = self.client.get(reverse('ims:home'))
        sitesResponseList = []
        itemsResponseList = []
        for site in response.context['sitesList']:
            sitesResponseList.append(site.create_key())
        for item in response.context['inventoryList']:
            # include the timestamp to ensure uniqueness when comparing
            itemsResponseList.append(item.create_key())
        sortedCreatedSites = []
        for site in createdSites:
            sortedCreatedSites.append(site.create_key())
        # compare the latest changed sites only
        sortedCreatedSites.reverse()
        # just retain the latest inventory changes to compare to the response
        latestInventoryItems = OrderedDict()
        sortedCreatedInventoryItems = []
        createdInventoryItems.reverse()
        for item in createdInventoryItems:
            # dict.has_key was removed in Python 3; the in operator is
            # equivalent on both versions
            if item.information not in latestInventoryItems:
                latestInventoryItems[item.information] = item
        for item in latestInventoryItems.values():
            # include the timestamp to ensure uniqueness when comparing
            sortedCreatedInventoryItems.append(item.create_key())
        self.assertListEqual(sitesResponseList, sortedCreatedSites[:PAGE_SIZE])
        self.assertListEqual(itemsResponseList,
                             sortedCreatedInventoryItems[:PAGE_SIZE])

    def test_home_for_no_inventory(self):
        """
        If there is no inventory, ims:home should display nothing
        """
        print('running HomeViewTests.test_home_for_no_inventory... ')
        self.client.login(username='testUser', password='12345678')
        response = self.client.get(reverse('ims:home'))
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertIn('No inventory found', resultWarning,
                      "IMS Home view didn't generate the correct warning when there is no inventory.\nactual warning message = %s"
                      % resultWarning)
class InventoryHistoryViewTests(TestCase):
    """
    ims_tests for inventory_history view
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')

    def test_inventory_history_with_invalid_site(self):
        """requesting history for a nonexistent site should show an error"""
        print('running InventoryHistoryViewTests.test_inventory_history_with_invalid_site... ')
        self.client.login(username='testUser', password='12345678')
        siteId = 1
        code = "D11"
        response = self.client.get(reverse('ims:inventory_history',
                                           kwargs={'siteId': siteId,
                                                   'code': code, }),
                                   follow=True)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertIn('Unable to check inventory history.<br />Site %d does not exist' %
                      siteId, resultError,
                      "IMS inventory_history view didn't generate the correct warning when an invalid site was requested.\nactual message = %s" %
                      resultError)

    def test_inventory_history_with_invalid_code(self):
        """requesting history for a nonexistent product should show an error"""
        print('running InventoryHistoryViewTests.test_inventory_history_with_invalid_code... ')
        self.client.login(username='testUser', password='12345678')
        siteId = 1
        code = "D11"
        site = Site(number=siteId)
        site.save()
        response = self.client.get(reverse('ims:inventory_history',
                                           kwargs={'siteId': siteId,
                                                   'code': code, }),
                                   follow=True)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertIn('Unable to check inventory history.<br />Item %s does not exist' %
                      code, resultError,
                      "IMS inventory_history view didn't generate the correct warning when an invalid code was requested.\nactual message = %s" %
                      resultError)

    def test_inventory_history_with_valid_history(self):
        """a valid site/product pair should render with no announcements"""
        print('running InventoryHistoryViewTests.test_inventory_history_with_valid_history... ')
        self.client.login(username='testUser', password='12345678')
        # create initial inventory item
        site, product, __ = create_inventory_item_for_site(quantity=1)
        # change it to create a history
        site, product, __ = create_inventory_item_for_site(
            site=site,
            product=product,
            quantity=2)
        response = self.client.get(reverse('ims:inventory_history',
                                           kwargs={'siteId': site.number,
                                                   'code': product.code, }),
                                   follow=True)
        self.assertEqual(response.status_code, 200,
                         'Inventory History generated a non-200 response code')
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertEqual(resultError, '',
                         'IMS inventory_history view generated an error with a valid request.\nactual message = %s' %
                         resultError)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertEqual(resultWarning, '',
                         'IMS inventory_history view generated a warning with a valid request.\nactual message = %s' %
                         resultWarning)
        resultInfo = get_announcement_from_response(response=response,
                                                    cls="infonote")
        self.assertEqual(resultInfo, '',
                         'IMS inventory_history view generated info with a valid request.\nactual message = %s' %
                         resultInfo)
class SitesViewTests(TestCase):
    """
    ims_tests for sites view
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')

    def test_sites_get_with_no_sites(self):
        """with no sites stored, the sites view should warn the user"""
        print('running SitesViewTests.test_sites_get_with_no_sites... ')
        self.client.login(username='testUser', password='12345678')
        response = self.client.get(reverse('ims:sites'),
                                   follow=True)
        self.assertEqual(response.status_code, 200)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        # BUG FIX: the container argument (resultWarning) was missing, so
        # assertIn was searching inside its own failure message
        self.assertIn('No sites found',
                      resultWarning,
                      "IMS sites view didn't generate the correct warning when no sites were found.\nactual message = %s" %
                      resultWarning)

    def test_sites_get_with_filter_and_no_sites(self):
        """with a filter and no sites stored, the view should warn the user"""
        # log message corrected (was copy-pasted from a products test)
        print('running SitesViewTests.test_sites_get_with_filter_and_no_sites... ')
        self.client.login(username='testUser', password='12345678')
        response = self.client.get(reverse('ims:sites',) +
                                   '?searchField=name&searchValue=blah',
                                   follow=False,)
        self.assertEqual(response.status_code, 200)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        # BUG FIX: the container argument was missing here as well
        self.assertIn('No sites found',
                      resultWarning,
                      "IMS sites view didn't generate the correct warning when no sites were found.\nactual message = %s" %
                      resultWarning)

    def test_sites_get_with_sites(self):
        """with sites present, the sites view should render with no warning"""
        print('running SitesViewTests.test_sites_get_with_sites... ')
        self.client.login(username='testUser', password='12345678')
        site = Site(name='test site',)
        site.save()
        response = self.client.get(reverse('ims:sites',),
                                   follow=False,)
        self.assertEqual(response.status_code, 200)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertEqual('', resultWarning)

    def test_sites_get_with_filter(self):
        """a matching filter should render with no warning"""
        print('running SitesViewTests.test_sites_get_with_filter... ')
        self.client.login(username='testUser', password='12345678')
        site = Site(name='test site',)
        site.save()
        response = self.client.get(reverse('ims:sites',) +
                                   '?searchField=name&searchValue=test',
                                   follow=False,)
        self.assertEqual(response.status_code, 200)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertEqual('', resultWarning)

    def test_sites_get_with_bad_filter(self):
        """a non-matching filter should redirect back to the sites list"""
        print('running SitesViewTests.test_sites_get_with_bad_filter... ')
        self.client.login(username='testUser', password='12345678')
        site = Site(name='test site',)
        site.save()
        response = self.client.get(reverse('ims:sites',) +
                                   '?searchField=name&searchValue=blah',
                                   follow=False,)
        self.assertRedirects(response, reverse('ims:sites',) +
                             '?page=1&pageSize=%d' % PAGE_SIZE,
                             status_code=302,
                             target_status_code=200)

    def test_sites_post_add(self):
        """POST Add with the add_site permission should redirect to site_add"""
        print('running SitesViewTests.test_sites_post_add... ')
        perms = ['add_site']
        permissions = Permission.objects.filter(codename__in=perms)
        self.user.user_permissions = permissions
        self.client.login(username='testUser', password='12345678')
        response = self.client.post(reverse('ims:sites'),
                                    {'Add': ''},
                                    follow=False)
        self.assertRedirects(response, reverse('ims:site_add'),
                             status_code=302,
                             target_status_code=200)

    def test_sites_post_add_without_add_site_perm(self):
        """POST Add without the add_site permission should show an error"""
        print('running SitesViewTests.test_sites_post_add_without_add_site_perm... ')
        self.client.login(username='testUser', password='12345678')
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': ['0'],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['0'],
                    'Add': 'Add', }
        response = self.client.post(reverse('ims:sites',),
                                    postData,
                                    follow=False,)
        self.assertEqual(response.status_code, 200)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        # NOTE(review): 'don''t' concatenates to "dont" — kept byte-identical
        # because it defines what is asserted; confirm against the view's
        # actual error text before changing it.
        self.assertIn('You don''t have permission to add sites',
                      resultError,
                      "IMS sites view didn't generate the correct error when an unauthorized user tried to add.\nactual message = %s" %
                      resultError)

    def test_sites_post_delete(self):
        """POST Delete with permissions should redirect to site_delete"""
        print('running SitesViewTests.test_sites_post_delete... ')
        perms = ['delete_site', 'delete_inventoryitem', ]
        permissions = Permission.objects.filter(codename__in=perms)
        self.user.user_permissions = permissions
        self.client.login(username='testUser', password='12345678')
        site = Site(name='test site')
        site.save()
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': ['1'],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'form-0-Delete': ['on'],
                    'form-0-number': [site.number],
                    'Delete': ['Delete']}
        response = self.client.post(reverse('ims:sites'),
                                    postData,
                                    follow=False)
        self.assertRedirects(response,
                             (reverse('ims:site_delete') +
                              '?site=' + str(site.number) + '&'),
                             status_code=302,
                             target_status_code=200)

    def test_sites_post_delete_without_delete_site_perms(self):
        """POST Delete without the delete_site permission should show an error"""
        # log message corrected (was copy-pasted from test_sites_post_delete)
        print('running SitesViewTests.test_sites_post_delete_without_delete_site_perms... ')
        perms = ['delete_inventoryitem', ]
        permissions = Permission.objects.filter(codename__in=perms)
        self.user.user_permissions = permissions
        self.client.login(username='testUser', password='12345678')
        site = Site(name='test site')
        site.save()
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': ['1'],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'form-0-Delete': ['on'],
                    'form-0-number': [site.number],
                    'Delete': ['Delete']}
        response = self.client.post(reverse('ims:sites'),
                                    postData,
                                    follow=False)
        self.assertEqual(response.status_code, 200)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        # NOTE(review): 'don''t' concatenates to "dont" — kept byte-identical
        # (see test_sites_post_add_without_add_site_perm)
        self.assertIn('You don''t have permission to delete sites',
                      resultError,
                      "IMS sites view didn't generate the correct error when an unauthorized user tried to delete.\nactual message = %s" %
                      resultError)
class ProductDeleteViewTests(TestCase):
"""
ims_tests for product_delete view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_product_delete_get_with_no_get_parms(self):
print 'running ProductDeleteViewTests.test_product_delete_get_with_no_get_parms... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:product_delete'),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No products requested for deletion',
resultWarning,
'IMS product_delete view didn''t generate the correct warning when no sites requested found.\nactual message = %s' %
resultWarning)
def test_product_delete_get(self):
print 'running ProductDeleteViewTests.test_product_delete_get... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':code}),
follow = False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('Are you sure?',
resultWarning,
'IMS product_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_product_delete_get_with_inventory(self):
print 'running ProductDeleteViewTests.test_product_delete_get_with_inventory... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':createdProducts[0].pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('One or more products contain inventory. Deleting the products will delete all inventory in all sites containing this product as well. Delete anyway?',
resultWarning,
'IMS product_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_product_delete_get_after_deleting_inventory_from_site(self):
print 'running ProductDeleteViewTests.test_product_delete_get_after_deleting_inventory_from_site... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
product = createdProducts[0]
site =createdSites[0]
site.add_inventory(product=product,
deleted=True,
modifier='testUesr',)
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':createdProducts[0].pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('Are you sure?',
resultWarning,
'IMS product_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_product_delete_get_without_delete_productinformation_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_get_without_delete_productinformation_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':code}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_get_without_delete_inventoryitem_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_get_without_delete_inventoryitem_perm... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':code}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_post_with_no_post_parms(self):
print 'running ProductDeleteViewTests.test_product_delete_post_with_no_post_parms... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Delete':'Delete'}
request = self.factory.post(reverse('ims:product_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_delete(request)
response.client = self.client
resultError = request.session['errorMessage']
self.assertIn('No products requested for deletion',
resultError,
'IMS product_delete view didn''t generate the correct warning when no products requested found.\nactual message = %s' %
resultError)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
def test_product_delete_post(self):
print 'running ProductDeleteViewTests.test_product_delete_post... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Delete':'Delete',
'products':[product.code,]}
request = self.factory.post(reverse('ims:product_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn(('Successfully deleted product and associated inventory for product code %s with name "%s"<br/>' %
(product.meaningful_code(), product.name)),
resultInfo,
'IMS product_delete view didn''t generate the correct info when product deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(ProductInformation.objects.all().count(),
0,
'Product still in database after deleting.')
def test_product_delete_post_with_inventory(self):
print 'running ProductDeleteViewTests.test_product_delete_post_with_inventory... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
postData = {'Delete':'Delete',
'products':[createdProducts[0].code,]}
request = self.factory.post(reverse('ims:product_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn(('Successfully deleted product and associated inventory for product code %s with name "%s"<br/>' %
(createdProducts[0].meaningful_code(), createdProducts[0].name)),
resultInfo,
'IMS product_delete view didn''t generate the correct info when product deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(ProductInformation.objects.all().count(),
0,
'Product still in database after deleting.')
def test_product_delete_post_without_delete_productinformation_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_post_without_delete_productinformation_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Delete':'Delete',
'products':[product.code,]}
response = self.client.post(reverse('ims:product_delete'),
postData,
follow = False)
self.assertEqual(response.status_code,200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_post_without_delete_inventoryitem_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_post_without_delete_inventoryitem_perm... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Delete':'Delete',
'products':[product.code,]}
response = self.client.post(reverse('ims:product_delete'),
postData,
follow = False)
self.assertEqual(response.status_code,200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_post_cancel(self):
print 'running ProductDeleteViewTests.test_product_delete_post_cancel... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Cancel':'Cancel',
'products':[code,]}
response = self.client.post(reverse('ims:product_delete'),
postData,
follow = False)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':1,}),
status_code = 302,
target_status_code = 200)
class SiteDeleteViewTests(TestCase):
"""
ims_tests for site_delete view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_delete_get_with_no_get_parms(self):
print 'running SiteDeleteViewTests.test_site_delete_get_with_no_get_parms... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:site_delete'),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No sites requested for deletion',
resultWarning,
'IMS site_delete view didn''t generate the correct warning when no sites requested found.\nactual message = %s' %
resultWarning)
def test_site_delete_get(self):
print 'running SiteDeleteViewTests.test_site_delete_get... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':site.pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('Are you sure?',
resultWarning,
'IMS site_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_delete_get_with_inventory(self):
print 'running SiteDeleteViewTests.test_site_delete_get_with_inventory... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':createdSites[0].pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('One or more sites contain inventory. Deleting the sites will delete all inventory as well. Delete anyway?',
resultWarning,
'IMS site_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_delete_get_without_delete_site_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_get_without_delete_site_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':site.pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_delete_get_without_delete_inventoryitem_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_get_without_delete_inventoryitem_perm... '
perms = ['delete_site',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':site.pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_delete_post_with_no_post_parms(self):
print 'running SiteDeleteViewTests.test_site_delete_post_with_no_post_parms... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Delete':'Delete'}
request = self.factory.post(reverse('ims:site_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_delete(request)
response.client = self.client
resultError = request.session['errorMessage']
self.assertIn('No sites requested for deletion',
resultError,
'IMS site_delete view didn''t generate the correct warning when no sites requested found.\nactual message = %s' %
resultError)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
def test_site_delete_post(self):
print 'running SiteDeleteViewTests.test_site_delete_post... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Delete':'Delete',
'sites':[site.number,]}
request = self.factory.post(reverse('ims:site_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn('Successfully deleted site %s<br />' % site.name,
resultInfo,
'IMS site_delete view didn''t generate the correct info site deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(Site.objects.all().count(),
0,
'Site still in database after deleting.')
def test_site_delete_post_with_inventory(self):
print 'running SiteDeleteViewTests.test_site_delete_post_with_inventory... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
postData = {'Delete':'Delete',
'sites':[createdSites[0].number,]}
request = self.factory.post(reverse('ims:site_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn('Successfully deleted site %s<br />' % createdSites[0].name,
resultInfo,
'IMS site_delete view didn''t generate the correct info site deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(Site.objects.all().count(),
0,
'Site still in database after deleting.')
self.assertEqual(InventoryItem.objects.all().count(),
0,
'Inventory still in database after deleting.')
def test_site_delete_post_without_delete_site_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_post_without_delete_site_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Delete':'Delete',
'sites':[site.number,]}
response = self.client.post(reverse('ims:site_delete'),
postData,
follow = False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error with incorrect user permissions.\nactual message = %s' %
resultError)
def test_site_delete_post_without_delete_inventoryitem_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_post_without_delete_inventoryitem_perm... '
perms = ['delete_site',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Delete':'Delete',
'sites':[site.number,]}
response = self.client.post(reverse('ims:site_delete'),
postData,
follow = False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error with incorrect user permissions.\nactual message = %s' %
resultError)
def test_site_delete_post_cancel(self):
print 'running SiteDeleteViewTests.test_site_delete_post_cancel... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Cancel':'Cancel',
'sites':[site.number,]}
response = self.client.post(reverse('ims:site_delete'),
postData,
follow = False)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':1,}),
status_code = 302,
target_status_code = 200)
class SiteAddViewTests(TestCase):
"""
ims_tests for site_add view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_add_get(self):
print 'running SiteAddViewTests.test_site_add_get... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response = self.client.get(reverse('ims:site_add'))
self.assertEquals(response.status_code, 200)
def test_site_add_get_without_add_site_perm(self):
print 'running SiteAddViewTests.test_site_add_get_without_add_site_perm... '
self.client.login(username='testUser', password='12345678')
request = self.factory.get(reverse('ims:site_add'),
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_add(request)
response.client = self.client
resultError = request.session['errorMessage']
self.assertIn('You don''t have permission to add sites',
resultError,
'IMS site_add view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
self.assertRedirects(response, reverse('ims:sites',) +
'?' + urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_site_add_post(self):
print 'running SiteAddViewTests.test_site_add_post... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save': 'Save', }
request = self.factory.post(reverse('ims:site_add'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_add(request)
self.assertEqual(Site.objects.count(), 1)
site = Site.objects.all()[0]
resultInfo = request.session['infoMessage']
self.assertIn('Successfully added site', resultInfo,
'IMS site_add view didn''t generate the correct info when saving.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},),
status_code = 302,
target_status_code = 200)
def test_site_add_post_no_change(self):
print 'running SiteAddViewTests.test_site_add_post_no_change... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Save':'Save'}
response = self.client.post(reverse('ims:site_add'),
postData,
follow = False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('More information required before site can be added',
resultWarning,
'IMS site_add view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_add_post_without_add_site_perm(self):
print 'running SiteAddViewTests.test_site_add_post_without_add_site_perm... '
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save': 'Save', }
request = self.factory.post(reverse('ims:site_add'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_add(request)
resultInfo = request.session['errorMessage']
self.assertIn('You don''t have permission to add sites', resultInfo,
'IMS site_add view didn''t generate the correct error when saving.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:sites',) + '?' +
urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
class SiteDetailViewTests(TestCase):
"""
ims_tests for site_detail view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_detail_get(self):
print 'running SiteDetailViewTests.test_site_detail_get... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
follow=False)
self.assertEqual(response.status_code, 200)
def test_site_detail_get_with_invalid_site(self):
print 'running SiteDetailViewTests.test_site_detail_get_with_invalid_site... '
self.client.login(username='testUser', password='12345678')
siteId = 1
request=self.factory.get(reverse('ims:site_detail',
kwargs =
{'siteId':siteId,}),
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = siteId)
resultError = request.session['errorMessage']
self.assertIn('Site %d does not exist' % siteId,
resultError,
'IMS site detail view didn''t generate the correct error when an invalid site was requested.\nactual message = %s' %
resultError)
response.client = self.client
self.assertRedirects(response,
reverse('ims:sites',) + '?' +
urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_site_detail_get_with_filter(self):
print 'running SiteDetailViewTests.test_site_detail_get_with_filter... '
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=2,
numItems=1)
site = createdSites[0]
response=self.client.get(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}) +
'?searchField=information__name&searchValue=test product 1',
follow=False)
self.assertEqual(response.status_code, 200)
def test_site_detail_get_with_bad_inventory_filter(self):
print 'running SiteDetailViewTests.test_site_detail_get_with_bad_inventory_filter... '
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=2,
numItems=1)
site = createdSites[0]
request=self.factory.get(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}) +
'?searchField=information__name&searchValue=blah',
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultWarning = request.session['warningMessage']
self.assertIn('No inventory found using filter criteria.<br/>Showing all inventory.',
resultWarning,
'IMS site detail view didn''t generate the correct error with a bad inventory filter.\nactual message = %s' %
resultWarning)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}) + '?' +
urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_site_detail_post_save_site(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_site... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
perms = ['change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save Site': 'Save Site', }
request=self.factory.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully changed site information',
resultInfo,
'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},) +
'?' + urlencode({'page':1,
'pageSize':PAGE_SIZE,
'adjust':'False'}),
302,
200)
def test_site_detail_post_save_site_invalid_fields(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_site_invalid_fields... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
perms = ['change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Save Site': 'Save Site', }
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('More information required before the site can be saved',
resultWarning,
'IMS site detail view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_detail_post_save_site_no_change(self):
    """POSTing 'Save Site' with values identical to the stored site should
    warn that nothing changed rather than re-saving."""
    print 'running SiteDetailViewTests.test_site_detail_post_save_site_no_change... '
    self.client.login(username='testUser', password='12345678')
    # Save a site whose fields exactly match the form data posted below.
    site = Site(name = 'test site',
                county = '',
                address1 = '11 main st.',
                contactName = 'John Smith',
                contactPhone = '555-1212',
                modifier = self.user.username,)
    site.save()
    # Grant change_site so the no-change branch (not the permission
    # check) is what produces the warning.
    perms = ['change_site']
    permissions = Permission.objects.filter(codename__in = perms)
    self.user.user_permissions=permissions
    self.client.login(username='testUser', password='12345678')
    postData = {'name': 'test site',
                'county': '',
                'address1': '11 main st.',
                'contactName' : 'John Smith',
                'contactPhone' : '555-1212',
                'modifier' : self.user.username,
                'Save Site': 'Save Site', }
    response=self.client.post(reverse('ims:site_detail',
                                      kwargs =
                                      {'siteId':site.pk,}),
                              postData,
                              follow=False)
    self.assertEqual(response.status_code, 200)
    resultWarning = get_announcement_from_response(response=response,
                                                   cls="warningnote")
    # NOTE(review): '' inside the msg literal is adjacent-string
    # concatenation (yields "didnt"), not an escaped apostrophe.
    self.assertIn('No changes made to the site information',
                  resultWarning,
                  'IMS site detail view didn''t generate the correct warning.\nactual message = %s' %
                  resultWarning)
def test_site_detail_post_save_site_without_change_site_perm(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_site_without_change_site_perm... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save Site': 'Save Site', }
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to change site information',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_detail_post_save_adjust_changes_quantity(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes_quantity... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
product = createdProducts[0]
inventory = createdInventory[0]
perms = ['change_inventoryitem', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
newQuantity = 5
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-id':[inventory.pk],
'form-0-site':[site.pk],
'form-0-information':[product.pk],
'form-0-quantity':[newQuantity],
'Save Adjust Changes':'Save Adjust Changes',}
request=self.factory.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully changed site inventory',
resultInfo,
'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},) +
'?' + urlencode({'page':1,
'pageSize':PAGE_SIZE,
'adjust':'True'}),
302,
200)
newInventory = site.latest_inventory_for_product(code = product.pk)
self.assertEqual(newInventory.quantity,
5,
'site_detail view didn''t show the correct inventory quantity after changing to %d\n Quantity = %d' % (newQuantity, newInventory.quantity))
def test_site_detail_post_save_adjust_changes_delete(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=2,
numItems=1)
site = createdSites[0]
numInventory = site.latest_inventory().count()
self.assertEqual(numInventory,
2,
'site_detail view didn''t show the correct inventory after adding 2. Quantity = %d' % numInventory)
perms = ['change_inventoryitem', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': [len(createdProducts)],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'Save Adjust Changes':'Save Adjust Changes',}
addItemDict = {}
deleteIndex = 1
for index in range(len(createdInventory)):
addItemDict['form-%d-id' % index] = createdInventory[index].pk
addItemDict['form-%d-site' % index] = createdInventory[index].site.pk
addItemDict['form-%d-quantity' % index] = createdInventory[index].quantity
addItemDict['form-%d-information' % index] = createdInventory[index].information.pk
if index == deleteIndex:
addItemDict['form-%d-deleteItem' % index] = 'on'
postData.update(addItemDict)
request=self.factory.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully changed site inventory',
resultInfo,
'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},) +
'?' + urlencode({'page':1,
'pageSize':PAGE_SIZE,
'adjust':'True'}),
302,
200)
numInventory = site.latest_inventory().count()
self.assertEqual(numInventory,
1,
'site_detail view didn''t show the correct inventory after deleting 1. Quantity = %d' % numInventory)
def test_site_detail_post_save_adjust_changes_without_change_inventoryitem_perm(self):
    """A user holding delete_inventoryitem but NOT change_inventoryitem
    must be refused when posting 'Save Adjust Changes'."""
    print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes_without_change_inventoryitem_perm... '
    (createdSites,
     createdProducts,
     createdInventory,
     __)=create_products_with_inventory_items_for_sites(
        numSites=1,
        numProducts=1,
        numItems=1)
    site = createdSites[0]
    product = createdProducts[0]
    inventory = createdInventory[0]
    # Deliberately grant only delete_inventoryitem; the adjust action
    # also requires change_inventoryitem.
    perms = ['delete_inventoryitem',]
    permissions = Permission.objects.filter(codename__in = perms)
    self.user.user_permissions=permissions
    self.client.login(username='testUser', password='12345678')
    newQuantity = 5
    # Formset management fields plus one row with the attempted change.
    postData = {'form-MAX_NUM_FORMS': ['1000'],
                'form-TOTAL_FORMS': ['1'],
                'form-MIN_NUM_FORMS': ['0'],
                'form-INITIAL_FORMS': ['1'],
                'form-0-id':[inventory.pk],
                'form-0-site':[site.pk],
                'form-0-information':[product.pk],
                'form-0-quantity':[newQuantity],
                'Save Adjust Changes':'Save Adjust Changes',}
    response=self.client.post(reverse('ims:site_detail',
                                      kwargs =
                                      {'siteId':site.pk,}),
                              postData,
                              follow=False)
    resultError = get_announcement_from_response(response=response,
                                                 cls="errornote")
    # NOTE(review): '' in these literals is adjacent-string concatenation
    # ("dont"/"didnt"), not an escaped apostrophe; the view presumably
    # builds its message the same way — confirm before changing.
    self.assertIn('You don''t have permission to change or delete inventory',
                  resultError,
                  'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
                  resultError)
def test_site_detail_post_save_adjust_changes_without_delete_inventoryitem_perm(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes_without_delete_inventoryitem_perm... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
product = createdProducts[0]
inventory = createdInventory[0]
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
newQuantity = 5
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-id':[inventory.pk],
'form-0-site':[site.pk],
'form-0-information':[product.pk],
'form-0-quantity':[newQuantity],
'Save Adjust Changes':'Save Adjust Changes',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to change or delete inventory',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_detail_post_save_add_subtract_changes_quantity(self):
    """POSTing 'Save Add Subtract Changes' with addSubtract=5 adds 5 to
    the existing item quantity, stores a success message in the session,
    and redirects back with adjust=False."""
    print 'running SiteDetailViewTests.test_site_detail_post_save_add_subtract_changes_quantity... '
    (createdSites,
     createdProducts,
     createdInventory,
     __)=create_products_with_inventory_items_for_sites(
        numSites=1,
        numProducts=1,
        numItems=1)
    site = createdSites[0]
    product = createdProducts[0]
    inventory = createdInventory[0]
    # Add/subtract only requires change permission (no delete).
    perms = ['change_inventoryitem',]
    permissions = Permission.objects.filter(codename__in = perms)
    self.user.user_permissions=permissions
    self.client.login(username='testUser', password='12345678')
    quantityAdd = 5
    # The formset row posts a delta via 'addSubtract' rather than an
    # absolute 'quantity'.
    postData = {'form-MAX_NUM_FORMS': ['1000'],
                'form-TOTAL_FORMS': ['1'],
                'form-MIN_NUM_FORMS': ['0'],
                'form-INITIAL_FORMS': ['1'],
                'form-0-id':[inventory.pk],
                'form-0-site':[site.pk],
                'form-0-information':[product.pk],
                'form-0-addSubtract':[quantityAdd],
                'Save Add Subtract Changes':'Save Add Subtract Changes',}
    request=self.factory.post(reverse('ims:site_detail',
                                      kwargs =
                                      {'siteId':site.pk,}),
                              postData,
                              follow=False)
    request.user = self.user
    add_session_to_request(request)
    response = site_detail(request, siteId = site.pk)
    resultInfo = request.session['infoMessage']
    # NOTE(review): '' in the msg literals is adjacent-string
    # concatenation ("didnt"), not an escaped apostrophe.
    self.assertIn('Successfully changed site inventory',
                  resultInfo,
                  'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
                  resultInfo)
    response.client = self.client
    self.assertRedirects(response,
                         reverse('ims:site_detail',
                                 kwargs={'siteId':site.pk,},) +
                         '?' + urlencode({'page':1,
                                          'pageSize':PAGE_SIZE,
                                          'adjust':'False'}),
                         302,
                         200)
    newInventory = site.latest_inventory_for_product(code = product.pk)
    # Presumably each created inventory item starts at quantity 1 — the
    # expected value below (1 + quantityAdd) assumes so; confirm against
    # create_products_with_inventory_items_for_sites.
    self.assertEqual(newInventory.quantity,
                     1 + quantityAdd,
                     'site_detail view didn''t show the correct inventory quantity after changing to %d\n Quantity = %d' % (1 + quantityAdd, newInventory.quantity))
def test_site_detail_post_save_add_subtract_changes_without_change_inventoryitem_perm(self):
    """Posting 'Save Add Subtract Changes' with no permissions granted at
    all must produce a permission error."""
    print 'running SiteDetailViewTests.test_site_detail_post_save_add_subtract_changes_without_change_inventoryitem_perm... '
    (createdSites,
     createdProducts,
     createdInventory,
     __)=create_products_with_inventory_items_for_sites(
        numSites=1,
        numProducts=1,
        numItems=1)
    site = createdSites[0]
    product = createdProducts[0]
    inventory = createdInventory[0]
    # Note: unlike the sibling tests, no permissions are assigned here.
    self.client.login(username='testUser', password='12345678')
    quantityAdd = 5
    postData = {'form-MAX_NUM_FORMS': ['1000'],
                'form-TOTAL_FORMS': ['1'],
                'form-MIN_NUM_FORMS': ['0'],
                'form-INITIAL_FORMS': ['1'],
                'form-0-id':[inventory.pk],
                'form-0-site':[site.pk],
                'form-0-information':[product.pk],
                'form-0-addSubtract':[quantityAdd],
                'Save Add Subtract Changes':'Save Add Subtract Changes',}
    response=self.client.post(reverse('ims:site_detail',
                                      kwargs =
                                      {'siteId':site.pk,}),
                              postData,
                              follow=False)
    resultError = get_announcement_from_response(response=response,
                                                 cls="errornote")
    # NOTE(review): '' in these literals is adjacent-string concatenation
    # ("dont"/"didnt"), not an escaped apostrophe.
    self.assertIn('You don''t have permission to change inventory',
                  resultError,
                  'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
                  resultError)
def test_site_detail_post_add_new_inventory(self):
print 'running SiteDetailViewTests.test_site_detail_post_add_new_inventory... '
site = Site(name = 'test site')
site.save()
product = ProductInformation(name = 'test product',
code= 'D11',)
product.save()
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Add New Inventory':'Add New Inventory',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
self.assertRedirects(response,
reverse('ims:site_add_inventory',kwargs={'siteId':site.pk}),
302,
200)
def test_site_detail_post_add_new_inventory_without_add_inventory_perm(self):
print 'running SiteDetailViewTests.test_site_detail_post_add_new_inventory_without_change_inventory_perm... '
site = Site(name = 'test site')
site.save()
self.client.login(username='testUser', password='12345678')
postData = {'Add New Inventory':'Add New Inventory',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to add inventory',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
class SiteAddInventoryViewTests(TestCase):
    """
    ims_tests for site_add_inventory view

    Covers GET/POST access with and without the add_inventoryitem
    permission, an invalid site id, and the no-products edge cases.

    NOTE(review): throughout this class, '' inside single-quoted message
    literals is adjacent-string concatenation (e.g. 'didn' 't' -> "didnt"),
    not an escaped apostrophe; the views presumably build their messages
    the same way.
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')
    def test_site_add_inventory_get(self):
        """GET with add_inventoryitem permission renders the page (200)."""
        print 'running SiteAddInventoryViewTests.test_site_add_inventory_get... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        site = Site(name = 'test site',)
        site.save()
        product = ProductInformation(name = 'test product',
                                     code = 'D11')
        product.save()
        response=self.client.get(reverse('ims:site_add_inventory',
                                         kwargs = {'siteId':site.pk,}),
                                 follow=False)
        self.assertEqual(response.status_code, 200)
    def test_site_add_inventory_get_without_add_inventoryitem_perm(self):
        """GET without the permission still renders (200) but carries an
        error announcement in the page."""
        print 'running SiteAddInventoryViewTests.test_site_add_inventory_get_without_add_inventoryitem_perm... '
        self.client.login(username='testUser', password='12345678')
        site = Site(name = 'test site',)
        site.save()
        product = ProductInformation(name = 'test product',
                                     code = 'D11')
        product.save()
        response=self.client.get(reverse('ims:site_add_inventory',
                                         kwargs = {'siteId':site.pk,}),
                                 follow=False)
        self.assertEqual(response.status_code, 200)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertIn('You don''t have permission to add site inventory',
                      resultError,
                      'IMS site_add_inventory view didn''t generate the correct error.\nactual message = %s' %
                      resultError)
    def test_site_add_inventory_with_invalid_site(self):
        """Requesting a nonexistent siteId stores an error message in the
        session and redirects to the sites list."""
        print 'running SiteAddInventoryViewTests.test_site_add_inventory_with_invalid_site... '
        self.client.login(username='testUser', password='12345678')
        # No Site rows have been created, so pk 1 cannot match anything.
        siteId = 1
        request = self.factory.get(reverse('ims:site_add_inventory',
                                           kwargs = {'siteId':siteId,}),
                                   follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = site_add_inventory(request, siteId = siteId)
        resultError = request.session['errorMessage']
        self.assertIn('Site %d does not exist' %
                      siteId, resultError,
                      'IMS site_add_inventory view didn''t generate the correct error when an invalid site was requested.\nactual message = %s' %
                      resultError)
        # Attach a client so assertRedirects can follow the redirect.
        response.client = self.client
        self.assertRedirects(response,
                             reverse('ims:sites'),
                             302,
                             200)
    def test_site_add_inventory_with_no_products(self):
        """With no products in the catalog, the view warns and bounces
        back to site_detail."""
        print 'running SiteAddInventoryViewTests.test_site_add_inventory_with_no_products... '
        self.client.login(username='testUser', password='12345678')
        site = Site(name = 'test site',)
        site.save()
        request = self.factory.get(reverse('ims:site_add_inventory',
                                           kwargs = {'siteId':site.pk,}),
                                   follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = site_add_inventory(request, siteId = site.pk)
        resultWarning = request.session['warningMessage']
        self.assertIn('No products found to add',
                      resultWarning,
                      'IMS site_add_inventory view didn''t generate the correct warning.\nactual message = %s' %
                      resultWarning)
        response.client = self.client
        self.assertRedirects(response,
                             reverse('ims:site_detail',
                                     kwargs = {'siteId':site.pk}),
                             302,
                             200)
    def test_site_add_inventory_post(self):
        """Selecting a product ('form-0-Add' on) redirects to
        products_add_to_site_inventory with the chosen code in the query
        string."""
        print 'running SiteAddInventoryViewTests.test_site_add_inventory_post... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        site = Site(name = 'test site',)
        site.save()
        product = ProductInformation(name = 'test product',
                                     code = 'D11')
        product.save()
        # Formset management fields plus one selected row.
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': ['1'],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'form-0-code':[product.pk],
                    'form-0-Add':['on'],
                    'Add Products':'Add Products',}
        response=self.client.post(reverse('ims:site_add_inventory',
                                          kwargs =
                                          {'siteId':site.pk,}),
                                  postData,
                                  follow=False)
        productsToAdd = '?code=D11&'
        self.assertRedirects(response,
                             reverse('ims:products_add_to_site_inventory',
                                     kwargs = {'siteId': site.pk}) +
                             productsToAdd,
                             302,
                             200)
    def test_site_add_inventory_post_no_products(self):
        """Posting without any 'form-0-Add' selection warns that nothing
        was selected."""
        print 'running SiteAddInventoryViewTests.test_site_add_inventory_post_no_products... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        site = Site(name = 'test site',)
        site.save()
        product = ProductInformation(name = 'test product',
                                     code = 'D11')
        product.save()
        # Note: no 'form-0-Add' key, i.e. no product checkbox ticked.
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': ['1'],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'form-0-code':[product.pk],
                    'Add Products':'Add Products',}
        response=self.client.post(reverse('ims:site_add_inventory',
                                          kwargs =
                                          {'siteId':site.pk,}),
                                  postData,
                                  follow=False)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertIn('No products selected to add',
                      resultWarning,
                      'IMS site_add_inventory didn''t generate the correct warning.\nactual message = %s' %
                      resultWarning)
    def test_site_add_inventory_post_without_add_inventoryitem_perm(self):
        """Posting a selection without the permission yields an error
        announcement."""
        print 'running SiteAddInventoryViewTests.test_site_add_inventory_post_without_add_inventoryitem_perm... '
        self.client.login(username='testUser', password='12345678')
        site = Site(name = 'test site',)
        site.save()
        product = ProductInformation(name = 'test product',
                                     code = 'D11')
        product.save()
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': ['1'],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'form-0-code':[product.pk],
                    'form-0-Add':['on'],
                    'Add Products':'Add Products',}
        response=self.client.post(reverse('ims:site_add_inventory',
                                          kwargs =
                                          {'siteId':site.pk,}),
                                  postData,
                                  follow=False)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertIn('You don''t have permission to add site inventory',
                      resultError,
                      'IMS site_add_inventory didn''t generate the correct error.\nactual message = %s' %
                      resultError)
class ProductsViewTests(TestCase):
"""
ims_tests for products view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_products_get_with_no_products(self):
print 'running ProductsViewTests.test_products_get_with_no_products... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:products'),
follow=True)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No products found',
'IMS products view didn''t generate the correct warning when no products were found.\nactual message = %s' %
resultWarning)
def test_products_get_with_filter_and_no_products(self):
print 'running ProductsViewTests.test_products_get_with_filter_and_no_products... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:products',) +
'?searchField=name&searchValue=blah',
follow = False,)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No products found',
'IMS products view didn''t generate the correct warning when no products were found.\nactual message = %s' %
resultWarning)
def test_products_get_with_products(self):
print 'running ProductsViewTests.test_products_get_with_products... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name='test product',
code = code)
product.save()
response=self.client.get(reverse('ims:products',),
follow = False,)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual('', resultWarning)
def test_products_get_with_filter(self):
print 'running ProductsViewTests.test_products_get_with_filter... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name='test product',
code = code)
product.save()
response=self.client.get(reverse('ims:products',) +
'?searchField=name&searchValue=test',
follow = False,)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual('', resultWarning)
def test_products_get_with_bad_filter(self):
print 'running ProductsViewTests.test_products_get_with_bad_filter... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name='test product',
code = code)
product.save()
response=self.client.get(reverse('ims:products',) +
'?searchField=name&searchValue=blah',
follow = False,)
self.assertRedirects(response, reverse('ims:products',) +
'?page=1&pageSize=%d' % PAGE_SIZE,
status_code = 302,
target_status_code = 200)
def test_products_post_add(self):
print 'running ProductsViewTests.test_products_post_add... '
perms = ['add_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['0'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['0'],
'Add':'Add',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertRedirects(response, reverse('ims:product_add',),
status_code = 302,
target_status_code = 200)
def test_products_post_add_without_add_productinformation_perm(self):
print 'running ProductsViewTests.test_products_post_add_without_add_productinformation_perm... '
self.client.login(username='testUser', password='12345678')
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['0'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['0'],
'Add':'Add',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to add new products',
resultError,
'IMS products view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
def test_products_post_delete(self):
print 'running ProductsViewTests.test_products_post_delete... '
perms = ['delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
product = ProductInformation(name = productName,
code = code,)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[code],
'form-0-Delete':['on'],
'Delete':'Delete',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertRedirects(response, reverse('ims:product_delete',) +
'?code=D11&',
status_code = 302,
target_status_code = 200)
def test_products_post_delete_without_delete_inventoryitem_perms(self):
print 'running ProductsViewTests.test_products_post_delete_without_delete_inventoryitem_perms... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
product = ProductInformation(name = productName,
code = code,)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[code],
'form-0-Delete':['on'],
'Delete':'Delete',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS products view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
def test_products_post_delete_without_delete_productinformation_perms(self):
print 'running ProductsViewTests.test_products_post_delete_without_delete_productinformation_perms... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
product = ProductInformation(name = productName,
code = code,)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[code],
'form-0-Delete':['on'],
'Delete':'Delete',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS products view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
class ProductAddViewTests(TestCase):
    """
    ims_tests for product_add view

    Covers GET/POST with and without the add_productinformation
    permission, a no-change POST, and a POST with a pre-seeded session
    error message.

    NOTE(review): throughout this class, '' inside single-quoted message
    literals is adjacent-string concatenation (e.g. 'didn' 't' ->
    "didnt"), not an escaped apostrophe; the views presumably build
    their messages the same way.
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')
    def test_product_add_get(self):
        """GET with add_productinformation renders the form (200)."""
        print 'running ProductAddViewTests.test_product_add_get... '
        perms = ['add_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        response = self.client.get(reverse('ims:product_add'))
        self.assertEquals(response.status_code, 200)
    def test_product_add_get_without_add_productinformation_perm(self):
        """GET without the permission stores an error in the session and
        redirects to the products list."""
        print 'running ProductAddViewTests.test_product_add_get_without_add_productinformation_perm... '
        self.client.login(username='testUser', password='12345678')
        request = self.factory.get(reverse('ims:product_add'),
                                   follow = False)
        request.user = self.user
        add_session_to_request(request)
        response = product_add(request)
        # Attach a client so assertRedirects can follow the redirect.
        response.client = self.client
        resultError = request.session['errorMessage']
        self.assertIn('You don''t have permission to add new products',
                      resultError,
                      'IMS product_add view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
                      resultError)
        self.assertRedirects(response, reverse('ims:products',) +
                             '?' + urlencode({'page':1,}),
                             status_code = 302,
                             target_status_code = 200)
    def test_product_add_post(self):
        """POSTing a valid product saves it, stores a success message,
        and redirects to the new product's detail page."""
        print 'running ProductAddViewTests.test_product_add_post... '
        perms = ['add_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        postData = {'quantityOfMeasure': 1,
                    'unitOfMeasure': 'EACH',
                    'code': 'D11',
                    'Save': 'Save',
                    'name': 'test product'}
        request = self.factory.post(reverse('ims:product_add'),
                                    postData,
                                    follow = False)
        request.user = self.user
        add_session_to_request(request)
        response = product_add(request)
        resultInfo = request.session['infoMessage']
        self.assertIn('Successfully saved product.', resultInfo,
                      'IMS product_add view didn''t generate the correct info when saving.\nactual message = %s' %
                      resultInfo)
        response.client = self.client
        self.assertRedirects(response,
                             reverse('ims:product_detail',
                                     kwargs={'code':'D11'}) + '?' +
                             urlencode({'page':1,
                                        'picture':'False'}),
                             status_code = 302,
                             target_status_code = 200)
    def test_product_add_post_no_change(self):
        """POSTing 'Save' with no product fields re-renders (200) with a
        'more information required' warning."""
        print 'running ProductAddViewTests.test_product_add_post_no_change... '
        perms = ['add_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        postData = {'Save':'Save'}
        response = self.client.post(reverse('ims:product_add'),
                                    postData,
                                    follow = False)
        self.assertEqual(response.status_code, 200)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertIn('More information required before product can be added',
                      resultWarning,
                      'IMS product_add view didn''t generate the correct warning.\nactual message = %s' %
                      resultWarning)
    def test_product_add_post_with_error_message(self):
        """A pre-existing session errorMessage causes the POST to redirect
        back to the products list instead of saving."""
        print 'running ProductAddViewTests.test_product_add_post_with_error_message... '
        perms = ['add_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        postData = {'quantityOfMeasure': 1,
                    'unitOfMeasure': 'EACH',
                    'code': 'D11',
                    'Save': 'Save',
                    'name': 'test product'}
        request = self.factory.post(reverse('ims:product_add'),
                                    postData,
                                    follow = False)
        request.user = self.user
        add_session_to_request(request)
        # Seed an error before calling the view to exercise the abort path.
        request.session['errorMessage'] = 'Error'
        response = product_add(request)
        response.client = self.client
        self.assertRedirects(response, reverse('ims:products',) +
                             '?' + urlencode({'page':1,}),
                             status_code = 302,
                             target_status_code = 200)
    def test_product_add_post_without_add_productinformation_perm(self):
        """POSTing without the permission stores an error and redirects to
        the products list."""
        print 'running ProductAddViewTests.test_product_add_post_without_add_productinformation_perm... '
        self.client.login(username='testUser', password='12345678')
        postData = {'quantityOfMeasure': 1,
                    'unitOfMeasure': 'EACH',
                    'code': 'D11',
                    'Save': 'Save',
                    'name': 'test product'}
        request = self.factory.post(reverse('ims:product_add'),
                                    postData,
                                    follow = False)
        request.user = self.user
        add_session_to_request(request)
        response = product_add(request)
        resultInfo = request.session['errorMessage']
        self.assertIn('You don''t have permission to add new products', resultInfo,
                      'IMS product_add view didn''t generate the correct error when saving.\nactual message = %s' %
                      resultInfo)
        response.client = self.client
        self.assertRedirects(response,
                             reverse('ims:products',) + '?' +
                             urlencode({'page':1,}),
                             status_code = 302,
                             target_status_code = 200)
class ProductDetailViewTests(TestCase):
    """
    ims_tests for product_detail view

    Each test builds ProductInformation (and sometimes Site/inventory)
    fixtures, issues a GET or POST against ims:product_detail, and checks
    the status code, redirect target, or session announcement message.

    NOTE(review): strings written as 'don''t' / 'didn''t' are adjacent
    string-literal concatenation and evaluate to "dont" / "didnt" --
    presumably matching the exact text the views emit; confirm against
    the view code before "fixing" them.
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')
    def test_product_detail_get(self):
        # Plain GET for an existing product should render (200).
        print 'running ProductDetailViewTests.test_product_detail_get... '
        self.client.login(username='testUser', password='12345678')
        product = ProductInformation(code='D11')
        product.save()
        code="D11"
        response=self.client.get(reverse('ims:product_detail',
                                         kwargs =
                                         {'code':code,}),
                                 follow=True)
        self.assertEqual(response.status_code, 200,
                         "Product Detail View didn't return status code 200 with a valid product code.")
    def test_product_detail_get_with_filter_and_no_sites(self):
        # Site-name filter matching nothing, product stocked nowhere:
        # the page still renders directly (no warning redirect expected).
        print 'running ProductDetailViewTests.test_product_detail_get_with_filter_and_no_sites... '
        self.client.login(username='testUser', password='12345678')
        product = ProductInformation(code='D11')
        product.save()
        code="D11"
        response=self.client.get(reverse('ims:product_detail',
                                         kwargs =
                                         {'code':code,}) +
                                 '?searchField=site__name&searchValue=blah',
                                 follow = False,)
        self.assertEqual(response.status_code, 200,)
    def test_product_detail_get_with_bad_filter(self):
        # Filter matches no sites while the product IS stocked somewhere:
        # expect a session warning and a redirect with the filter cleared.
        print 'running ProductDetailViewTests.test_product_detail_get_with_bad_filter... '
        self.client.login(username='testUser', password='12345678')
        code="D11"
        product = ProductInformation(code=code)
        product.save()
        site = Site(name='test site')
        site.save()
        site.add_inventory(product = product,
                           quantity = 1,
                           modifier = self.user.username)
        request=self.factory.get(reverse('ims:product_detail',
                                         kwargs =
                                         {'code':code,}) +
                                 '?searchField=site__name&searchValue=blah',
                                 follow = False)
        request.user = self.user
        add_session_to_request(request)
        response = product_detail(request, code = code)
        resultWarning = request.session['warningMessage']
        self.assertIn('No sites found using filter criteria.<br/>Showing all sites.',
                      resultWarning,
                      'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
                      resultWarning)
        response.client = self.client
        self.assertRedirects(response, reverse('ims:product_detail',
                                               kwargs =
                                               {'code':code,}) +
                             '?page=1&picture=False',
                             status_code = 302,
                             target_status_code = 200)
    def test_product_detail_get_with_filter(self):
        # Filter that DOES match the stocking site: normal 200 render.
        print 'running ProductDetailViewTests.test_product_detail_get_with_filter... '
        self.client.login(username='testUser', password='12345678')
        code="D11"
        product = ProductInformation(code=code)
        product.save()
        site = Site(name='test site')
        site.save()
        site.add_inventory(product = product,
                           quantity = 1,
                           modifier = self.user.username)
        response=self.client.get(reverse('ims:product_detail',
                                         kwargs =
                                         {'code':code,}) +
                                 '?searchField=site__name&searchValue=test',
                                 follow = False)
        self.assertEqual(response.status_code, 200,)
    def test_product_detail_get_with_invalid_product(self):
        # Unknown product code: session error plus redirect to the
        # products list.
        print 'running ProductDetailViewTests.test_product_detail_get_with_invalid_product... '
        self.client.login(username='testUser', password='12345678')
        code="D11"
        request=self.factory.get(reverse('ims:product_detail',
                                         kwargs =
                                         {'code':code,}),
                                 follow = False)
        request.user = self.user
        add_session_to_request(request)
        response = product_detail(request, code = code)
        resultError = request.session['errorMessage']
        self.assertIn('Product %s does not exist.' % code,
                      resultError,
                      'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
                      resultError)
        response.client = self.client
        self.assertRedirects(response, reverse('ims:products',),
                             status_code = 302,
                             target_status_code = 200)
    def test_product_detail_get_when_sites_have_inventory(self):
        # One product stocked at 3 sites: the paginated site list should
        # show all 3.
        print 'running ProductDetailViewTests.test_product_detail_get_when_sites_have_inventory... '
        self.client.login(username='testUser', password='12345678')
        (createdSites,
         createdProducts,
         __,
         __)=create_products_with_inventory_items_for_sites(
             numSites=3,
             numProducts=1,
             numItems=1)
        product = createdProducts[0]
        response=self.client.get(reverse('ims:product_detail',
                                         kwargs =
                                         {'code':product.code,}) +
                                 '?searchField=site__name&searchValue=test',
                                 follow = False)
        self.assertEqual(response.status_code, 200,)
        self.assertEqual(len(response.context['paginatedItems']),
                         len(createdSites))
    def test_product_detail_get_after_deleting_inventory_from_site(self):
        # Soft-deleting one site's inventory item should drop that site
        # from the list (3 sites -> 2 shown).
        print 'running ProductDetailViewTests.test_product_detail_get_after_deleting_inventory_from_site... '
        self.client.login(username='testUser', password='12345678')
        (createdSites,
         createdProducts,
         createdInventory,
         __)=create_products_with_inventory_items_for_sites(
             numSites=3,
             numProducts=1,
             numItems=1)
        product = createdProducts[0]
        createdInventory[0].deleted = True
        createdInventory[0].save()
        response=self.client.get(reverse('ims:product_detail',
                                         kwargs =
                                         {'code':product.code,}) +
                                 '?searchField=site__name&searchValue=test',
                                 follow = False)
        self.assertEqual(response.status_code, 200,)
        self.assertEqual(len(response.context['paginatedItems']),
                         len(createdSites) - 1)
    def test_product_detail_post_save(self):
        # Valid POST with change_productinformation perm: info message in
        # the session, then redirect back to the detail page.
        print 'running ProductDetailViewTests.test_product_detail_post_save... '
        perms = ['change_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        productName = 'test product'
        product = ProductInformation(name = productName,
                                     code = code)
        product.save()
        postData = {'quantityOfMeasure': 1,
                    'unitOfMeasure': 'EACH',
                    'code': code,
                    'Save': 'Save',
                    'name': productName}
        request=self.factory.post(reverse('ims:product_detail',
                                          kwargs =
                                          {'code':code,}),
                                  postData,
                                  follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = product_detail(request, code = code)
        resultInfo = request.session['infoMessage']
        self.assertIn('Successfully saved product information changes.',
                      resultInfo,
                      'IMS product detail view didn''t generate the correct info.\nactual message = %s' %
                      resultInfo)
        response.client = self.client
        picture = 'picture=False'
        filterQuery = ''
        self.assertRedirects(response,
                             reverse('ims:product_detail',
                                     kwargs={'code':code,})
                             + '?' + picture + '&' + filterQuery,
                             302,
                             200)
    def test_product_detail_post_save_invalid_fields(self):
        # Blank required field ('code'): form redisplayed with a warning.
        print 'running ProductDetailViewTests.test_product_detail_post_save_invalid_fields... '
        perms = ['change_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        productName = 'test product'
        product = ProductInformation(name = productName,
                                     code = code)
        product.save()
        postData = {'quantityOfMeasure': 1,
                    'unitOfMeasure': 'EACH',
                    'code': '',
                    'Save': 'Save',
                    'name': productName}
        response=self.client.post(reverse('ims:product_detail',
                                          kwargs =
                                          {'code':code,}),
                                  postData,
                                  follow=False)
        self.assertEqual(response.status_code, 200)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertIn('More information required before the product can be saved',
                      resultWarning,
                      'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
                      resultWarning)
    #TODO: figure out why this sets productForm.has_changed() = True
    # def test_product_detail_post_no_change(self):
    #     print 'running ProductDetailViewTests.test_product_detail_post_no_change... '
    #     perms = ['change_productinformation']
    #     permissions = Permission.objects.filter(codename__in = perms)
    #     self.user.user_permissions=permissions
    #     self.client.login(username='testUser', password='12345678')
    #     code = 'D11'
    #     productName = 'test product'
    #     product = ProductInformation(name = productName,
    #                                  code = code,)
    #     product.save()
    #     postData = {'Save':'Save',}
    #     response=self.client.post(reverse('ims:product_detail',
    #                                       kwargs =
    #                                       {'code':code,}),
    #                               postData,
    #                               follow=False)
    #     self.assertEqual(response.status_code, 200)
    #     resultWarning = get_announcement_from_response(response=response,
    #                                                    cls="warningnote")
    #     self.assertIn('No changes made to the product information.',
    #                   resultWarning,
    #                   'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
    #                   resultWarning)
    def test_product_detail_post_without_change_productinformation_perm(self):
        # POST without the change perm: page renders with an error note.
        print 'running ProductDetailViewTests.test_product_detail_post_without_change_productinformation_perm... '
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        productName = 'test product'
        product = ProductInformation(name = productName,
                                     code = code,)
        product.save()
        postData = {'Save':'Save',}
        response=self.client.post(reverse('ims:product_detail',
                                          kwargs =
                                          {'code':code,}),
                                  postData,
                                  follow=False)
        self.assertEqual(response.status_code, 200)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertIn('You don''t have permission to change product information.',
                      resultError,
                      'IMS product detail view didn''t generate the correct error.\nactual message = %s' %
                      resultError)
    def test_product_detail_post_save_check_modification_date(self):
        # Saving a change must refresh the auto 'modified' timestamp:
        # back-date it a day, save through the view, expect delta of 1.
        print 'running ProductDetailViewTests.test_product_detail_post_save_check_modification_date... '
        perms = ['change_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        productName = 'test product'
        product = ProductInformation(name = productName,
                                     code = code)
        # back date the modified field
        product.modified = timezone.now() - timedelta(days = 1)
        creationDate = product.modified.date()
        product.save()
        # now we change the product and see if the modified date changes
        postData = {'quantityOfMeasure': 1,
                    'unitOfMeasure': 'EACH',
                    'code': code,
                    'Save': 'Save',
                    'name': productName}
        request=self.factory.post(reverse('ims:product_detail',
                                          kwargs =
                                          {'code':code,}),
                                  postData,
                                  follow=False)
        request.user = self.user
        add_session_to_request(request)
        product_detail(request, code = code)
        product = ProductInformation.objects.get(pk = code)
        changeDate = product.modified.date()
        deltaDays = (changeDate - creationDate).days
        self.assertEqual(deltaDays,
                         1,
                         'IMS product detail view didn''t change the modification date after change')
class ProductSelectAddSiteViewTests(TestCase):
    """
    ims_tests for product_select_add_site view

    Covers the site-selection step when adding a product to inventory:
    the multi-site selection page, the bad-product redirect, the
    single-site shortcut redirect, and the no-sites warning.
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')
    def test_product_select_add_site_get(self):
        # With two candidate sites the selection page should render (200).
        print('running ProductSelectAddSiteViewTests.test_product_select_add_site_get... ')
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        productName = 'test product'
        product = ProductInformation(name = productName,
                                     code = code,)
        product.save()
        site1 = Site(name = 'test site 1')
        site1.save()
        site2 = Site(name = 'test site 2')
        site2.save()
        response=self.client.get(reverse('ims:product_select_add_site',
                                         kwargs={'code':code}),
                                 follow=False)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(response.status_code, 200)
    def test_product_select_add_site_get_bad_product(self):
        # Unknown product code: session error plus redirect to the
        # products list.
        print('running ProductSelectAddSiteViewTests.test_product_select_add_site_get_bad_product... ')
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        request=self.factory.get(reverse('ims:product_select_add_site',
                                         kwargs={'code':code}),
                                 follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = product_select_add_site(request, code = code)
        resultError = request.session['errorMessage']
        self.assertIn('Product %s does not exist.' % code,
                      resultError,
                      "IMS product_select_add_site view didn't generate the correct error.\nactual message = %s" %
                      resultError)
        response.client = self.client
        self.assertRedirects(response,reverse('ims:products'),
                             status_code = 302,
                             target_status_code = 200)
    def test_product_select_add_site_single_site(self):
        # Exactly one site exists: the view should skip the selection page
        # and redirect straight to that site's add-inventory form.
        print('running ProductSelectAddSiteViewTests.test_product_select_add_site_single_site... ')
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        productName = 'test product'
        product = ProductInformation(name = productName,
                                     code = code,)
        product.save()
        site = Site(name = 'test site 1')
        site.save()
        response=self.client.get(reverse('ims:product_select_add_site',
                                         kwargs={'code':code}),
                                 follow=False)
        self.assertRedirects(response,reverse('ims:products_add_to_site_inventory',
                                              kwargs={'siteId':site.pk}) + '?' +
                             urlencode({'code':product.pk}),
                             status_code = 302,
                             target_status_code = 200)
    def test_product_select_add_site_no_sites(self):
        # No sites at all: warn and bounce back to the product detail page.
        print('running ProductSelectAddSiteViewTests.test_product_select_add_site_no_sites... ')
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        code = 'D11'
        productName = 'test product'
        product = ProductInformation(name = productName,
                                     code = code,)
        product.save()
        request=self.factory.get(reverse('ims:product_select_add_site',
                                         kwargs={'code':code}),
                                 follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = product_select_add_site(request, code = code)
        resultWarning = request.session['warningMessage']
        self.assertIn('No sites found.',
                      resultWarning,
                      "IMS product_select_add_site view didn't generate the correct warning.\nactual message = %s" %
                      resultWarning)
        response.client = self.client
        self.assertRedirects(response,reverse('ims:product_detail',
                                              kwargs={'code':product.code,}),
                             status_code = 302,
                             target_status_code = 200)
class ProductsAddToSiteInventoryViewTests(TestCase):
    """
    ims_tests for products_add_to_site_inventory view

    Exercises adding products to a site's inventory: the GET form (with
    and without permission, with invalid site/product), and the formset
    POST (save, invalid data, missing permission, cancel).

    NOTE(review): strings written as 'don''t' / 'didn''t' are adjacent
    string-literal concatenation and evaluate to "dont" / "didnt" --
    presumably matching the exact text the views emit; confirm against
    the view code before "fixing" them.
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')
    def test_products_add_to_site_inventory_get(self):
        # GET with add_inventoryitem perm, valid site and product: 200.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        site = Site(name = 'test site')
        site.save()
        productName = 'test product'
        code = 'D11'
        product = ProductInformation(name = productName, code = code)
        product.save()
        response=self.client.get(reverse('ims:products_add_to_site_inventory',
                                         kwargs = {'siteId':site.pk,}) +
                                 '?' + urlencode({'code':code}),
                                 follow=True)
        self.assertEqual(response.status_code, 200)
    def test_products_add_to_site_inventory_get_without_add_inventoryitem_perm(self):
        # No add_inventoryitem perm: session error, redirect to site detail.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get_without_add_inventoryitem_perm... '
        self.client.login(username='testUser', password='12345678')
        productName = 'test product'
        code = 'D11'
        site = Site(name = 'test site')
        site.save()
        product = ProductInformation(name = productName, code = code)
        product.save()
        request=self.factory.get(reverse('ims:products_add_to_site_inventory',
                                         kwargs = {'siteId':site.pk,}) +
                                 '?' + urlencode({'code':code}),
                                 follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = products_add_to_site_inventory(request, siteId = site.pk)
        resultError = request.session['errorMessage']
        self.assertIn('You don''t have permission to add to site inventory',
                      resultError,
                      'IMS products_add_to_site_inventory view didn''t generate the correct error.\nactual message = %s' %
                      resultError)
        response.client = self.client
        self.assertRedirects(response, reverse('ims:site_detail',
                                               kwargs={'siteId':site.pk,}),
                             status_code = 302,
                             target_status_code = 200)
    def test_products_add_to_site_inventory_get_with_invalid_site(self):
        # Nonexistent site id: error note and redirect to the sites list.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get_with_invalid_site... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        productName = 'test product'
        code = 'D11'
        siteNumber = 1
        product = ProductInformation(name = productName, code = code)
        product.save()
        response=self.client.get(reverse('ims:products_add_to_site_inventory',
                                         kwargs = {'siteId':siteNumber,}) +
                                 '?' + urlencode({'code':code}),
                                 follow=True)
        self.assertRedirects(response, reverse('ims:sites'),
                             status_code = 302,
                             target_status_code = 200)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertIn('Site %d does not exist' %
                      siteNumber, resultError,
                      'IMS products_add_to_site_inventory view didn''t generate the correct error when an invalid site was requested.\nactual message = %s' %
                      resultError)
    def test_products_add_to_site_inventory_get_with_invalid_product(self):
        # Valid site but unknown product code: error note and redirect to
        # the products list.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get_with_invalid_product... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        siteName = 'test site'
        code = 'D11'
        site = Site(name = siteName)
        site.save()
        response=self.client.get(reverse('ims:products_add_to_site_inventory',
                                         kwargs = {'siteId':site.number,}) +
                                 '?' + urlencode({'code':code}),
                                 follow=True)
        self.assertRedirects(response, reverse('ims:products'),
                             status_code = 302,
                             target_status_code = 200)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        self.assertIn('No valid products selected',
                      resultError,
                      'IMS products_add_to_site_inventory view didn''t generate the correct error when an invalid product was requested.\nactual message = %s' %
                      resultError)
    def test_products_add_to_site_inventory_post(self):
        # Valid formset POST: one "Successfully added product" info line
        # per product, then redirect to site detail.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        (createdSites,
         createdProducts,
         __,
         __)=create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=1)
        site = createdSites[0]
        # create another product that has not been added to a site yet
        productName = 'another product'
        code = 'D11'
        product = ProductInformation(name = productName,
                                     code = code)
        product.save()
        # Django management-form fields for the inventory formset.
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': [len(createdProducts)],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'Save Inventory':'Save Inventory',}
        addItemDict = {}
        addItemDict['codes'] = []
        siteInventory = site.latest_inventory()
        for index in range(len(siteInventory)):
            addItemDict['codes'].append(siteInventory[index].information.pk)
            addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
            addItemDict['form-%d-Quantity' % index] = [siteInventory[index].quantity]
        postData.update(addItemDict)
        request=self.factory.post(reverse('ims:products_add_to_site_inventory',
                                          kwargs = {'siteId':site.pk,}),
                                  postData,
                                  follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = products_add_to_site_inventory(request, siteId = site.pk)
        resultInfo = request.session['infoMessage']
        successfullAdditions = re.findall('Successfully added product',
                                          resultInfo,
                                          re.M | re.DOTALL)
        self.assertEqual(len(successfullAdditions), len(createdProducts))
        response.client = self.client
        self.assertRedirects(response, reverse('ims:site_detail',
                                               kwargs={'siteId':site.pk,}),
                             status_code = 302,
                             target_status_code = 200)
    def test_products_add_to_site_inventory_post_invalid_data(self):
        # Blank Quantity field: formset invalid, warning note rendered.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post_invalid_data... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        (createdSites,
         createdProducts,
         __,
         __)=create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=1)
        site = createdSites[0]
        # create another product that has not been added to a site yet
        productName = 'another product'
        code = 'D11'
        product = ProductInformation(name = productName,
                                     code = code)
        product.save()
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': [len(createdProducts)],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'Save Inventory':'Save Inventory',}
        addItemDict = {}
        addItemDict['codes'] = []
        siteInventory = site.latest_inventory()
        for index in range(len(siteInventory)):
            addItemDict['codes'].append(siteInventory[index].information.pk)
            addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
            # deliberately empty Quantity to make the formset invalid
            addItemDict['form-%d-Quantity' % index] = ''
        postData.update(addItemDict)
        response=self.client.post(reverse('ims:products_add_to_site_inventory',
                                          kwargs = {'siteId':site.pk,}),
                                  postData,
                                  follow=False)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertIn('More information required before the inventory can be saved',
                      resultWarning,
                      'IMS products_add_to_site_inventory view didn''t generate the correct warning.\nactual message = %s'
                      % resultWarning)
    def test_products_add_to_site_inventory_post_without_add_inventoryitem_perm(self):
        # POST without the perm: session error, redirect to site detail.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post_without_add_inventoryitem_perm... '
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        (createdSites,
         createdProducts,
         __,
         __)=create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=1)
        site = createdSites[0]
        # create another product that has not been added to a site yet
        productName = 'another product'
        code = 'D11'
        product = ProductInformation(name = productName,
                                     code = code)
        product.save()
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': [len(createdProducts)],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'Save Inventory':'Save Inventory',}
        addItemDict = {}
        addItemDict['codes'] = []
        siteInventory = site.latest_inventory()
        for index in range(len(siteInventory)):
            addItemDict['codes'].append(siteInventory[index].information.pk)
            addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
            addItemDict['form-%d-Quantity' % index] = [siteInventory[index].quantity]
        postData.update(addItemDict)
        request=self.factory.post(reverse('ims:products_add_to_site_inventory',
                                          kwargs = {'siteId':site.pk,}),
                                  postData,
                                  follow=False)
        request.user = self.user
        add_session_to_request(request)
        response = products_add_to_site_inventory(request, siteId = site.pk)
        resultError = request.session['errorMessage']
        self.assertIn('You don''t have permission to add to site inventory',
                      resultError,
                      'IMS products_add_to_site_inventory view didn''t generate the correct error.\nactual message = %s'
                      % resultError)
        response.client = self.client
        self.assertRedirects(response, reverse('ims:site_detail',
                                               kwargs={'siteId':site.pk,}),
                             status_code = 302,
                             target_status_code = 200)
    def test_products_add_to_site_inventory_post_cancel(self):
        # Cancel button: no save path, straight redirect to site detail.
        print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post_cancel... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        (createdSites,
         createdProducts,
         __,
         __)=create_products_with_inventory_items_for_sites(
             numSites=1,
             numProducts=1,
             numItems=1)
        site = createdSites[0]
        # create another product that has not been added to a site yet
        productName = 'another product'
        code = 'D11'
        product = ProductInformation(name = productName,
                                     code = code)
        product.save()
        postData = {'form-MAX_NUM_FORMS': ['1000'],
                    'form-TOTAL_FORMS': [len(createdProducts)],
                    'form-MIN_NUM_FORMS': ['0'],
                    'form-INITIAL_FORMS': ['1'],
                    'Cancel':'Cancel',}
        addItemDict = {}
        addItemDict['codes'] = []
        siteInventory = site.latest_inventory()
        for index in range(len(siteInventory)):
            addItemDict['codes'].append(siteInventory[index].information.pk)
            addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
            addItemDict['form-%d-Quantity' % index] = [siteInventory[index].quantity]
        postData.update(addItemDict)
        response=self.client.post(reverse('ims:products_add_to_site_inventory',
                                          kwargs = {'siteId':site.pk,}),
                                  postData,
                                  follow=False)
        self.assertRedirects(response, reverse('ims:site_detail',
                                               kwargs={'siteId':site.pk,}),
                             status_code = 302,
                             target_status_code = 200)
class ImportSitesViewTests(TestCase):
    """
    ims_tests for import_sites view

    Exercises the xls site-import form: successful import, duplicate
    detection, missing file, and both permission failures.
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')
    def test_import_sites_warning_with_file_and_perms(self):
        # Valid file plus add_site/change_site perms: 3 sites stored and
        # no error note on the page.
        print('running ImportSitesViewTests.test_import_sites_warning_with_file_and_perms... ')
        perms = ['add_site', 'change_site']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        with open(os.path.join(
                APP_DIR,
                'testData/sites_add_site1_site2_site3.xls')) as fp:
            response=self.client.post(reverse('ims:import_sites'),
                                      {'Import':'Import','file':fp},
                                      follow=True)
        queriedSites=Site.objects.all()
        # check that we saved 3 sites
        self.assertEqual(
            queriedSites.count(),
            3,
            "Number of imported sites mismatch. Some sites didn't get stored.")
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assertEqual(resultWarning, '',
                         'import_sites view generated a warning with a valid file and user.\nactual warning message = %s'
                         % resultWarning)
    def test_import_sites_warning_file_with_dups(self):
        # Duplicate site numbers in the file should produce an error note.
        print('running ImportSitesViewTests.test_import_sites_warning_file_with_dups... ')
        perms = ['add_site', 'change_site']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        with open(
                os.path.join(
                    APP_DIR,
                    'testData/sites_add_site1_site2_site3_site3.xls')) as fp:
            response=self.client.post(reverse('ims:import_sites'),
                                      {'Import':'Import','file':fp},
                                      follow=True)
        warningRe = '^.*Found duplicate site numbers.*$'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        # assert_ is a deprecated unittest alias; use assertTrue.
        self.assertTrue(re.match(warningRe,resultWarning),
                        'import_sites view generated incorrect warning when import contained duplicates.\nRE for part of desired Warning Message = %s\n\nactual warning message = %s'
                        % (warningRe, resultWarning))
    def test_import_sites_warning_with_no_file_and_perms(self):
        # Posting the form without a file should warn 'No file selected'.
        print('running ImportSitesViewTests.test_import_sites_warning_with_no_file_and_perms... ')
        perms = ['add_site', 'change_site']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        response=self.client.post(reverse('ims:import_sites'),
                                  {'Import':'Import'},
                                  follow=True)
        warning='No file selected'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertEqual(resultWarning, warning,
                         'import_sites view generated incorrect warning when no file was selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
                         % (warning, resultWarning))
    def test_import_sites_error_with_file_and_without_add_site_perm(self):
        # Missing add_site: the import must be refused with an error note.
        print('running ImportSitesViewTests.test_import_sites_error_with_file_and_without_add_site_perm... ')
        perms = ['change_site']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        with open(
                os.path.join(
                    APP_DIR,
                    'testData/sites_add_site1_site2_site3.xls')) as fp:
            response=self.client.post(reverse('ims:import_sites'),
                                      {'Import Sites':'Import','file':fp},
                                      follow=True)
        # NOTE(review): 'don''t' is adjacent-literal concatenation and
        # evaluates to "dont" -- kept as-is because it presumably matches
        # the exact text the view emits; confirm against the view code.
        warning='You don''t have permission to import sites'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assertEqual(resultWarning, warning,
                         "import_sites view generated incorrect warning when user didn't have add_site perms.\ndesired Warning Message = %s\n\nactual warning message = %s"
                         % (warning, resultWarning))
    def test_import_sites_error_with_file_and_without_change_site_perm(self):
        # Missing change_site: the import must also be refused.
        print('running ImportSitesViewTests.test_import_sites_error_with_file_and_without_change_site_perm... ')
        perms = ['add_site']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        with open(os.path.join(
                APP_DIR,
                'testData/sites_add_site1_site2_site3.xls')) as fp:
            response=self.client.post(reverse('ims:import_sites'),
                                      {'Import Sites':'Import','file':fp},
                                      follow=True)
        # NOTE(review): see note above about the 'don''t' literal.
        warning='You don''t have permission to import sites'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assertEqual(resultWarning, warning,
                         "import_sites view generated incorrect warning when user didn't have change_site perms.\ndesired Warning Message = %s\n\nactual warning message = %s"
                         % (warning, resultWarning))
class ImportProductsViewTests(TestCase):
"""
ims_tests for import_products view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_import_products_error_with_file_and_perms(self):
print 'running ImportProductsViewTests.test_import_products_error_with_file_and_perms... '
perms = ['add_productinformation', 'change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import','file':fp},
follow=True)
queriedProducts=ProductInformation.objects.all()
# check that we saved 3 sites
self.assertEqual(queriedProducts.count(),
3,
'Number of imported products mismatch. Some products didn''t get stored. Found %d expected 3' % queriedProducts.count())
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning,
'',
'import_products view generated a warning with a valid file and user.\nactual warning message = %s'
% resultWarning)
def test_import_products_error_file_with_dups(self):
print 'running ImportProductsViewTests.test_import_products_error_file_with_dups... '
perms = ['add_productinformation', 'change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(
os.path.join(
APP_DIR,
'testData/products_add_prod1_prod2_prod3_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import','file':fp},
follow=True)
warningRe = '^.*Found duplicate product codes.*$'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(re.match(warningRe,resultWarning),
'import_products view generated incorrect warning when import contained duplicates.\nRE for part of desired Warning Message = %s\n\nactual warning message = %s'
% (warningRe, resultWarning))
def test_import_products_warning_with_no_file_and_perms(self):
print 'running ImportProductsViewTests.test_import_products_warning_with_no_file_and_perms... '
perms = ['add_productinformation', 'change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import'},
follow=True)
warning='No file selected'
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual(resultWarning,
warning,
'import_products view generated incorrect warning when no file was selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
    def test_import_products_error_with_file_and_without_add_productinformation_perm(self):
        """
        A user lacking add_productinformation permission must get a
        permission-denied 'errornote' when importing products.
        """
        print 'running ImportProductsViewTests.test_import_products_error_with_file_and_without_add_productinformation_perm... '
        # only change perm; add_productinformation deliberately missing
        perms = ['change_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        with open(os.path.join(
            APP_DIR,
            'testData/products_add_prod1_prod2_prod3.xls')) as fp:
            response=self.client.post(reverse('ims:import_products'),
                                      {'Import':'Import','file':fp},
                                      follow=True)
        # NOTE(review): 'don''t' is implicit string concatenation in
        # Python and evaluates to "dont" -- presumably the view emits
        # the same text; confirm against the view before "fixing".
        warning='You don''t have permission to import products'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assertEqual(resultWarning,
                         warning,
                         'import_products view generated incorrect warning when user didn''t have add_productinformation perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
                         % (warning, resultWarning))
    def test_import_products_error_with_file_and_without_change_productinformation_perm(self):
        """
        A user lacking change_productinformation permission must get a
        permission-denied 'errornote' when importing products.
        """
        print 'running ImportProductsViewTests.test_import_products_error_with_file_and_without_change_productinformation_perm... '
        # only add perm; change_productinformation deliberately missing
        perms = ['add_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        with open(os.path.join(
            APP_DIR,
            'testData/products_add_prod1_prod2_prod3.xls')) as fp:
            response=self.client.post(reverse('ims:import_products'),
                                      {'Import':'Import','file':fp},
                                      follow=True)
        # NOTE(review): 'don''t' concatenates to "dont" -- must match
        # the view's actual announcement text
        warning='You don''t have permission to import products'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assertEqual(resultWarning,
                         warning,
                         'import_products view generated incorrect warning when user didn''t have change_productinformation perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
                         % (warning, resultWarning))
class ImportInventoryViewTests(TestCase):
    """
    ims_tests for import_inventory view
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')

    def test_import_inventory_error_with_file_and_perms(self):
        """
        A valid inventory import by a user with add_inventoryitem perm
        should store all items and produce no 'errornote'.
        """
        print 'running ImportInventoryViewTests.test_import_inventory_error_with_file_and_perms... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with products and sites, so we can
        # import inventory
        filename=os.path.join(APP_DIR,
                              'testData/sites_add_site1_site2_site3.xls')
        Site.parse_sites_from_xls(filename=filename,
                                  modifier='none',
                                  save=True)
        filename=os.path.join(APP_DIR,
                              'testData/products_add_prod1_prod2_prod3.xls')
        ProductInformation.parse_product_information_from_xls(filename=filename,
                                                              modifier='none',
                                                              save=True)
        with open(os.path.join(
            APP_DIR,
            'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3.xls')) as fp:
            response=self.client.post(reverse('ims:import_inventory'),
                                      {'Import':'Import','file':fp},
                                      follow=True)
        queriedInventory=InventoryItem.objects.all()
        # 3 sites x 3 products from the fixture = 9 inventory items
        self.assertEqual(queriedInventory.count(),
                         9,
                         'Number of imported inventory items mismatch. Some inventory didn''t get stored.')
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assertEqual(resultWarning,
                         '',
                         'imports view generated a warning with a valid file and user.\nactual warning message = %s'
                         % resultWarning)

    def test_import_inventory_error_file_with_dups(self):
        """
        Importing an inventory spreadsheet containing duplicate items
        should produce an 'errornote' mentioning the duplicates.
        """
        print 'running ImportInventoryViewTests.test_import_inventory_error_file_with_dups... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with products and sites, so we can
        # import inventory
        filename=os.path.join(APP_DIR,
                              'testData/sites_add_site1_site2_site3.xls')
        Site.parse_sites_from_xls(filename=filename,
                                  modifier='none',
                                  save=True)
        filename=os.path.join(APP_DIR,
                              'testData/products_add_prod1_prod2_prod3.xls')
        ProductInformation.parse_product_information_from_xls(filename=filename,
                                                              modifier='none',
                                                              save=True)
        # fixture intentionally contains duplicate inventory rows
        with open(
            os.path.join(
                APP_DIR,
                'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3_dups.xls')) as fp:
            response=self.client.post(reverse('ims:import_inventory'),
                                      {'Import':'Import','file':fp},
                                      follow=True)
        warningRe = '^.*Found duplicate inventory items.*$'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(re.match(warningRe,resultWarning),
                     'import_inventory view generated incorrect warning when import contained duplicates.\nRE for part of desired Warning Message = %s\n\nactual warning message = %s'
                     % (warningRe, resultWarning))

    def test_import_inventory_warning_with_no_file_and_perms(self):
        """
        Posting the Import form without a 'file' field should produce
        a 'warningnote' saying no file was selected.
        """
        print 'running ImportInventoryViewTests.test_import_inventory_warning_with_no_file_and_perms... '
        perms = ['add_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with products and sites, so we can
        # import inventory
        filename=os.path.join(APP_DIR,
                              'testData/sites_add_site1_site2_site3.xls')
        Site.parse_sites_from_xls(filename=filename,
                                  modifier='none',
                                  save=True)
        filename=os.path.join(APP_DIR,
                              'testData/products_add_prod1_prod2_prod3.xls')
        ProductInformation.parse_product_information_from_xls(filename=filename,
                                                              modifier='none',
                                                              save=True)
        # deliberately omit the 'file' key from the POST data
        response=self.client.post(reverse('ims:import_inventory'),
                                  {'Import':'Import',},
                                  follow=True)
        warning = 'No file selected'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assertEqual(warning,
                         resultWarning,
                         'import_inventory view generated incorrect warning when no file was selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
                         % (warning, resultWarning))

    def test_import_inventory_error_with_file_and_without_add_inventoryitem_perm(self):
        """
        A user with no add_inventoryitem permission must get a
        permission-denied 'errornote' when importing inventory.
        """
        print 'running ImportInventoryViewTests.test_import_inventory_error_with_file_and_without_add_inventoryitem_perm...'
        # note: no permissions are granted to the user here
        self.client.login(username='testUser', password='12345678')
        # populate the database with products and sites, so we can
        # import inventory
        filename=os.path.join(APP_DIR,
                              'testData/sites_add_site1_site2_site3.xls')
        Site.parse_sites_from_xls(filename=filename,
                                  modifier='none',
                                  save=True)
        filename=os.path.join(APP_DIR,
                              'testData/products_add_prod1_prod2_prod3.xls')
        ProductInformation.parse_product_information_from_xls(filename=filename,
                                                              modifier='none',
                                                              save=True)
        with open(os.path.join(
            APP_DIR,
            'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3.xls')) as fp:
            response=self.client.post(reverse('ims:import_inventory'),
                                      {'Import':'Import','file':fp},
                                      follow=True)
        # NOTE(review): 'don''t' concatenates to "dont" -- must match
        # the view's actual announcement text
        warning = 'You don''t have permission to import inventory'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assertEqual(warning,
                         resultWarning,
                         'import_inventory view generated incorrect warning when user didn''t have add_inventoryitem perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
                         % (warning, resultWarning))
class SiteDeleteAllViewTests(TestCase):
    """
    ims_tests for site_delete_all view
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')

    def test_site_delete_all_confirmed_with_perms(self):
        """
        Confirming delete-all-sites with both delete perms should
        remove every Site and every InventoryItem.
        """
        print 'running SiteDeleteAllViewTests.test_site_delete_all_confirmed_with_perms... '
        perms = ['delete_site', 'delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        # call the view function directly with a session-equipped request
        request = self.factory.post(reverse('ims:imports'),
                                    {'Delete All Sites':'Delete All Sites'},)
        add_session_to_request(request)
        request.user=self.user
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        site_delete_all(request)
        self.assertEqual(Site.objects.all().count(),
                         0,
                         'Did not delete all sites')
        self.assertEqual(InventoryItem.objects.all().count(),
                         0,
                         'Did not delete all inventory')

    def test_site_delete_all_confirmed_without_delete_site_perm(self):
        """
        Without delete_site perm the view must refuse and emit a
        permission-denied 'errornote'.
        """
        print 'running SiteDeleteAllViewTests.test_site_delete_all_confirmed_without_delete_site_perm... '
        perms = ['delete_inventoryitem',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        request = self.factory.post(reverse('ims:imports'),
                                    {'Delete All Sites':'Delete All Sites'},)
        add_session_to_request(request)
        request.user=self.user
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        response=site_delete_all(request)
        # NOTE(review): 'don''t' concatenates to "dont" -- must match
        # the view's actual announcement text
        warning='You don''t have permission to delete sites or inventory'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(warning in resultWarning,
                     ('site_delete_all view didn''t generate the appropriate warning when requested to delete all sites without delete_site perms.\ndesired warning message = %s\nactual warning message = %s'
                      % (warning, resultWarning)))

    def test_site_delete_all_confirmed_without_delete_inventoryitem_perm(self):
        """
        Without delete_inventoryitem perm the view must refuse and emit
        a permission-denied 'errornote'.
        """
        print 'running SiteDeleteAllViewTests.test_site_delete_all_confirmed_without_delete_inventoryitem_perm... '
        perms = ['delete_site',]
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        request = self.factory.post(reverse('ims:imports'),
                                    {'Delete All Sites':'Delete All Sites'},)
        add_session_to_request(request)
        request.user=self.user
        # populate the database with some data
        create_products_with_inventory_items_for_sites( numSites=20,
                                                        numProducts=5,
                                                        numItems=1)
        response=site_delete_all(request)
        warning='You don''t have permission to delete sites or inventory'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(warning in resultWarning,
                     ('site_delete_all view didn''t generate the appropriate warning when requested to delete all sites without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
                      % (warning,resultWarning)))

    def test_site_delete_all_canceled_with_perms(self):
        """
        Canceling the delete confirmation must leave all sites and
        inventory untouched, even with full perms.
        """
        print 'running SiteDeleteAllViewTests.test_site_delete_all_canceled_with_perms... '
        perms = ['delete_site', 'delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        request = self.factory.post(reverse('ims:imports'),
                                    {'Cancel':'Cancel'},)
        add_session_to_request(request)
        request.user=self.user
        # populate the database with some data
        (createdSites,
         __,
         createdInventoryItems,
         __)=create_products_with_inventory_items_for_sites(
            numSites=20,
            numProducts=5,
            numItems=1)
        site_delete_all(request)
        self.assertEqual(Site.objects.all().count(),
                         len(createdSites),
                         'Deleted sites, should have canceled')
        self.assertEqual(InventoryItem.objects.all().count(),
                         len(createdInventoryItems),
                         'Deleted inventory, should have canceled')
class ProductDeleteAllViewTests(TestCase):
"""
ims_tests for product_delete_all view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_product_delete_all_confirmed_with_perms(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_confirmed_with_perms... '
perms = ['delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Products':'Delete All Products'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
product_delete_all(request)
self.assertEqual(ProductInformation.objects.all().count(),
0,
'Did not delete all products')
self.assertEqual(InventoryItem.objects.all().count(),
0,
'Did not delete all inventory')
def test_product_delete_all_confirmed_without_delete_productinformation_perm(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_confirmed_without_delete_productinformation_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Products':'Delete All Products'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
response=product_delete_all(request)
warning='You don''t have permission to delete products or inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'product_delete_all view didn''t generate the appropriate warning when requested to delete all products without delete_productinformation perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_product_delete_all_confirmed_without_delete_inventoryitem_perm(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_confirmed_without_delete_inventoryitem_perm... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Products':'Delete All Products'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
response=product_delete_all(request)
warning='You don''t have permission to delete products or inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'product_delete_all view didn''t generate the appropriate warning when requested to delete all products without delete_inventoryitem perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_product_delete_all_canceled_with_perms(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_canceled_with_perms... '
perms = ['delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Cancel':'Cancel'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
(createdSites,
__,
createdInventoryItems,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
product_delete_all(request)
self.assertEqual(Site.objects.all().count(),
len(createdSites),
'Deleted products, should have canceled')
self.assertEqual(InventoryItem.objects.all().count(),
len(createdInventoryItems),
'Deleted inventory, should have canceled')
class InventoryDeleteAllViewTests(TestCase):
    """
    ims_tests for inventory_delete_all view
    """
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')

    def test_inventory_delete_all_confirmed_with_perms(self):
        """
        Confirming delete-all-inventory with delete_inventoryitem perm
        should remove every InventoryItem.
        """
        print 'running InventoryDeleteAllViewTests.test_inventory_delete_all_confirmed_with_perms... '
        perms = ['delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        # call the view function directly with a session-equipped request
        request = self.factory.post(reverse('ims:imports'),
                                    {'Delete All Inventory':'Delete All Inventory'},)
        add_session_to_request(request)
        request.user=self.user
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        inventory_delete_all(request)
        self.assertEqual(InventoryItem.objects.all().count(),
                         0,
                         'Did not delete all inventory')

    def test_inventory_delete_all_confirmed_without_delete_inventoryitem_perm(self):
        """
        Without delete_inventoryitem perm the view must refuse and emit
        a permission-denied 'errornote'.
        """
        print 'running InventoryDeleteAllViewTests.test_inventory_delete_all_confirmed_without_delete_inventoryitem_perm... '
        # deliberately grant no permissions at all
        perms = []
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        request = self.factory.post(reverse('ims:imports'),
                                    {'Delete All Inventory':'Delete All Inventory'},)
        add_session_to_request(request)
        request.user=self.user
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        response=inventory_delete_all(request)
        # NOTE(review): 'don''t' concatenates to "dont" -- must match
        # the view's actual announcement text
        warning='You don''t have permission to delete inventory'
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all inventory without delete_inventoryitem perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning, resultWarning))

    def test_inventory_delete_all_canceled_with_perms(self):
        """
        Canceling the delete confirmation must leave all inventory
        untouched, even with the delete perm.
        """
        print 'running InventoryDeleteAllViewTests.test_inventory_delete_all_canceled_with_perms... '
        perms = ['delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        request = self.factory.post(reverse('ims:imports'),
                                    {'Cancel':'Cancel'},)
        add_session_to_request(request)
        request.user=self.user
        # populate the database with some data
        (__,
         __,
         createdInventoryItems,
         __)=create_products_with_inventory_items_for_sites(
            numSites=20,
            numProducts=5,
            numItems=1)
        inventory_delete_all(request)
        self.assertEqual(InventoryItem.objects.all().count(),
                         len(createdInventoryItems),
                         'Deleted inventory, should have canceled')
class ImportsViewTests(TestCase):
"""
ims_tests for Imports view
"""
    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')
def test_delete_sites_warning_with_perms(self):
print 'running ImportsViewTests.test_delete_sites_warning_with_perms... '
perms = ['delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
warning=('Delete all %d sites? This will delete all inventory as well.'
% len(createdSites))
response=self.client.post(reverse('ims:imports'),
{'Delete Sites':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assert_(warning in resultWarning,
"imports view didn't generate the appropriate warning when requested to delete all sites with appropriate perms.\ndesired warning message = %s\nactual warning message = "
% resultWarning)
    def test_delete_sites_error_without_delete_site_perm(self):
        """
        Requesting site deletion without delete_site perm should yield
        a permission-denied 'errornote'.
        """
        print 'running ImportsViewTests.test_delete_sites_error_without_delete_site_perm... '
        perms = ['delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        # NOTE(review): 'don''t' concatenates to "dont" -- must match
        # the view's actual announcement text
        warning='You don''t have permission to delete sites or inventory'
        response=self.client.post(reverse('ims:imports'), {'Delete Sites':'Delete'}, follow=True)
        resultWarning = get_announcement_from_response(response=response, cls="errornote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all sites without delete_site perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning, resultWarning))
    def test_delete_sites_error_without_delete_inventoryitem_perm(self):
        """
        Requesting site deletion without delete_inventoryitem perm
        should yield a permission-denied 'errornote'.
        """
        print 'running ImportsViewTests.test_delete_sites_error_without_delete_inventoryitem_perm... '
        perms = ['delete_site']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        warning='You don''t have permission to delete sites or inventory'
        response=self.client.post(reverse('ims:imports'),
                                  {'Delete Sites':'Delete'},
                                  follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all sites without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning,resultWarning))
    def test_export_sites(self):
        """
        Exported sites spreadsheet should round-trip: parsing the
        response content must yield exactly the sites we created.
        """
        print 'running ImportsViewTests.test_export_sites... '
        # populate the database with some data
        (createdSites,
         __,
         __,
         __)=create_products_with_inventory_items_for_sites(
            numSites=3,
            numProducts=5,
            numItems=1,
            modifier='testUser')
        self.client.login(username='testUser', password='12345678')
        response=self.client.post(reverse('ims:imports'),
                                  {'Export Sites':'All'},
                                  follow=True)
        # parse the xls bytes straight out of the response; save=False
        # keeps the round-trip check side-effect free
        parsedExportedSites,__=Site.parse_sites_from_xls(
            file_contents=response.content,
            save=False)
        # compare microsecond-stripped keys, order-insensitively
        sortedParsedExportedSites=[]
        for site in parsedExportedSites:
            sortedParsedExportedSites.append(site.create_key_no_microseconds())
        sortedParsedExportedSites.sort()
        sortedCreatedSites=[]
        for site in createdSites:
            sortedCreatedSites.append(site.create_key_no_microseconds())
        sortedCreatedSites.sort()
        self.assertListEqual(sortedParsedExportedSites,
                             sortedCreatedSites,
                             'Sites exported to Excel don''t match the sites in the database')
    def test_delete_products_warning_with_perms(self):
        """
        Requesting product deletion with the proper perms should
        produce a confirmation 'warningnote' stating the product count.
        """
        print 'running ImportsViewTests.test_delete_products_warning_with_perms... '
        perms = ['delete_productinformation', 'delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        (__,
         createdProducts,
         __,
         __)=create_products_with_inventory_items_for_sites(
            numSites=20,
            numProducts=5,
            numItems=1)
        warning=('Delete all %d products? This will delete all inventory as well.'
                 % len(createdProducts))
        response=self.client.post(reverse('ims:imports'),
                                  {'Delete Products':'Delete'},
                                  follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all products with appropriate perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning, resultWarning))
    def test_delete_products_error_without_delete_productinformation_perm(self):
        """
        Requesting product deletion without delete_productinformation
        perm should yield a permission-denied 'errornote'.
        """
        print 'running ImportsViewTests.test_delete_products_error_without_delete_productinformation_perm... '
        perms = ['delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        warning='You don''t have permission to delete products or inventory'
        response=self.client.post(reverse('ims:imports'),
                                  {'Delete Products':'Delete'},
                                  follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all products without delete_productinformation perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning,resultWarning))
    def test_delete_products_error_without_delete_inventoryitem_perm(self):
        """
        Requesting product deletion without delete_inventoryitem perm
        should yield a permission-denied 'errornote'.
        """
        print 'running ImportsViewTests.test_delete_products_error_without_delete_inventoryitem_perm... '
        perms = ['delete_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        warning='You don''t have permission to delete products or inventory'
        response=self.client.post(reverse('ims:imports'),
                                  {'Delete Products':'Delete'},
                                  follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all products without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning, resultWarning))
def test_export_products(self):
print 'running ImportsViewTests.test_export_products... '
# populate the database with some data
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=5,
numItems=1,
modifier='testUser')
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Export Products':'All'},
follow=True)
(parsedExportedProducts,
__)=ProductInformation.parse_product_information_from_xls(
file_contents=response.content,
save=True)
sortedParsedExportedProducts=[]
for product in parsedExportedProducts:
sortedParsedExportedProducts.append(product.create_key_no_microseconds())
sortedParsedExportedProducts.sort()
sortedCreatedProducts=[]
for product in createdProducts:
sortedCreatedProducts.append(product.create_key_no_microseconds())
sortedCreatedProducts.sort()
self.assertListEqual(sortedParsedExportedProducts,
sortedCreatedProducts,
'Products exported to Excel don''t match the products in the database')
    def test_delete_inventory_warning_with_perms(self):
        """
        Requesting inventory deletion with the proper perm should
        produce a confirmation 'warningnote' stating the item count.
        """
        print 'running ImportsViewTests.test_delete_inventory_warning_with_perms... '
        perms = ['delete_inventoryitem']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        (__,
         __,
         createdInventoryItems,
         __)=create_products_with_inventory_items_for_sites(
            numSites=20,
            numProducts=5,
            numItems=1)
        warning='Delete all %d inventory items?' % len(createdInventoryItems)
        response=self.client.post(reverse('ims:imports'),
                                  {'Delete Inventory':'Delete'},
                                  follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all inventory with appropriate perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning, resultWarning))
    def test_delete_inventory_error_without_delete_inventory_perm(self):
        """
        Requesting inventory deletion without delete_inventoryitem perm
        should yield a permission-denied 'errornote'.
        """
        print 'running ImportsViewTests.test_delete_inventory_error_without_delete_inventory_perm... '
        perms = ['delete_productinformation']
        permissions = Permission.objects.filter(codename__in = perms)
        self.user.user_permissions=permissions
        self.client.login(username='testUser', password='12345678')
        # populate the database with some data
        create_products_with_inventory_items_for_sites(numSites=20,
                                                       numProducts=5,
                                                       numItems=1)
        # NOTE(review): 'don''t' concatenates to "dont" -- must match
        # the view's actual announcement text
        warning='You don''t have permission to delete inventory'
        response=self.client.post(reverse('ims:imports'),
                                  {'Delete Inventory':'Delete'},
                                  follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        self.assert_(warning in resultWarning,
                     'imports view didn''t generate the appropriate warning when requested to delete all inventory without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
                     % (warning, resultWarning))
    def test_export_all_inventory(self):
        """
        Export-all-inventory should round-trip: parsing the exported
        spreadsheet yields every inventory item of every created site.
        """
        print 'running ImportsViewTests.test_export_all_inventory... '
        # populate the database with some data
        (createdSites,
         __,
         __,
         __)=create_products_with_inventory_items_for_sites(
            numSites=3,
            numProducts=5,
            numItems=3,
            modifier='testUser')
        self.client.login(username='testUser', password='12345678')
        response=self.client.post(reverse('ims:imports'),
                                  {'Export All Inventory':'All'},
                                  follow=True)
        (parsedExportedInventory,
         __)=InventoryItem.parse_inventory_from_xls(
            file_contents=response.content,
            save=False)
        # compare pk-free, microsecond-stripped keys, order-insensitively
        sortedParsedExportedInventory=[]
        for item in parsedExportedInventory:
            sortedParsedExportedInventory.append(item.create_key_no_pk_no_microseconds())
        sortedParsedExportedInventory.sort()
        sortedCreatedInventory=[]
        for site in createdSites:
            for item in site.inventoryitem_set.all():
                sortedCreatedInventory.append(item.create_key_no_pk_no_microseconds())
        sortedCreatedInventory.sort()
        self.assertListEqual(sortedParsedExportedInventory,
                             sortedCreatedInventory,
                             'Inventory exported to Excel doesn''t match the inventory in the database')
    def test_export_current_inventory(self):
        """
        Export-latest-inventory should round-trip: parsing the exported
        spreadsheet yields each site's latest_inventory() items only.
        """
        print 'running ImportsViewTests.test_export_current_inventory... '
        # populate the database with some data; numItems=3 creates
        # multiple inventory records so 'latest' is meaningful
        (createdSites,
         __,
         __,
         __)=create_products_with_inventory_items_for_sites(
            numSites=3,
            numProducts=5,
            numItems=3,
            modifier='testUser')
        self.client.login(username='testUser', password='12345678')
        response=self.client.post(reverse('ims:imports'),
                                  {'Export Latest Inventory':'Current'},
                                  follow=True)
        (parsedExportedInventory,
         __)=InventoryItem.parse_inventory_from_xls(
            file_contents=response.content,
            save=False)
        # compare pk-free, microsecond-stripped keys, order-insensitively
        sortedParsedExportedInventory=[]
        for item in parsedExportedInventory:
            sortedParsedExportedInventory.append(item.create_key_no_pk_no_microseconds())
        sortedParsedExportedInventory.sort()
        sortedCreatedInventory=[]
        for site in createdSites:
            for item in site.latest_inventory():
                sortedCreatedInventory.append(item.create_key_no_pk_no_microseconds())
        sortedCreatedInventory.sort()
        self.assertListEqual(sortedParsedExportedInventory,
                             sortedCreatedInventory,
                             'Inventory exported to Excel doesn''t match the inventory in the database')
def test_backup(self):
print 'running ImportsViewTests.test_backup... '
# populate the database with some data
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=5,
numItems=3,
modifier='testUser')
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Backup':'Backup'},
follow=True)
try:
f = StringIO.StringIO(response.content)
zipArchive = zipfile.ZipFile(f, 'r')
backups = [filename for filename in zipArchive.namelist() if 'Backup' in filename]
self.assertTrue(len(backups) > 0,'No Backup spreadsheet in the archive')
if backups:
fileContents=zipArchive.open(backups[0],'r').read()
zipArchive.close()
(parsedBackedUpInventory,
__)=InventoryItem.parse_inventory_from_xls(
file_contents=fileContents,
save=False)
parsedBackedUpSites,__=Site.parse_sites_from_xls(
file_contents=fileContents,
save=False)
parsedBackedUpProducts,__=ProductInformation.parse_product_information_from_xls(
file_contents=fileContents,
save=False)
finally:
zipArchive.close()
f.close()
# Compare inventory
sortedParsedBackedUpInventory=[]
for item in parsedBackedUpInventory:
sortedParsedBackedUpInventory.append(item.create_key_no_pk_no_microseconds())
sortedParsedBackedUpInventory.sort()
sortedCreatedInventory=[]
for site in createdSites:
for item in site.inventoryitem_set.all():
sortedCreatedInventory.append(item.create_key_no_pk_no_microseconds())
sortedCreatedInventory.sort()
self.assertListEqual(sortedParsedBackedUpInventory,
sortedCreatedInventory,
'Inventory exported to Excel backup doesn''t match the inventory in the database')
# compare sites
sortedParsedBackedUpSites=[]
for site in parsedBackedUpSites:
sortedParsedBackedUpSites.append(site.create_key_no_microseconds())
sortedParsedBackedUpSites.sort()
sortedCreatedSites=[]
for site in createdSites:
sortedCreatedSites.append(site.create_key_no_microseconds())
sortedCreatedSites.sort()
self.assertListEqual(sortedParsedBackedUpSites,
sortedCreatedSites,
'Sites exported to Excel backup don''t match the sites in the database')
# compare products
sortedParsedBackedUpProducts=[]
for product in parsedBackedUpProducts:
sortedParsedBackedUpProducts.append(product.create_key_no_microseconds())
sortedParsedBackedUpProducts.sort()
sortedCreatedProducts=[]
for product in createdProducts:
sortedCreatedProducts.append(product.create_key_no_microseconds())
sortedCreatedProducts.sort()
self.assertListEqual(sortedParsedBackedUpProducts,
sortedCreatedProducts,
'Products exported to Excel backup don''t match the products in the database')
def test_restore_error_without_add_inventoryitem_perm(self):
    """Restore must be rejected when the user lacks add_inventoryitem."""
    print('running ImportsViewTests.test_restore_error_without_add_inventoryitem_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['change_inventoryitem',
               'delete_inventoryitem',
               'add_productinformation',
               'change_productinformation',
               'delete_productinformation',
               'add_site',
               'change_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without add_inventoryitem perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_change_inventoryitem_perm(self):
    """Restore must be rejected when the user lacks change_inventoryitem."""
    print('running ImportsViewTests.test_restore_error_without_change_inventoryitem_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'delete_inventoryitem',
               'add_productinformation',
               'change_productinformation',
               'delete_productinformation',
               'add_site',
               'change_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without change_inventoryitem perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_delete_inventoryitem_perm(self):
    """Restore must be rejected when the user lacks delete_inventoryitem."""
    print('running ImportsViewTests.test_restore_error_without_delete_inventoryitem_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'change_inventoryitem',
               'add_productinformation',
               'change_productinformation',
               'delete_productinformation',
               'add_site',
               'change_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without delete_inventoryitem perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_add_productinformation_perm(self):
    """Restore must be rejected when the user lacks add_productinformation."""
    print('running ImportsViewTests.test_restore_error_without_add_productinformation_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'change_inventoryitem',
               'delete_inventoryitem',
               'change_productinformation',
               'delete_productinformation',
               'add_site',
               'change_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without add_productinformation perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_change_productinformation_perm(self):
    """Restore must be rejected when the user lacks change_productinformation."""
    print('running ImportsViewTests.test_restore_error_without_change_productinformation_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'change_inventoryitem',
               'delete_inventoryitem',
               'add_productinformation',
               'delete_productinformation',
               'add_site',
               'change_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without change_productinformation perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_delete_productinformation_perm(self):
    """Restore must be rejected when the user lacks delete_productinformation."""
    print('running ImportsViewTests.test_restore_error_without_delete_productinformation_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'change_inventoryitem',
               'delete_inventoryitem',
               'add_productinformation',
               'change_productinformation',
               'add_site',
               'change_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without delete_productinformation perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_add_site_perm(self):
    """Restore must be rejected when the user lacks add_site."""
    print('running ImportsViewTests.test_restore_error_without_add_site_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'change_inventoryitem',
               'delete_inventoryitem',
               'add_productinformation',
               'change_productinformation',
               'delete_productinformation',
               'change_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without add_site perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_change_site_perm(self):
    """Restore must be rejected when the user lacks change_site."""
    print('running ImportsViewTests.test_restore_error_without_change_site_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'change_inventoryitem',
               'delete_inventoryitem',
               'add_productinformation',
               'change_productinformation',
               'delete_productinformation',
               'add_site',
               'delete_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without change_site perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
def test_restore_error_without_delete_site_perm(self):
    """Restore must be rejected when the user lacks delete_site."""
    print('running ImportsViewTests.test_restore_error_without_delete_site_perm... ')
    # Grant every restore-related permission except the one under test.
    granted = ['add_inventoryitem',
               'change_inventoryitem',
               'delete_inventoryitem',
               'add_productinformation',
               'change_productinformation',
               'delete_productinformation',
               'add_site',
               'change_site']
    self.user.user_permissions = Permission.objects.filter(codename__in=granted)
    self.client.login(username='testUser', password='12345678')
    response = self.client.post(reverse('ims:imports'),
                                {'Restore': 'Restore'},
                                follow=True)
    actual = get_announcement_from_response(response=response, cls="errornote")
    # NOTE(review): the original spelled this 'You don''t ...', which
    # concatenates to "dont" (no apostrophe); value kept identical.
    expected = 'You dont have permission to restore the database'
    self.assertEqual(expected, actual,
                     'imports view generated incorrect warning when user '
                     'without delete_site perm requested a database '
                     'restore.\ndesired Warning Message = %s\n\nactual '
                     'warning message = %s' % (expected, actual))
class RestoreViewTests(TestCase):
    """
    restore view ims_tests: exercises the ims:restore endpoint with and
    without the permissions required to restore the database.
    """

    # Permissions the restore tests grant for the basic entities
    # (inventory items, product information, sites).
    RESTORE_PERMS = ['add_inventoryitem',
                     'change_inventoryitem',
                     'delete_inventoryitem',
                     'add_productinformation',
                     'change_productinformation',
                     'delete_productinformation',
                     'add_site',
                     'change_site',
                     'delete_site']
    # Full set, additionally covering product categories, used by the
    # tests that actually perform a restore.
    RESTORE_PERMS_WITH_CATEGORIES = RESTORE_PERMS + ['add_productcategory',
                                                     'change_productcategory',
                                                     'delete_productcategory']

    def setUp(self):
        # Most ims_tests need access to the request factory and/or a user.
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            username='testUser', password='12345678')

    def _login_with_perms(self, codenames):
        """Grant the given permission codenames to the test user and log in."""
        self.user.user_permissions = Permission.objects.filter(
            codename__in=codenames)
        self.client.login(username='testUser', password='12345678')

    def test_restore_get_warning_with_perms(self):
        """GET on the restore view must show the data-replacement warning."""
        print('running RestoreViewTests.test_restore_get_warning_with_perms... ')
        self._login_with_perms(self.RESTORE_PERMS)
        response = self.client.get(reverse('ims:restore'), follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        warning = 'Restoring the database will cause all current information to be replaced!!!'
        self.assertEqual(
            warning, resultWarning,
            'restore view generated incorrect warning when user requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
            % (warning, resultWarning))

    def test_restore_get_error_without_perms(self):
        """GET on the restore view without any perms must show an error."""
        # BUG FIX: the original print said "test_restore_get_warning_without_perms",
        # which does not match this test's actual name.
        print('running RestoreViewTests.test_restore_get_error_without_perms... ')
        self.client.login(username='testUser', password='12345678')
        response = self.client.get(reverse('ims:restore'), follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="errornote")
        # NOTE(review): '' in the original concatenates to "dont" (no
        # apostrophe); value kept identical to match the view's message.
        warning = 'You dont have permission to restore the database'
        self.assertEqual(
            warning, resultWarning,
            'restore view generated incorrect warning when unauthorized user requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
            % (warning, resultWarning))

    def test_restore_info_with_perms(self):
        """A valid backup zip must restore and report success per entity."""
        print('running RestoreViewTests.test_restore_info_with_perms... ')
        self._login_with_perms(self.RESTORE_PERMS_WITH_CATEGORIES)
        # Binary mode so the zip upload is not corrupted on platforms that
        # translate line endings in text mode.
        with open(os.path.join(
                APP_DIR,
                'testData/Backup_3site_3prod_inventory10.zip'), 'rb') as fp:
            response = self.client.post(reverse('ims:restore'),
                                        {'Restore': 'Restore', 'file': fp},
                                        format='multipart',
                                        follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="infonote")
        # NOTE(review): "restore of sites" appears twice in the expected
        # message; kept verbatim on the assumption it matches the view's
        # actual output — confirm against the view.
        warning = 'Successful restore of sites using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of categories using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of sites using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of products using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of inventory using "Backup_3site_3prod_inventory10.zip"<br/>'
        self.assertEqual(
            warning, resultWarning,
            'restore view generated incorrect warning when user requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
            % (warning, resultWarning))

    def test_restore_warning_no_file_with_perms(self):
        """Posting a restore with no file attached must warn the user."""
        print('running RestoreViewTests.test_restore_warning_no_file_with_perms... ')
        self._login_with_perms(self.RESTORE_PERMS_WITH_CATEGORIES)
        response = self.client.post(reverse('ims:restore'),
                                    {'Restore': 'Restore'},
                                    format='multipart',
                                    follow=True)
        resultWarning = get_announcement_from_response(response=response,
                                                       cls="warningnote")
        warning = 'No file selected'
        self.assertEqual(
            warning, resultWarning,
            'restore view generated incorrect warning when user requested a database restore with no file selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
            % (warning, resultWarning))

    def test_restore_error_bad_file_with_perms(self):
        """Uploading a non-zip file must produce a BadZipfile error note."""
        print('running RestoreViewTests.test_restore_error_bad_file_with_perms... ')
        self._login_with_perms(self.RESTORE_PERMS_WITH_CATEGORIES)
        # Binary mode: this fixture is deliberately an .xls, not a zip.
        with open(os.path.join(
                APP_DIR,
                'testData/Backup_3site_3prod_inventory10.xls'), 'rb') as fp:
            response = self.client.post(reverse('ims:restore'),
                                        {'Restore': 'Restore', 'file': fp},
                                        format='multipart',
                                        follow=True)
        resultError = get_announcement_from_response(response=response,
                                                     cls="errornote")
        error = "Error while trying to restore database from backup archive:<br/>\"Backup_3site_3prod_inventory10.xls\".<br/><br/>Error Message:<br/> BadZipfile('File is not a zip file',)"
        self.assertIn(
            error, resultError,
            'restore view generated incorrect error when user requested a database restore with an invalid file.\ndesired Error Message = %s\n\nactual error message = %s'
            % (error, resultError))
406a774ca5e30e6b5ec1d85b0ec5413c5da790c4 | 14,273 | py | Python | pynos/versions/ver_7/ver_7_1_0/yang/brocade_span.py | bdeetz/pynos | bd8a34e98f322de3fc06750827d8bbc3a0c00380 | [
"Apache-2.0"
] | 12 | 2015-09-21T23:56:09.000Z | 2018-03-30T04:35:32.000Z | pynos/versions/ver_7/ver_7_1_0/yang/brocade_span.py | bdeetz/pynos | bd8a34e98f322de3fc06750827d8bbc3a0c00380 | [
"Apache-2.0"
] | 10 | 2016-09-15T19:03:27.000Z | 2017-07-17T23:38:01.000Z | pynos/versions/ver_7/ver_7_1_0/yang/brocade_span.py | bdeetz/pynos | bd8a34e98f322de3fc06750827d8bbc3a0c00380 | [
"Apache-2.0"
] | 6 | 2015-08-14T08:05:23.000Z | 2022-02-03T15:33:54.000Z | #!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_span(object):
    """Builders for Brocade SPAN (port-mirroring) configuration documents.

    Each public method assembles an ElementTree ``config`` document for one
    leaf of the ``urn:brocade.com:mgmt:brocade-span`` model and hands it to a
    callback (normally the transport layer supplied at construction, or a
    per-call ``callback`` keyword override).

    NOTE: the original auto-generated source defined every method twice with
    byte-identical bodies; Python kept only the second definition of each, so
    removing the duplicates does not change behavior.
    """

    def __init__(self, **kwargs):
        # Transport callback invoked with every built config document unless
        # the caller passes its own 'callback' keyword argument.
        self._callback = kwargs.pop('callback')

    def _build_config(self, kwargs, leaf=None, in_span_command=False):
        """Build the shared config skeleton and dispatch it to the callback.

        Pops ``session_number`` from *kwargs* (always required) and, when
        *leaf* is given, pops the matching underscored keyword for its text.
        When *in_span_command* is true the leaf is nested under a
        ``span-command`` element instead of directly under ``session``.
        Returns whatever the callback returns.
        """
        config = ET.Element("config")
        monitor = ET.SubElement(config, "monitor",
                                xmlns="urn:brocade.com:mgmt:brocade-span")
        session = ET.SubElement(monitor, "session")
        session_number = ET.SubElement(session, "session-number")
        session_number.text = kwargs.pop('session_number')
        parent = session
        if in_span_command:
            parent = ET.SubElement(session, "span-command")
        if leaf is not None:
            element = ET.SubElement(parent, leaf)
            # Keyword name is the leaf name with '-' mapped to '_'
            # (e.g. 'dest-vlan-val' -> kwargs['dest_vlan_val']).
            element.text = kwargs.pop(leaf.replace('-', '_'))
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def monitor_session_session_number(self, **kwargs):
        """Address a monitor session by its 'session_number' keyword."""
        return self._build_config(kwargs)

    def monitor_session_description(self, **kwargs):
        """Set the 'description' leaf of a monitor session."""
        return self._build_config(kwargs, leaf="description")

    def monitor_session_span_command_source(self, **kwargs):
        """Set the 'source' leaf under the session's span-command."""
        return self._build_config(kwargs, leaf="source", in_span_command=True)

    def monitor_session_span_command_src_tengigabitethernet(self, **kwargs):
        """Set the 'src-tengigabitethernet' leaf under span-command."""
        return self._build_config(kwargs, leaf="src-tengigabitethernet",
                                  in_span_command=True)

    def monitor_session_span_command_src_tengigabitethernet_val(self, **kwargs):
        """Set the 'src-tengigabitethernet-val' leaf under span-command."""
        return self._build_config(kwargs, leaf="src-tengigabitethernet-val",
                                  in_span_command=True)

    def monitor_session_span_command_destination(self, **kwargs):
        """Set the 'destination' leaf under the session's span-command."""
        return self._build_config(kwargs, leaf="destination",
                                  in_span_command=True)

    def monitor_session_span_command_dest_tengigabitethernet(self, **kwargs):
        """Set the 'dest-tengigabitethernet' leaf under span-command."""
        return self._build_config(kwargs, leaf="dest-tengigabitethernet",
                                  in_span_command=True)

    def monitor_session_span_command_dest_tengigabitethernet_val(self, **kwargs):
        """Set the 'dest-tengigabitethernet-val' leaf under span-command."""
        return self._build_config(kwargs, leaf="dest-tengigabitethernet-val",
                                  in_span_command=True)

    def monitor_session_span_command_dest_vlan_val(self, **kwargs):
        """Set the 'dest-vlan-val' leaf under the session's span-command."""
        return self._build_config(kwargs, leaf="dest-vlan-val",
                                  in_span_command=True)

    def monitor_session_span_command_direction(self, **kwargs):
        """Set the 'direction' leaf under the session's span-command."""
        return self._build_config(kwargs, leaf="direction",
                                  in_span_command=True)
| 47.105611 | 96 | 0.666153 | 1,544 | 14,273 | 5.968912 | 0.029793 | 0.122396 | 0.091146 | 0.064453 | 0.985894 | 0.985894 | 0.985894 | 0.985894 | 0.985894 | 0.985894 | 0 | 0 | 0.211518 | 14,273 | 303 | 97 | 47.105611 | 0.818909 | 0.043929 | 0 | 0.981481 | 1 | 0 | 0.189942 | 0.07769 | 0 | 0 | 0 | 0 | 0 | 1 | 0.097222 | false | 0 | 0.00463 | 0 | 0.199074 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
40b964448415f608cae54c386242e8dc6ee16f70 | 3,402 | py | Python | test/positive_tests/test_buy_ticket.py | PetrovAndrii/training | 4273ffd688b58af5c277a84fc8c57ad4ff0add04 | [
"Apache-2.0"
] | null | null | null | test/positive_tests/test_buy_ticket.py | PetrovAndrii/training | 4273ffd688b58af5c277a84fc8c57ad4ff0add04 | [
"Apache-2.0"
] | null | null | null | test/positive_tests/test_buy_ticket.py | PetrovAndrii/training | 4273ffd688b58af5c277a84fc8c57ad4ff0add04 | [
"Apache-2.0"
] | null | null | null |
from model.group_stations import Stations
def test_buy_ticket_full(app, data_groups):
    """Walk the purchase wizard for a full-fare ticket, using stations
    supplied by the data_groups fixture."""
    stations = data_groups
    # NOTE(review): real-looking credentials are hard-coded here and in the
    # sibling tests — presumably a dedicated test account; confirm.
    app.session.login(username="uz.all.test@gmail.com", password="P@ssw0rd")
    # Wizard steps: search trains -> pick train -> ticket type -> wagon -> seat.
    app.group.search_train(stations)
    app.group.choice_train()
    app.group.choice_types()
    app.group.choice_wagon()
    app.group.choice_plase()
    # Passenger details for a full-fare document type.
    app.group.doc_type_full(last_name="test", first_name="uz")
    # Payment step left commented out in the original test.
    # app.group.pay(email_pay="uz.all.test@gmail.com")
def test_buy_ticket_child(app):
    """Walk the purchase wizard for a child-fare ticket on the
    Kyiv -> Odesa route (station names in Ukrainian, as the UI expects)."""
    app.session.login(username="uz.all.test@gmail.com", password="P@ssw0rd")
    # Wizard steps: search trains -> pick train -> ticket type -> wagon -> seat.
    app.group.search_train(Stations(from_station="Київ", to_station="Одеса"))
    app.group.choice_train()
    app.group.choice_types()
    app.group.choice_wagon()
    app.group.choice_plase()
    # Passenger details for a child document type.
    app.group.doc_type_child(last_name="test", first_name="uz")
    # Payment step left commented out in the original test.
    # app.group.pay(email_pay="uz.all.test@gmail.com")
def test_buy_ticket_student(app):
    """Walk the purchase wizard for a student-discount ticket, which
    requires a student-ID number (STUD)."""
    app.session.login(username="uz.all.test@gmail.com", password="P@ssw0rd")
    app.group.search_train(Stations(from_station="Київ", to_station="Одеса"))
    app.group.choice_train()
    app.group.choice_types()
    app.group.choice_wagon()
    app.group.choice_plase()
    app.group.doc_type_student(STUD="ХА11072388", last_name="Коломійцева", first_name="Тетяна")
    # If a popup appears after submitting the student details, dismiss it
    # and repeat the whole flow once.
    # NOTE(review): the source's indentation was lost; the retry block is
    # assumed to be inside this `if` (retry only when the popup fired) —
    # confirm against the original file.
    if app.wd.find_elements_by_css_selector(".popup-canvas"):
        app.wd.find_element_by_css_selector(".ok").click()
        app.group.search_train(Stations(from_station="Київ", to_station="Одеса"))
        app.group.choice_train()
        app.group.choice_types()
        app.group.choice_wagon()
        app.group.choice_plase()
        app.group.doc_type_student(STUD="ХА11072388", last_name="Коломійцева", first_name="Тетяна")
    # Payment step left commented out in the original test.
    # app.group.pay(email_pay="uz.all.test@gmail.com")
def test_buy_ticket_beneficiary(app):
    """Walk the purchase wizard for a beneficiary (privilege-card) ticket,
    which requires the card number (Num)."""
    app.session.login(username="uz.all.test@gmail.com", password="P@ssw0rd")
    # Wizard steps: search trains -> pick train -> ticket type -> wagon -> seat.
    app.group.search_train(Stations(from_station="Київ", to_station="Одеса"))
    app.group.choice_train()
    app.group.choice_types()
    app.group.choice_wagon()
    app.group.choice_plase()
    # Beneficiary passenger details (card number plus full name, Ukrainian).
    app.group.doc_type_beneficiary(Num="В-І322262", last_name="Колісник", first_name="Наталія")
    # Payment step left commented out in the original test.
    # app.group.pay(email_pay="uz.all.test@gmail.com")
def test_buy_ticket_accompanying(app):
    """Walk the purchase wizard for a beneficiary ticket plus an
    accompanying-person ticket in the same order."""
    app.session.login(username="uz.all.test@gmail.com", password="P@ssw0rd")
    # Wizard steps: search trains -> pick train -> ticket type -> wagon -> seat.
    app.group.search_train(Stations(from_station="Київ", to_station="Одеса"))
    app.group.choice_train()
    app.group.choice_types()
    app.group.choice_wagon()
    app.group.choice_plase()
    # First passenger: beneficiary; second: the accompanying person.
    app.group.doc_type_beneficiary(Num="В-І322262", last_name="Колісник", first_name="Наталія")
    app.group.doc_type_accompanying(last_name="test", first_name="uz")
    # Payment step left commented out in the original test.
    # app.group.pay(email_pay="uz.all.test@gmail.com")
def test_buy_ticket_transfers_full(app):
    """Walk the purchase wizard for a full-fare journey with a transfer:
    the type/wagon/seat/passenger steps are performed once per leg."""
    app.session.login(username="uz.all.test@gmail.com", password="P@ssw0rd")
    app.group.search_train(Stations(from_station="Київ", to_station="Одеса"))
    # Switch the search results to connections with a transfer.
    app.group.search_transfer()
    app.group.choice_train()
    # First leg: ticket type -> wagon -> seat -> passenger details.
    app.group.choice_types()
    app.group.choice_wagon()
    app.group.choice_plase()
    app.group.doc_type_full(last_name="test", first_name="uz")
    # Second leg: same steps repeated for the connecting train.
    app.group.choice_types()
    app.group.choice_wagon()
    app.group.choice_plase()
    app.group.doc_type_full(last_name="test", first_name="uz")
    # Payment step left commented out in the original test.
    # app.group.pay(email_pay="uz.all.test@gmail.com")
90612042ba144c50f0efc8e0648788ca7fd80801 | 209,610 | py | Python | controlm_client/api/config_api.py | apsinha-equinix/controlm-client | f24e0f935c82306074f4e4025cf62c217348dc3f | [
"MIT"
] | 1 | 2021-12-02T08:49:25.000Z | 2021-12-02T08:49:25.000Z | controlm_client/api/config_api.py | apsinha-equinix/controlm-client | f24e0f935c82306074f4e4025cf62c217348dc3f | [
"MIT"
] | null | null | null | controlm_client/api/config_api.py | apsinha-equinix/controlm-client | f24e0f935c82306074f4e4025cf62c217348dc3f | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.18.3
Contact: support@bmc.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from controlm_client.api_client import ApiClient
class ConfigApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_agent(self, ctm, body, **kwargs):  # noqa: E501
    """Add an agent to a Control-M Server.  # noqa: E501

    Only defines the agent in the system; it does not install or configure
    the agent.  Synchronous by default — pass ``async_req=True`` to receive
    a request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str ctm: The Control-M Server the agent is going to be added to. (required)
    :param AddAgentParams body: (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_agent_with_http_info(ctm, body, **kwargs)  # noqa: E501
def add_agent_with_http_info(self, ctm, body, **kwargs):  # noqa: E501
    """Add an agent to a Control-M Server (raw HTTP variant).  # noqa: E501

    Same operation as ``add_agent`` but can also return the full HTTP
    response data.  Synchronous by default — pass ``async_req=True`` for a
    request thread.

    :param async_req bool
    :param str ctm: The Control-M Server the agent is going to be added to. (required)
    :param AddAgentParams body: (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'ctm', 'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_agent" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `add_agent`")  # noqa: E501
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `add_agent`")  # noqa: E501

    path_params = {k: params[k] for k in ('ctm',) if k in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/agent', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_host_to_hostgroup(self, ctm, hostgroup, agent, **kwargs):  # noqa: E501
    """Add an agent to a hostgroup.  # noqa: E501

    Creates the hostgroup if it does not exist.  Synchronous by default —
    pass ``async_req=True`` to receive a request thread instead
    (``thread.get()`` yields the result).

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :param AgentInGroupParams agent: The hostname of the new agent (required)
    :return: AgentsInGroupSuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_host_to_hostgroup_with_http_info(ctm, hostgroup, agent, **kwargs)  # noqa: E501
def add_host_to_hostgroup_with_http_info(self, ctm, hostgroup, agent, **kwargs):  # noqa: E501
    """Add an agent to a hostgroup (raw HTTP variant).  # noqa: E501

    Creates the hostgroup if it does not exist.  Synchronous by default —
    pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :param AgentInGroupParams agent: The hostname of the new agent (required)
    :return: AgentsInGroupSuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'ctm', 'hostgroup', 'agent',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_host_to_hostgroup" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `add_host_to_hostgroup`")  # noqa: E501
    if params.get('hostgroup') is None:
        raise ValueError("Missing the required parameter `hostgroup` when calling `add_host_to_hostgroup`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `add_host_to_hostgroup`")  # noqa: E501

    path_params = {k: params[k] for k in ('ctm', 'hostgroup') if k in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/hostgroup/{hostgroup}/agent', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('agent'),
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='AgentsInGroupSuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_remote_host(self, ctm, **kwargs):  # noqa: E501
    """Add a remote host to a Control-M Server.  # noqa: E501

    Synchronous by default — pass ``async_req=True`` to receive a request
    thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is going to be added to. (required)
    :param AddRemoteHostParams data: The non default, advanced configuration data
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_remote_host_with_http_info(ctm, **kwargs)  # noqa: E501
def add_remote_host_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """Add a remote host to a Control-M Server (raw HTTP variant).  # noqa: E501

    Synchronous by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is going to be added to. (required)
    :param AddRemoteHostParams data: The non default, advanced configuration data
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint ('data' is optional).
    all_params = [
        'ctm', 'data',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_remote_host" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `add_remote_host`")  # noqa: E501

    path_params = {k: params[k] for k in ('ctm',) if k in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/remotehost', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('data'),  # optional advanced configuration payload
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_role(self, role_file, **kwargs):  # noqa: E501
    """Add an authorization role.  # noqa: E501

    Synchronous by default — pass ``async_req=True`` to receive a request
    thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param file role_file: File with content of Role Data. (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_role_with_http_info(role_file, **kwargs)  # noqa: E501
def add_role_with_http_info(self, role_file, **kwargs):  # noqa: E501
    """Add an authorization role (raw HTTP variant).  # noqa: E501

    Uploads the role definition file as multipart form data.  Synchronous
    by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param file role_file: File with content of Role Data. (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'role_file',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_role" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('role_file') is None:
        raise ValueError("Missing the required parameter `role_file` when calling `add_role`")  # noqa: E501

    # The role file travels as a multipart upload, not as a JSON body.
    local_var_files = {'roleFile': params['role_file']} if 'role_file' in params else {}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['multipart/form-data']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/role', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],  # no form parameters
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_role_to_ldap_group(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Add a role to an LDAP group.  # noqa: E501

    Every user belonging to the LDAP group gets all permissions defined in
    the role.  Synchronous by default — pass ``async_req=True`` to receive
    a request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_role_to_ldap_group_with_http_info(ldapgroup, role, **kwargs)  # noqa: E501
def add_role_to_ldap_group_with_http_info(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Add a role to an LDAP group (raw HTTP variant).  # noqa: E501

    Synchronous by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'ldapgroup', 'role',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_role_to_ldap_group" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('ldapgroup') is None:
        raise ValueError("Missing the required parameter `ldapgroup` when calling `add_role_to_ldap_group`")  # noqa: E501
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `add_role_to_ldap_group`")  # noqa: E501

    path_params = {k: params[k] for k in ('ldapgroup', 'role') if k in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/ldap/{ldapgroup}/role/{role}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,  # everything is carried in the URL path
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_role_to_user(self, user, role, **kwargs):  # noqa: E501
    """Add a role to a user.  # noqa: E501

    The user inherits the role's authorization.  Synchronous by default —
    pass ``async_req=True`` to receive a request thread instead
    (``thread.get()`` yields the result).

    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_role_to_user_with_http_info(user, role, **kwargs)  # noqa: E501
def add_role_to_user_with_http_info(self, user, role, **kwargs):  # noqa: E501
    """Add a role to a user (raw HTTP variant).  # noqa: E501

    Synchronous by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'user', 'role',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_role_to_user" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('user') is None:
        raise ValueError("Missing the required parameter `user` when calling `add_role_to_user`")  # noqa: E501
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `add_role_to_user`")  # noqa: E501

    path_params = {k: params[k] for k in ('user', 'role') if k in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/user/{user}/role/{role}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,  # everything is carried in the URL path
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_secret(self, name_value, **kwargs):  # noqa: E501
    """Add a new secret to the secrets vault.  # noqa: E501

    Synchronous by default — pass ``async_req=True`` to receive a request
    thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param SecretKeyValue name_value: The new secret value (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_secret_with_http_info(name_value, **kwargs)  # noqa: E501
def add_secret_with_http_info(self, name_value, **kwargs):  # noqa: E501
    """Add a new secret to the secrets vault (raw HTTP variant).  # noqa: E501

    Synchronous by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param SecretKeyValue name_value: The new secret value (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'name_value',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_secret" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('name_value') is None:
        raise ValueError("Missing the required parameter `name_value` when calling `add_secret`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/secret', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('name_value'),
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_server(self, body, **kwargs):  # noqa: E501
    """Add a Control-M Server to the system.  # noqa: E501

    Sets up a new Control-M server in the system.  Synchronous by default —
    pass ``async_req=True`` to receive a request thread instead
    (``thread.get()`` yields the result).

    :param async_req bool
    :param AddServerParams body: (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_server_with_http_info(body, **kwargs)  # noqa: E501
def add_server_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add a Control-M Server to the system (raw HTTP variant).  # noqa: E501

    Synchronous by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param AddServerParams body: (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_server" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `add_server`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def add_user(self, user_file, **kwargs):  # noqa: E501
    """Add a user.  # noqa: E501

    Synchronous by default — pass ``async_req=True`` to receive a request
    thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param file user_file: File with content of user data. (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.add_user_with_http_info(user_file, **kwargs)  # noqa: E501
def add_user_with_http_info(self, user_file, **kwargs):  # noqa: E501
    """Add a user (raw HTTP variant).  # noqa: E501

    Uploads the user definition file as multipart form data.  Synchronous
    by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param file user_file: File with content of user data. (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'user_file',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_user" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('user_file') is None:
        raise ValueError("Missing the required parameter `user_file` when calling `add_user`")  # noqa: E501

    # The user file travels as a multipart upload, not as a JSON body.
    local_var_files = {'userFile': params['user_file']} if 'user_file' in params else {}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['multipart/form-data']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/user', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],  # no form parameters
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def authorize_ssh_known_remotehost(self, ctm, remotehost, **kwargs):  # noqa: E501
    """Authorize a known SSH remote host.  # noqa: E501

    Synchronous by default — pass ``async_req=True`` to receive a request
    thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host. (required)
    :return: SuccessData
    """
    # Sync and async paths both just delegate to the raw-HTTP variant;
    # the flag strips the HTTP envelope from the returned value.
    kwargs['_return_http_data_only'] = True
    return self.authorize_ssh_known_remotehost_with_http_info(ctm, remotehost, **kwargs)  # noqa: E501
def authorize_ssh_known_remotehost_with_http_info(self, ctm, remotehost, **kwargs):  # noqa: E501
    """Authorize a known SSH remote host (raw HTTP variant).  # noqa: E501

    Synchronous by default — pass ``async_req=True`` for a request thread.

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host. (required)
    :return: SuccessData
    """
    # Keyword arguments accepted by this endpoint.
    all_params = [
        'ctm', 'remotehost',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Merge **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method authorize_ssh_known_remotehost" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `authorize_ssh_known_remotehost`")  # noqa: E501
    if params.get('remotehost') is None:
        raise ValueError("Missing the required parameter `remotehost` when calling `authorize_ssh_known_remotehost`")  # noqa: E501

    path_params = {k: params[k] for k in ('ctm', 'remotehost') if k in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/remotehost/{remotehost}/authorize', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,  # everything is carried in the URL path
        post_params=[],  # no form parameters
        files={},  # no file uploads
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def change_user_password(self, user, **kwargs):  # noqa: E501
    """Change user password  # noqa: E501

    Change user password  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.change_user_password(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: user name (required)
    :param UserPassword password: The new password.
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.change_user_password_with_http_info(user, **kwargs)  # noqa: E501
def change_user_password_with_http_info(self, user, **kwargs):  # noqa: E501
    """Change user password  # noqa: E501

    Change user password  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.change_user_password_with_http_info(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: user name (required)
    :param UserPassword password: The new password.
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['user', 'password', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'user': user}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method change_user_password" % key
            )
        params[key] = val

    # 'user' is a mandatory path parameter.
    if params.get('user') is None:
        raise ValueError("Missing the required parameter `user` when calling `change_user_password`")  # noqa: E501

    path_params = {'user': params['user']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/user/{user}/password/adminUpdate', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('password'),  # optional request body
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create_run_as_user(self, ctm, run_as_user_data, **kwargs):  # noqa: E501
    """Add a new Run-as user  # noqa: E501

    Add a new Run-as user to Control-M server.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.create_run_as_user(ctm, run_as_user_data, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param RunAsUserData run_as_user_data: Run as user data (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.create_run_as_user_with_http_info(ctm, run_as_user_data, **kwargs)  # noqa: E501
def create_run_as_user_with_http_info(self, ctm, run_as_user_data, **kwargs):  # noqa: E501
    """Add a new Run-as user  # noqa: E501

    Add a new Run-as user to Control-M server.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.create_run_as_user_with_http_info(ctm, run_as_user_data, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param RunAsUserData run_as_user_data: Run as user data (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['ctm', 'run_as_user_data', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'ctm': ctm, 'run_as_user_data': run_as_user_data}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_run_as_user" % key
            )
        params[key] = val

    # Both the path parameter and the request body are mandatory.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `create_run_as_user`")  # noqa: E501
    if params.get('run_as_user_data') is None:
        raise ValueError("Missing the required parameter `run_as_user_data` when calling `create_run_as_user`")  # noqa: E501

    path_params = {'ctm': params['ctm']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{ctm}/runasuser', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params['run_as_user_data'],
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_agent(self, ctm, agent, **kwargs):  # noqa: E501
    """delete an agent from Control-M Server  # noqa: E501

    Delete an agent from a Control-M Server. This will not shut the agent down. It only disconnects and removes it from the list.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_agent(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected to. (required)
    :param str agent: The name of the agent to delete. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_agent_with_http_info(ctm, agent, **kwargs)  # noqa: E501
def delete_agent_with_http_info(self, ctm, agent, **kwargs):  # noqa: E501
    """delete an agent from Control-M Server  # noqa: E501

    Delete an agent from a Control-M Server. This will not shut the agent down. It only disconnects and removes it from the list.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_agent_with_http_info(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected to. (required)
    :param str agent: The name of the agent to delete. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['ctm', 'agent', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'ctm': ctm, 'agent': agent}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_agent" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `delete_agent`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `delete_agent`")  # noqa: E501

    path_params = {'ctm': params['ctm'], 'agent': params['agent']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{ctm}/agent/{agent}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_authorization_role(self, role, **kwargs):  # noqa: E501
    """Delete Authorization Role  # noqa: E501

    Delete Authorization Role  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_authorization_role(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_authorization_role_with_http_info(role, **kwargs)  # noqa: E501
def delete_authorization_role_with_http_info(self, role, **kwargs):  # noqa: E501
    """Delete Authorization Role  # noqa: E501

    Delete Authorization Role  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_authorization_role_with_http_info(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['role', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'role': role}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_authorization_role" % key
            )
        params[key] = val

    # 'role' is a mandatory path parameter.
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `delete_authorization_role`")  # noqa: E501

    path_params = {'role': params['role']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/authorization/role/{role}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_host_from_group(self, ctm, hostgroup, host, **kwargs):  # noqa: E501
    """delete an agent from a hostgroup  # noqa: E501

    Delete an agent from the specified hostgroup. If the group is empty it will also be deleted.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_host_from_group(ctm, hostgroup, host, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :param str host: The agent to be deleted (required)
    :return: AgentsInGroupSuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_host_from_group_with_http_info(ctm, hostgroup, host, **kwargs)  # noqa: E501
def delete_host_from_group_with_http_info(self, ctm, hostgroup, host, **kwargs):  # noqa: E501
    """delete an agent from a hostgroup  # noqa: E501

    Delete an agent from the specified hostgroup. If the group is empty it will also be deleted.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_host_from_group_with_http_info(ctm, hostgroup, host, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :param str host: The agent to be deleted (required)
    :return: AgentsInGroupSuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['ctm', 'hostgroup', 'host', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'ctm': ctm, 'hostgroup': hostgroup, 'host': host}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_host_from_group" % key
            )
        params[key] = val

    # All three path parameters are mandatory.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `delete_host_from_group`")  # noqa: E501
    if params.get('hostgroup') is None:
        raise ValueError("Missing the required parameter `hostgroup` when calling `delete_host_from_group`")  # noqa: E501
    if params.get('host') is None:
        raise ValueError("Missing the required parameter `host` when calling `delete_host_from_group`")  # noqa: E501

    path_params = {'ctm': params['ctm'],
                   'hostgroup': params['hostgroup'],
                   'host': params['host']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{ctm}/hostgroup/{hostgroup}/agent/{host}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AgentsInGroupSuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_remote_host(self, ctm, remotehost, **kwargs):  # noqa: E501
    """delete a remote host from Control-M Server  # noqa: E501

    Delete a remote host from a Control-M Server.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_remote_host(ctm, remotehost, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host to delete. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_remote_host_with_http_info(ctm, remotehost, **kwargs)  # noqa: E501
def delete_remote_host_with_http_info(self, ctm, remotehost, **kwargs):  # noqa: E501
    """delete a remote host from Control-M Server  # noqa: E501

    Delete a remote host from a Control-M Server.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_remote_host_with_http_info(ctm, remotehost, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host to delete. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['ctm', 'remotehost', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'ctm': ctm, 'remotehost': remotehost}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_remote_host" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `delete_remote_host`")  # noqa: E501
    if params.get('remotehost') is None:
        raise ValueError("Missing the required parameter `remotehost` when calling `delete_remote_host`")  # noqa: E501

    path_params = {'ctm': params['ctm'],
                   'remotehost': params['remotehost']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{ctm}/remotehost/{remotehost}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_role_from_ldap_group(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Delete a role from LDAP group  # noqa: E501

    Delete a role from LDAP group  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_role_from_ldap_group(ldapgroup, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_role_from_ldap_group_with_http_info(ldapgroup, role, **kwargs)  # noqa: E501
def delete_role_from_ldap_group_with_http_info(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Delete a role from LDAP group  # noqa: E501

    Delete a role from LDAP group  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_role_from_ldap_group_with_http_info(ldapgroup, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['ldapgroup', 'role', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'ldapgroup': ldapgroup, 'role': role}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_role_from_ldap_group" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('ldapgroup') is None:
        raise ValueError("Missing the required parameter `ldapgroup` when calling `delete_role_from_ldap_group`")  # noqa: E501
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `delete_role_from_ldap_group`")  # noqa: E501

    path_params = {'ldapgroup': params['ldapgroup'],
                   'role': params['role']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/authorization/ldap/{ldapgroup}/role/{role}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_run_as_user(self, ctm, agent, user, **kwargs):  # noqa: E501
    """delete Run-as user  # noqa: E501

    Delete Run-as user from Control-M server  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_run_as_user(ctm, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_run_as_user_with_http_info(ctm, agent, user, **kwargs)  # noqa: E501
def delete_run_as_user_with_http_info(self, ctm, agent, user, **kwargs):  # noqa: E501
    """delete Run-as user  # noqa: E501

    Delete Run-as user from Control-M server  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_run_as_user_with_http_info(ctm, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['ctm', 'agent', 'user', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'ctm': ctm, 'agent': agent, 'user': user}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_run_as_user" % key
            )
        params[key] = val

    # All three path parameters are mandatory.
    if params.get('ctm') is None:
        raise ValueError("Missing the required parameter `ctm` when calling `delete_run_as_user`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `delete_run_as_user`")  # noqa: E501
    if params.get('user') is None:
        raise ValueError("Missing the required parameter `user` when calling `delete_run_as_user`")  # noqa: E501

    path_params = {'ctm': params['ctm'],
                   'agent': params['agent'],
                   'user': params['user']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{ctm}/runasuser/{agent}/{user}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_secret(self, name, **kwargs):  # noqa: E501
    """Delete an existing secret  # noqa: E501

    Delete an existing secret from the secrets vault.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_secret(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_secret_with_http_info(name, **kwargs)  # noqa: E501
def delete_secret_with_http_info(self, name, **kwargs):  # noqa: E501
    """Delete an existing secret  # noqa: E501

    Delete an existing secret from the secrets vault.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_secret_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['name', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'name': name}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_secret" % key
            )
        params[key] = val

    # 'name' is a mandatory path parameter.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `delete_secret`")  # noqa: E501

    path_params = {'name': params['name']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/secret/{name}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_user(self, user, **kwargs):  # noqa: E501
    """Delete user  # noqa: E501

    Delete user  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_user(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: The user name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants the payload only, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync or async, the *_with_http_info twin does the actual work.
    return self.delete_user_with_http_info(user, **kwargs)  # noqa: E501
def delete_user_with_http_info(self, user, **kwargs):  # noqa: E501
    """Delete user  # noqa: E501

    Delete user  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.
    >>> thread = api.delete_user_with_http_info(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: The user name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options that
    # every generated API method understands.
    all_params = ['user', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = {'user': user}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_user" % key
            )
        params[key] = val

    # 'user' is a mandatory path parameter.
    if params.get('user') is None:
        raise ValueError("Missing the required parameter `user` when calling `delete_user`")  # noqa: E501

    path_params = {'user': params['user']}  # noqa: E501

    # Only JSON responses are selected for this endpoint.
    header_params = {'Accept': self.api_client.select_header_accept(
        ['application/json'])}  # noqa: E501

    return self.api_client.call_api(
        '/config/authorization/user/{user}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def disable_agent(self, ctm, agent, **kwargs):  # noqa: E501
    """disable agent from the Control-M Server  # noqa: E501

    Disable a Control-M agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.disable_agent(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected too. (required)
    :param str agent: The Control-M agent to be disabled. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.disable_agent_with_http_info(ctm, agent, **kwargs)  # noqa: E501
def disable_agent_with_http_info(self, ctm, agent, **kwargs):  # noqa: E501
    """disable agent from the Control-M Server  # noqa: E501

    Disable a Control-M agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.disable_agent_with_http_info(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected too. (required)
    :param str agent: The Control-M agent to be disabled. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ctm', 'agent']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ctm, agent,
    # kwargs, all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method disable_agent" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ctm' is set
    if ('ctm' not in params or
            params['ctm'] is None):
        raise ValueError("Missing the required parameter `ctm` when calling `disable_agent`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `disable_agent`")  # noqa: E501

    collection_formats = {}

    # Both values are substituted into the URL path template below.
    path_params = {}
    if 'ctm' in params:
        path_params['ctm'] = params['ctm']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into SuccessData.
    return self.api_client.call_api(
        '/config/server/{ctm}/agent/{agent}/disable', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def enable_agent(self, ctm, agent, **kwargs):  # noqa: E501
    """enable agent from the Control-M Server  # noqa: E501

    Enable a Control-M agent. This command does not install or configure the agent. It only enable existing agent in the system.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.enable_agent(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected too. (required)
    :param str agent: The Control-M agent to be enabled. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.enable_agent_with_http_info(ctm, agent, **kwargs)  # noqa: E501
def enable_agent_with_http_info(self, ctm, agent, **kwargs):  # noqa: E501
    """enable agent from the Control-M Server  # noqa: E501

    Enable a Control-M agent. This command does not install or configure the agent. It only enable existing agent in the system.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.enable_agent_with_http_info(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected too. (required)
    :param str agent: The Control-M agent to be enabled. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ctm', 'agent']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ctm, agent,
    # kwargs, all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method enable_agent" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ctm' is set
    if ('ctm' not in params or
            params['ctm'] is None):
        raise ValueError("Missing the required parameter `ctm` when calling `enable_agent`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `enable_agent`")  # noqa: E501

    collection_formats = {}

    # Both values are substituted into the URL path template below.
    path_params = {}
    if 'ctm' in params:
        path_params['ctm'] = params['ctm']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into SuccessData.
    return self.api_client.call_api(
        '/config/server/{ctm}/agent/{agent}/enable', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def failover(self, ctm, **kwargs):  # noqa: E501
    """Perform Manual Failover on a specified Control-M Server  # noqa: E501

    Perform Manual Failover on a specified Control-M Server  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.failover(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.failover_with_http_info(ctm, **kwargs)  # noqa: E501
def failover_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """Perform Manual Failover on a specified Control-M Server  # noqa: E501

    Perform Manual Failover on a specified Control-M Server  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.failover_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ctm']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ctm,
    # kwargs, all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method failover" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ctm' is set
    if ('ctm' not in params or
            params['ctm'] is None):
        raise ValueError("Missing the required parameter `ctm` when calling `failover`")  # noqa: E501

    collection_formats = {}

    # 'ctm' is substituted into the URL path template below.
    path_params = {}
    if 'ctm' in params:
        path_params['ctm'] = params['ctm']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into SuccessData.
    return self.api_client.call_api(
        '/config/server/{ctm}/failover', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_agent_parameters(self, ctm, agent, **kwargs):  # noqa: E501
    """get agent parameters  # noqa: E501

    Get all the parameters of the specified Control-M Agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_agent_parameters(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected to. (required)
    :param str agent: The name of the agent to query. (required)
    :return: KeyValueListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.get_agent_parameters_with_http_info(ctm, agent, **kwargs)  # noqa: E501
def get_agent_parameters_with_http_info(self, ctm, agent, **kwargs):  # noqa: E501
    """get agent parameters  # noqa: E501

    Get all the parameters of the specified Control-M Agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_agent_parameters_with_http_info(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected to. (required)
    :param str agent: The name of the agent to query. (required)
    :return: KeyValueListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ctm', 'agent']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ctm, agent,
    # kwargs, all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_agent_parameters" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ctm' is set
    if ('ctm' not in params or
            params['ctm'] is None):
        raise ValueError("Missing the required parameter `ctm` when calling `get_agent_parameters`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `get_agent_parameters`")  # noqa: E501

    collection_formats = {}

    # Both values are substituted into the URL path template below.
    path_params = {}
    if 'ctm' in params:
        path_params['ctm'] = params['ctm']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into KeyValueListResult.
    return self.api_client.call_api(
        '/config/server/{ctm}/agent/{agent}/params', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='KeyValueListResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_agents(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server agents  # noqa: E501

    Get all the agents of the specified Control-M Server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_agents(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server to query. Optionally you can filter agent name of host or alias of the Control-M Agent (required)
    :param str agent: Optionally case insensitive agent name filter of host or alias of the Control-M Agent. `ctm server:agents::get ControlM AgentName` returns all agents which names start with `agentname`
    :return: AgentDetailsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.get_agents_with_http_info(ctm, **kwargs)  # noqa: E501
def get_agents_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server agents  # noqa: E501

    Get all the agents of the specified Control-M Server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_agents_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server to query. Optionally you can filter agent name of host or alias of the Control-M Agent (required)
    :param str agent: Optionally case insensitive agent name filter of host or alias of the Control-M Agent. `ctm server:agents::get ControlM AgentName` returns all agents which names start with `agentname`
    :return: AgentDetailsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ctm', 'agent']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ctm,
    # kwargs, all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_agents" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ctm' is set
    if ('ctm' not in params or
            params['ctm'] is None):
        raise ValueError("Missing the required parameter `ctm` when calling `get_agents`")  # noqa: E501

    collection_formats = {}

    # 'ctm' goes into the URL path; the optional 'agent' filter is sent
    # as a query-string parameter.
    path_params = {}
    if 'ctm' in params:
        path_params['ctm'] = params['ctm']  # noqa: E501

    query_params = []
    if 'agent' in params:
        query_params.append(('agent', params['agent']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into AgentDetailsList.
    return self.api_client.call_api(
        '/config/server/{ctm}/agents', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentDetailsList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_authorization_roles(self, **kwargs):  # noqa: E501
    """Get Authorization Roles  # noqa: E501

    Get Authorization Roles  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_authorization_roles(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name.
    :param str description: The Role description.
    :return: RoleHeaderList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.get_all_authorization_roles_with_http_info(**kwargs)  # noqa: E501
def get_all_authorization_roles_with_http_info(self, **kwargs):  # noqa: E501
    """Get Authorization Roles  # noqa: E501

    Get Authorization Roles  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_authorization_roles_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name.
    :param str description: The Role description.
    :return: RoleHeaderList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts (both optional), plus the
    # framework-internal switches.
    all_params = ['role', 'description']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, kwargs,
    # all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_authorization_roles" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Optional filters are sent as query-string parameters.
    query_params = []
    if 'role' in params:
        query_params.append(('role', params['role']))  # noqa: E501
    if 'description' in params:
        query_params.append(('description', params['description']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into RoleHeaderList.
    return self.api_client.call_api(
        '/config/authorization/roles', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='RoleHeaderList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_roles_associated_with_ldap(self, ldapgroup, **kwargs):  # noqa: E501
    """Get Authorization Roles associated with an LDAP group  # noqa: E501

    Get Authorization Roles associated with an LDAP group  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_roles_associated_with_ldap(ldapgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ldapgroup: Name of Ldap group (required)
    :param str role: The Role name.
    :return: list[str]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.get_all_roles_associated_with_ldap_with_http_info(ldapgroup, **kwargs)  # noqa: E501
def get_all_roles_associated_with_ldap_with_http_info(self, ldapgroup, **kwargs):  # noqa: E501
    """Get Authorization Roles associated with an LDAP group  # noqa: E501

    Get Authorization Roles associated with an LDAP group  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_roles_associated_with_ldap_with_http_info(ldapgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ldapgroup: Name of Ldap group (required)
    :param str role: The Role name.
    :return: list[str]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ldapgroup', 'role']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ldapgroup,
    # kwargs, all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_roles_associated_with_ldap" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ldapgroup' is set
    if ('ldapgroup' not in params or
            params['ldapgroup'] is None):
        raise ValueError("Missing the required parameter `ldapgroup` when calling `get_all_roles_associated_with_ldap`")  # noqa: E501

    collection_formats = {}

    # 'ldapgroup' goes into the URL path; the optional 'role' filter is
    # sent as a query-string parameter.
    path_params = {}
    if 'ldapgroup' in params:
        path_params['ldapgroup'] = params['ldapgroup']  # noqa: E501

    query_params = []
    if 'role' in params:
        query_params.append(('role', params['role']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into a list of strings.
    return self.api_client.call_api(
        '/config/authorization/ldap/{ldapgroup}/roles', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[str]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_users(self, **kwargs):  # noqa: E501
    """Get users  # noqa: E501

    Get users  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_users(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The user name.
    :param str full_name: The user full name.
    :param str description: The user description.
    :return: list[UserHeader]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.get_all_users_with_http_info(**kwargs)  # noqa: E501
def get_all_users_with_http_info(self, **kwargs):  # noqa: E501
    """Get users  # noqa: E501

    Get users  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_users_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The user name.
    :param str full_name: The user full name.
    :param str description: The user description.
    :return: list[UserHeader]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts (all optional), plus the
    # framework-internal switches.
    all_params = ['name', 'full_name', 'description']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, kwargs,
    # all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_users" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Optional filters are sent as query-string parameters; note the
    # python name 'full_name' maps to the wire name 'fullName'.
    query_params = []
    if 'name' in params:
        query_params.append(('name', params['name']))  # noqa: E501
    if 'full_name' in params:
        query_params.append(('fullName', params['full_name']))  # noqa: E501
    if 'description' in params:
        query_params.append(('description', params['description']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into list[UserHeader].
    return self.api_client.call_api(
        '/config/authorization/users', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[UserHeader]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_hostgroups(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server hostgroups  # noqa: E501

    Get all the hostgroups of the specified Control-M Server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hostgroups(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroups belong to. (required)
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.get_hostgroups_with_http_info(ctm, **kwargs)  # noqa: E501
def get_hostgroups_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server hostgroups  # noqa: E501

    Get all the hostgroups of the specified Control-M Server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hostgroups_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroups belong to. (required)
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ctm']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ctm,
    # kwargs, all_params); only known keys are read from it afterwards.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_hostgroups" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ctm' is set
    if ('ctm' not in params or
            params['ctm'] is None):
        raise ValueError("Missing the required parameter `ctm` when calling `get_hostgroups`")  # noqa: E501

    collection_formats = {}

    # 'ctm' is substituted into the URL path template below.
    path_params = {}
    if 'ctm' in params:
        path_params['ctm'] = params['ctm']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set) and deserializes into StringListResult.
    return self.api_client.call_api(
        '/config/server/{ctm}/hostgroups', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StringListResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_hosts_in_group(self, ctm, hostgroup, **kwargs):  # noqa: E501
    """get hostgroup agents  # noqa: E501

    Get the agents that compose the specified hostgroup  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hosts_in_group(ctm, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: AgentsInGroupListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the deserialized data when synchronous and
    # the request thread when async_req=True, so one call covers both.
    return self.get_hosts_in_group_with_http_info(ctm, hostgroup, **kwargs)  # noqa: E501
def get_hosts_in_group_with_http_info(self, ctm, hostgroup, **kwargs):  # noqa: E501
    """get hostgroup agents  # noqa: E501

    Get the agents that compose the specified hostgroup  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hosts_in_group_with_http_info(ctm, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: AgentsInGroupListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the framework-internal switches.
    all_params = ['ctm', 'hostgroup']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots every local bound so far (self, ctm,
    # hostgroup, kwargs, all_params); only known keys are read from it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_hosts_in_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ctm' is set
    if ('ctm' not in params or
            params['ctm'] is None):
        raise ValueError("Missing the required parameter `ctm` when calling `get_hosts_in_group`")  # noqa: E501
    # verify the required parameter 'hostgroup' is set
    if ('hostgroup' not in params or
            params['hostgroup'] is None):
        raise ValueError("Missing the required parameter `hostgroup` when calling `get_hosts_in_group`")  # noqa: E501

    collection_formats = {}

    # Both values are substituted into the URL path template below.
    path_params = {}
    if 'ctm' in params:
        path_params['ctm'] = params['ctm']  # noqa: E501
    if 'hostgroup' in params:
        path_params['hostgroup'] = params['hostgroup']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: Bearer token, resolved by the ApiClient.
    auth_settings = ['Bearer']  # noqa: E501

    # The shared ApiClient performs the HTTP call (and async dispatch
    # when async_req is set), deserializing into AgentsInGroupListResult.
    return self.api_client.call_api(
        '/config/server/{ctm}/hostgroup/{hostgroup}/agents', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentsInGroupListResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_remote_host_properties(self, ctm, remotehost, **kwargs):  # noqa: E501
    """get a remote host configuration from Control-M Server  # noqa: E501

    Get the remote host configuration properties from the Control-M Server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_remote_host_properties(ctm, remotehost, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host. (required)
    :return: AddRemoteHostParams
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_remote_host_properties_with_http_info(ctm, remotehost, **kwargs)  # noqa: E501
def get_remote_host_properties_with_http_info(self, ctm, remotehost, **kwargs):  # noqa: E501
    """get a remote host configuration from Control-M Server  # noqa: E501

    Get the remote host configuration properties from the Control-M Server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_remote_host_properties_with_http_info(ctm, remotehost, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host. (required)
    :return: AddRemoteHostParams
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('ctm', 'remotehost', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_remote_host_properties" % key
            )
    # verify the required parameter 'ctm' is set
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `get_remote_host_properties`")  # noqa: E501
    # verify the required parameter 'remotehost' is set
    if remotehost is None:
        raise ValueError("Missing the required parameter `remotehost` when calling `get_remote_host_properties`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/server/{ctm}/remotehost/{remotehost}', 'GET',
        {'ctm': ctm, 'remotehost': remotehost},  # path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AddRemoteHostParams',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_remote_hosts(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server remote hosts  # noqa: E501

    Get all the remote hosts of the specified Control-M Server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_remote_hosts(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server to query. (required)
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_remote_hosts_with_http_info(ctm, **kwargs)  # noqa: E501
def get_remote_hosts_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server remote hosts  # noqa: E501

    Get all the remote hosts of the specified Control-M Server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_remote_hosts_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server to query. (required)
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('ctm', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_remote_hosts" % key
            )
    # verify the required parameter 'ctm' is set
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `get_remote_hosts`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/server/{ctm}/remotehosts', 'GET',
        {'ctm': ctm},  # path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='StringListResult',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_role(self, role, **kwargs):  # noqa: E501
    """Get Authorization Role  # noqa: E501

    Get Authorization Role.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_role(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :return: RoleData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_role_with_http_info(role, **kwargs)  # noqa: E501
def get_role_with_http_info(self, role, **kwargs):  # noqa: E501
    """Get Authorization Role  # noqa: E501

    Get Authorization Role.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_role_with_http_info(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :return: RoleData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('role', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_role" % key
            )
    # verify the required parameter 'role' is set
    if role is None:
        raise ValueError("Missing the required parameter `role` when calling `get_role`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/authorization/role/{role}', 'GET',
        {'role': role},  # path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='RoleData',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_role_associates(self, role, **kwargs):  # noqa: E501
    """Get all authorization entities associated with role  # noqa: E501

    Get all authorization entities associated with role.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_role_associates(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: role name. (required)
    :return: list[AssociateData]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_role_associates_with_http_info(role, **kwargs)  # noqa: E501
def get_role_associates_with_http_info(self, role, **kwargs):  # noqa: E501
    """Get all authorization entities associated with role  # noqa: E501

    Get all authorization entities associated with role.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_role_associates_with_http_info(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: role name. (required)
    :return: list[AssociateData]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('role', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_role_associates" % key
            )
    # verify the required parameter 'role' is set
    if role is None:
        raise ValueError("Missing the required parameter `role` when calling `get_role_associates`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/authorization/role/{role}/associates', 'GET',
        {'role': role},  # path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[AssociateData]',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_run_as_user(self, ctm, agent, user, **kwargs):  # noqa: E501
    """Get Run-as user  # noqa: E501

    Get Run-as user details from Control-M server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_run_as_user(ctm, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :return: RunAsUserData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_run_as_user_with_http_info(ctm, agent, user, **kwargs)  # noqa: E501
def get_run_as_user_with_http_info(self, ctm, agent, user, **kwargs):  # noqa: E501
    """Get Run-as user  # noqa: E501

    Get Run-as user details from Control-M server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_run_as_user_with_http_info(ctm, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :return: RunAsUserData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('ctm', 'agent', 'user', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_run_as_user" % key
            )
    # verify the required parameter 'ctm' is set
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `get_run_as_user`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `get_run_as_user`")  # noqa: E501
    # verify the required parameter 'user' is set
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `get_run_as_user`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/server/{ctm}/runasuser/{agent}/{user}', 'GET',
        {'ctm': ctm, 'agent': agent, 'user': user},  # path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='RunAsUserData',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_run_as_users_list(self, ctm, **kwargs):  # noqa: E501
    """Get Run-as user list that match the requested search criteria.  # noqa: E501

    Get Run-as user list that match the requested search criteria from
    Control-M server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_run_as_users_list(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str user: The Run-as user.
    :param str agent: The agent.
    :return: RunAsUsersList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_run_as_users_list_with_http_info(ctm, **kwargs)  # noqa: E501
def get_run_as_users_list_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """Get Run-as user list that match the requested search criteria.  # noqa: E501

    Get Run-as user list that match the requested search criteria from
    Control-M server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_run_as_users_list_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str user: The Run-as user.
    :param str agent: The agent.
    :return: RunAsUsersList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('ctm', 'user', 'agent', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_run_as_users_list" % key
            )
    # verify the required parameter 'ctm' is set
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `get_run_as_users_list`")  # noqa: E501

    # Optional search criteria travel as query parameters; the original
    # ordering (user before agent) is preserved.
    query_params = []
    if 'user' in kwargs:
        query_params.append(('user', kwargs['user']))  # noqa: E501
    if 'agent' in kwargs:
        query_params.append(('agent', kwargs['agent']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/server/{ctm}/runasusers', 'GET',
        {'ctm': ctm},  # path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='RunAsUsersList',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_server_parameters(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server parameters  # noqa: E501

    Get all the parameters of the specified Control-M Server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_server_parameters(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server to query. (required)
    :return: KeyValueListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_server_parameters_with_http_info(ctm, **kwargs)  # noqa: E501
def get_server_parameters_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """get Control-M Server parameters  # noqa: E501

    Get all the parameters of the specified Control-M Server.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_server_parameters_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server to query. (required)
    :return: KeyValueListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('ctm', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_server_parameters" % key
            )
    # verify the required parameter 'ctm' is set
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `get_server_parameters`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/server/{ctm}/params', 'GET',
        {'ctm': ctm},  # path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='KeyValueListResult',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_servers(self, **kwargs):  # noqa: E501
    """get all the Control-M Servers name and hostname in the system  # noqa: E501

    Get the names and hostnames of all Control-M Servers in the system.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_servers(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: CtmDetailsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_servers_with_http_info(**kwargs)  # noqa: E501
def get_servers_with_http_info(self, **kwargs):  # noqa: E501
    """get all the Control-M Servers name and hostname in the system  # noqa: E501

    Get the names and hostnames of all Control-M Servers in the system.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_servers_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: CtmDetailsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no positional parameters; only the generic
    # request-control keywords are accepted.
    recognized = ('async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_servers" % key
            )

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/servers', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CtmDetailsList',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_user(self, user, **kwargs):  # noqa: E501
    """Get user  # noqa: E501

    Get user.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_user(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: The user name. (required)
    :return: UserData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.get_user_with_http_info(user, **kwargs)  # noqa: E501
def get_user_with_http_info(self, user, **kwargs):  # noqa: E501
    """Get user  # noqa: E501

    Get user.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_user_with_http_info(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: The user name. (required)
    :return: UserData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals.
    recognized = ('user', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user" % key
            )
    # verify the required parameter 'user' is set
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `get_user`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/authorization/user/{user}', 'GET',
        {'user': user},  # path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='UserData',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_secrets(self, **kwargs):  # noqa: E501
    """Get list of secret names  # noqa: E501

    Get the list of names of all the secrets in the vault.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_secrets(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.list_secrets_with_http_info(**kwargs)  # noqa: E501
def list_secrets_with_http_info(self, **kwargs):  # noqa: E501
    """Get list of secret names  # noqa: E501

    Get the list of names of all the secrets in the vault.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_secrets_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no positional parameters; only the generic
    # request-control keywords are accepted.
    recognized = ('async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_secrets" % key
            )

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/secrets', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='StringListResult',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def ping_agent(self, ctm, agent, **kwargs):  # noqa: E501
    """ping to the agent in the Control-M Server  # noqa: E501

    Ping a Control-M agent.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.ping_agent(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M agent. (required)
    :param PingAgentParams body:
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized
    # payload, so always force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # The delegate yields either the payload (synchronous) or the request
    # thread (async_req=True); both are handed straight back to the caller.
    return self.ping_agent_with_http_info(ctm, agent, **kwargs)  # noqa: E501
def ping_agent_with_http_info(self, ctm, agent, **kwargs):  # noqa: E501
    """ping to the agent in the Control-M Server  # noqa: E501

    Ping a Control-M agent.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.ping_agent_with_http_info(ctm, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M agent. (required)
    :param PingAgentParams body:
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands in addition to its positionals;
    # 'body' carries the optional PingAgentParams request payload.
    recognized = ('ctm', 'agent', 'body', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method ping_agent" % key
            )
    # verify the required parameter 'ctm' is set
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `ping_agent`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `ping_agent`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/config/server/{ctm}/agent/{agent}/ping', 'POST',
        {'ctm': ctm, 'agent': agent},  # path parameters
        [],  # no query parameters
        header_params,
        body=kwargs.get('body'),  # optional request payload
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def remove_controlm_server(self, ctm, **kwargs):  # noqa: E501
    """Delete Control-M server  # noqa: E501

    Delete Control-M server  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_controlm_server(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: Control-M Server host name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always ask the low-level helper for the payload only.  Whether the
    # call is sync (returns the deserialized data) or async (returns the
    # request thread), the helper's result can be handed back unchanged.
    kwargs['_return_http_data_only'] = True
    return self.remove_controlm_server_with_http_info(ctm, **kwargs)  # noqa: E501
def remove_controlm_server_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """Delete Control-M server  # noqa: E501

    Delete Control-M server  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_controlm_server_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: Control-M Server host name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('ctm', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_controlm_server" % key
            )
    # 'ctm' is mandatory: it is substituted into the request path.
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `remove_controlm_server`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}', 'DELETE',
        {'ctm': ctm},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def remove_role_from_user(self, user, role, **kwargs):  # noqa: E501
    """Remove a role from a user  # noqa: E501

    Remove a role from a user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_role_from_user(user, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; sync and async results alike are
    # exactly what the _with_http_info helper hands back.
    kwargs['_return_http_data_only'] = True
    return self.remove_role_from_user_with_http_info(user, role, **kwargs)  # noqa: E501
def remove_role_from_user_with_http_info(self, user, role, **kwargs):  # noqa: E501
    """Remove a role from a user  # noqa: E501

    Remove a role from a user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_role_from_user_with_http_info(user, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('user', 'role', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_role_from_user" % key
            )
    # Both path parameters are mandatory.
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `remove_role_from_user`")  # noqa: E501
    if role is None:
        raise ValueError("Missing the required parameter `role` when calling `remove_role_from_user`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/user/{user}/role/{role}', 'DELETE',
        {'user': user, 'role': role},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def set_agent_parameter(self, ctm, agent, name, body, **kwargs):  # noqa: E501
    """set agent parameter  # noqa: E501

    Set the value of the specified parameter in the specified agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.set_agent_parameter(ctm, agent, name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected to. (required)
    :param str agent: The name of the agent to update. (required)
    :param str name: The parameter name. (required)
    :param Value body: The new parameter value. (required)
    :return: KeyValue
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.set_agent_parameter_with_http_info(ctm, agent, name, body, **kwargs)  # noqa: E501
def set_agent_parameter_with_http_info(self, ctm, agent, name, body, **kwargs):  # noqa: E501
    """set agent parameter  # noqa: E501

    Set the value of the specified parameter in the specified agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.set_agent_parameter_with_http_info(ctm, agent, name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server the agent is connected to. (required)
    :param str agent: The name of the agent to update. (required)
    :param str name: The parameter name. (required)
    :param Value body: The new parameter value. (required)
    :return: KeyValue
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('ctm', 'agent', 'name', 'body', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_agent_parameter" % key
            )
    # All four positional parameters are mandatory.
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `set_agent_parameter`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `set_agent_parameter`")  # noqa: E501
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `set_agent_parameter`")  # noqa: E501
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `set_agent_parameter`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/agent/{agent}/param/{name}', 'POST',
        {'ctm': ctm, 'agent': agent, 'name': name},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='KeyValue',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def set_system_param(self, name, new_value, **kwargs):  # noqa: E501
    """Set value of an em system parameter  # noqa: E501

    Set value of an enterprise management system parameter  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.set_system_param(name, new_value, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Parameter name (required)
    :param Value new_value: Param new value (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.set_system_param_with_http_info(name, new_value, **kwargs)  # noqa: E501
def set_system_param_with_http_info(self, name, new_value, **kwargs):  # noqa: E501
    """Set value of an em system parameter  # noqa: E501

    Set value of an enterprise management system parameter  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.set_system_param_with_http_info(name, new_value, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Parameter name (required)
    :param Value new_value: Param new value (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('name', 'new_value', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_system_param" % key
            )
    # Both positional parameters are mandatory; 'new_value' becomes the
    # request body, 'name' is substituted into the path.
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `set_system_param`")  # noqa: E501
    if new_value is None:
        raise ValueError("Missing the required parameter `new_value` when calling `set_system_param`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/em/param/{name}', 'POST',
        {'name': name},
        [],
        header_params,
        body=new_value,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def setasprimary(self, ctm, **kwargs):  # noqa: E501
    """Set secondary server as Primary on a specified Control-M Server  # noqa: E501

    Set secondary server as Primary on a specified Control-M Server  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.setasprimary(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.setasprimary_with_http_info(ctm, **kwargs)  # noqa: E501
def setasprimary_with_http_info(self, ctm, **kwargs):  # noqa: E501
    """Set secondary server as Primary on a specified Control-M Server  # noqa: E501

    Set secondary server as Primary on a specified Control-M Server  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.setasprimary_with_http_info(ctm, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('ctm', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method setasprimary" % key
            )
    # 'ctm' is mandatory: it is substituted into the request path.
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `setasprimary`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/setasprimary', 'PUT',
        {'ctm': ctm},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def test_run_as_user(self, ctm, agent, user, **kwargs):  # noqa: E501
    """Test existed Run-as user  # noqa: E501

    Test existing Run-as user in Control-M server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.test_run_as_user(ctm, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :param RunAsUserDetailsData run_as_user_details_data: Run as user details data
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.test_run_as_user_with_http_info(ctm, agent, user, **kwargs)  # noqa: E501
def test_run_as_user_with_http_info(self, ctm, agent, user, **kwargs):  # noqa: E501
    """Test existed Run-as user  # noqa: E501

    Test existing Run-as user in Control-M server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.test_run_as_user_with_http_info(ctm, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :param RunAsUserDetailsData run_as_user_details_data: Run as user details data
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('ctm', 'agent', 'user', 'run_as_user_details_data',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method test_run_as_user" % key
            )
    # The three path parameters are mandatory; the details body is not.
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `test_run_as_user`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `test_run_as_user`")  # noqa: E501
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `test_run_as_user`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/runasuser/{agent}/{user}/test', 'POST',
        {'ctm': ctm, 'agent': agent, 'user': user},
        [],
        header_params,
        # Optional request body; None when the caller omitted it.
        body=kwargs.get('run_as_user_details_data'),
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def update_role(self, role, role_file, **kwargs):  # noqa: E501
    """Update Authorization Role  # noqa: E501

    Update Authorization Role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_role(role, role_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :param file role_file: File with content of Role Data. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.update_role_with_http_info(role, role_file, **kwargs)  # noqa: E501
def update_role_with_http_info(self, role, role_file, **kwargs):  # noqa: E501
    """Update Authorization Role  # noqa: E501

    Update Authorization Role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_role_with_http_info(role, role_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :param file role_file: File with content of Role Data. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('role', 'role_file', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_role" % key
            )
    # Both the path parameter and the uploaded file are mandatory.
    if role is None:
        raise ValueError("Missing the required parameter `role` when calling `update_role`")  # noqa: E501
    if role_file is None:
        raise ValueError("Missing the required parameter `role_file` when calling `update_role`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`: the role file travels as a
        # multipart form upload, not as a JSON body.
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['multipart/form-data']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/role/{role}', 'POST',
        {'role': role},
        [],
        header_params,
        body=None,
        post_params=[],
        files={'roleFile': role_file},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def update_run_as_user(self, ctm, agent, user, run_as_user_details_data, **kwargs):  # noqa: E501
    """Update Run-as user  # noqa: E501

    Update Run-as user details in Control-M server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_run_as_user(ctm, agent, user, run_as_user_details_data, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :param RunAsUserDetailsData run_as_user_details_data: Run as user details data (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.update_run_as_user_with_http_info(ctm, agent, user, run_as_user_details_data, **kwargs)  # noqa: E501
def update_run_as_user_with_http_info(self, ctm, agent, user, run_as_user_details_data, **kwargs):  # noqa: E501
    """Update Run-as user  # noqa: E501

    Update Run-as user details in Control-M server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_run_as_user_with_http_info(ctm, agent, user, run_as_user_details_data, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ctm: The Control-M Server. (required)
    :param str agent: The Control-M Agent (required)
    :param str user: The user name (required)
    :param RunAsUserDetailsData run_as_user_details_data: Run as user details data (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('ctm', 'agent', 'user', 'run_as_user_details_data',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_run_as_user" % key
            )
    # All four positional parameters are mandatory.
    if ctm is None:
        raise ValueError("Missing the required parameter `ctm` when calling `update_run_as_user`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `update_run_as_user`")  # noqa: E501
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `update_run_as_user`")  # noqa: E501
    if run_as_user_details_data is None:
        raise ValueError("Missing the required parameter `run_as_user_details_data` when calling `update_run_as_user`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{ctm}/runasuser/{agent}/{user}', 'POST',
        {'ctm': ctm, 'agent': agent, 'user': user},
        [],
        header_params,
        body=run_as_user_details_data,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def update_secret(self, name, **kwargs):  # noqa: E501
    """Update an existing secret  # noqa: E501

    Update an existing secret in the secrets vault.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_secret(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :param SecretValue value: The new value for the secret
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.update_secret_with_http_info(name, **kwargs)  # noqa: E501
def update_secret_with_http_info(self, name, **kwargs):  # noqa: E501
    """Update an existing secret  # noqa: E501

    Update an existing secret in the secrets vault.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_secret_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :param SecretValue value: The new value for the secret
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = ('name', 'value', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_secret" % key
            )
    # 'name' is mandatory; the new value is an optional request body.
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `update_secret`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/secret/{name}', 'POST',
        {'name': name},
        [],
        header_params,
        # Optional request body; None when the caller omitted it.
        body=kwargs.get('value'),
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def update_user(self, user, user_file, **kwargs):  # noqa: E501
    """Update user  # noqa: E501

    Update user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_user(user, user_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: The user name. (required)
    :param file user_file: File with content of user data. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; the helper's return value (data or
    # request thread) is passed through untouched.
    kwargs['_return_http_data_only'] = True
    return self.update_user_with_http_info(user, user_file, **kwargs)  # noqa: E501
def update_user_with_http_info(self, user, user_file, **kwargs):  # noqa: E501
    """Update user  # noqa: E501

    Update user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_user_with_http_info(user, user_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: The user name. (required)
    :param file user_file: File with content of user data. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied
    :raises ValueError: if `user` or `user_file` is missing or None
    """
    # Keyword arguments this endpoint accepts, plus the api_client
    # control flags that every generated method supports.
    all_params = ['user', 'user_file']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Capture the named arguments (self, user, user_file, kwargs) in one
    # dict, then fold the validated **kwargs entries into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject typos / unsupported options up front.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user' is set
    if ('user' not in params or
            params['user'] is None):
        raise ValueError("Missing the required parameter `user` when calling `update_user`")  # noqa: E501
    # verify the required parameter 'user_file' is set
    if ('user_file' not in params or
            params['user_file'] is None):
        raise ValueError("Missing the required parameter `user_file` when calling `update_user`")  # noqa: E501

    collection_formats = {}

    # `user` is interpolated into the URL path template below.
    path_params = {}
    if 'user' in params:
        path_params['user'] = params['user']  # noqa: E501

    query_params = []

    header_params = {}

    # The user file is sent as a multipart/form-data file part.
    form_params = []
    local_var_files = {}
    if 'user_file' in params:
        local_var_files['userFile'] = params['user_file']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Delegate the actual HTTP round trip to the shared api_client.
    return self.api_client.call_api(
        '/config/authorization/user/{user}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 40.371726 | 210 | 0.603153 | 24,624 | 209,610 | 4.910291 | 0.012264 | 0.051873 | 0.023621 | 0.030369 | 0.986924 | 0.983575 | 0.977885 | 0.971516 | 0.96557 | 0.96031 | 0 | 0.016227 | 0.305567 | 209,610 | 5,191 | 211 | 40.379503 | 0.814434 | 0.335628 | 0 | 0.820864 | 1 | 0 | 0.193105 | 0.049925 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036178 | false | 0.003512 | 0.001405 | 0 | 0.091675 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
90ba137d63321b3fef207fe8a44bd26393e767cb | 1,139 | py | Python | tests/test_dataframe.py | opardal/pySGS | 8887bddcb9aa39ec4fc8620e471d553135d5b6be | [
"MIT"
] | 66 | 2018-02-02T16:23:11.000Z | 2022-01-29T21:34:40.000Z | tests/test_dataframe.py | opardal/pySGS | 8887bddcb9aa39ec4fc8620e471d553135d5b6be | [
"MIT"
] | 22 | 2018-05-28T00:01:30.000Z | 2021-03-20T16:25:08.000Z | tests/test_dataframe.py | opardal/pySGS | 8887bddcb9aa39ec4fc8620e471d553135d5b6be | [
"MIT"
] | 16 | 2019-01-13T16:07:45.000Z | 2021-11-19T13:18:01.000Z | import pytest
from sgs.dataframe import dataframe
@pytest.mark.dataframe
def test_dataframe_one_ts():
    """A single time-series code produces a one-column frame."""
    result = dataframe(4, start="02/01/2018", end="31/01/2018")
    assert result.shape == (20, 1)
@pytest.mark.dataframe
def test_dataframe_multiple_ts():
    """Several time-series codes produce one column per code."""
    codes = [12, 433]
    result = dataframe(codes, start="02/01/2018", end="31/01/2018")
    assert result.shape == (23, 2)
@pytest.mark.dataframe
def test_dataframe_one_with_strict_as_false():
    """Default (non-strict) lookup keeps the single available row."""
    result = dataframe(20577, start='17/08/2019', end='18/08/2019')
    assert result.shape == (1, 1)
@pytest.mark.dataframe
def test_dataframe_one_with_strict_as_true():
    """Strict lookup filters out rows outside the requested range."""
    result = dataframe(20577, start='17/08/2019', end='18/08/2019', strict=True)
    assert result.shape == (0, 1)
@pytest.mark.dataframe
def test_dataframe_multiple_with_strict_as_false():
    """Non-strict lookup of two codes keeps the single available row."""
    codes = [20577, 20669]
    result = dataframe(codes, start='17/08/2019', end='18/08/2019')
    assert result.shape == (1, 2)
@pytest.mark.dataframe
def test_dataframe_multiple_with_strict_as_true():
    """Strict lookup of two codes yields an empty two-column frame."""
    codes = [20577, 20669]
    result = dataframe(codes, start='17/08/2019', end='18/08/2019', strict=True)
    assert result.shape == (0, 2)
| 29.973684 | 79 | 0.695347 | 182 | 1,139 | 4.153846 | 0.21978 | 0.063492 | 0.150794 | 0.174603 | 0.875661 | 0.851852 | 0.851852 | 0.797619 | 0.738095 | 0.738095 | 0 | 0.15036 | 0.147498 | 1,139 | 37 | 80 | 30.783784 | 0.628218 | 0 | 0 | 0.275862 | 0 | 0 | 0.105356 | 0 | 0 | 0 | 0 | 0 | 0.206897 | 1 | 0.206897 | false | 0 | 0.068966 | 0 | 0.275862 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2918ddb479a76ab52bcac2b8b12832c4294f1d19 | 10,396 | py | Python | test/programytest/parser/template/node_tests/test_interval.py | ItsPhant/program-y | c2b211fcaf8cedc7d6d95a8ea9470a913efa1622 | [
"MIT"
] | null | null | null | test/programytest/parser/template/node_tests/test_interval.py | ItsPhant/program-y | c2b211fcaf8cedc7d6d95a8ea9470a913efa1622 | [
"MIT"
] | null | null | null | test/programytest/parser/template/node_tests/test_interval.py | ItsPhant/program-y | c2b211fcaf8cedc7d6d95a8ea9470a913efa1622 | [
"MIT"
] | 1 | 2020-02-21T17:58:05.000Z | 2020-02-21T17:58:05.000Z | import xml.etree.ElementTree as ET
from programy.parser.template.nodes.base import TemplateNode
from programy.parser.template.nodes.interval import TemplateIntervalNode
from programy.parser.template.nodes.word import TemplateWordNode
from programytest.parser.template.base import TemplateTestsBaseClass
class MockTemplateIntervalNode(TemplateIntervalNode):
    """Interval-node test double whose resolution always fails.

    Used to exercise the template engine's exception-handling path.
    """

    def __init__(self):
        super(MockTemplateIntervalNode, self).__init__()

    def resolve_to_string(self, bot, clientid):
        # Always blow up so the caller's error handling is exercised.
        raise Exception("This is an error")
class TemplateIntervalNodeTests(TemplateTestsBaseClass):
    """Tests for TemplateIntervalNode covering every supported interval style.

    Each style test builds a template tree containing a single interval
    node, resolves it, and checks the rendered difference between the two
    timestamps. The repeated setup lives in ``_assert_interval``.
    """

    def _assert_interval(self, style, interval_from, interval_to, expected):
        """Resolve one interval node with *style* and assert it renders *expected*."""
        root = TemplateNode()
        self.assertIsNotNone(root)
        self.assertIsNotNone(root.children)
        self.assertEqual(len(root.children), 0)

        node = TemplateIntervalNode()
        node.interval_format = TemplateWordNode("%c")
        node.style = TemplateWordNode(style)
        node.interval_from = TemplateWordNode(interval_from)
        node.interval_to = TemplateWordNode(interval_to)
        self.assertIsNotNone(node)

        root.append(node)
        self.assertEqual(len(root.children), 1)

        response = root.resolve(self.bot, self.clientid)
        self.assertIsNotNone(response)
        self.assertEqual(response, expected)

    def test_node_years(self):
        # An unconfigured node renders the placeholder string.
        self.assertEqual("[INTERVAL]", TemplateIntervalNode().to_string())
        self._assert_interval("years", "Thu Oct 06 16:35:11 2014", "Fri Oct 07 16:35:11 2016", "2")

    def test_node_months(self):
        self._assert_interval("months", "Thu Jul 6 16:35:11 2014", "Fri Oct 7 16:35:11 2016", "3")

    def test_node_weeks(self):
        self._assert_interval("weeks", "Thu Oct 1 16:35:11 2016", "Fri Oct 14 16:35:11 2016", "1")

    def test_node_days(self):
        self._assert_interval("days", "Thu Oct 6 16:35:11 2016", "Fri Oct 7 16:35:11 2016", "1")

    def test_node_hours(self):
        self._assert_interval("hours", "Thu Oct 7 12:35:11 2016", "Fri Oct 7 16:35:11 2016", "4")

    def test_node_minutes(self):
        self._assert_interval("minutes", "Thu Oct 7 16:33:09 2016", "Fri Oct 7 16:35:11 2016", "2")

    def test_node_seconds(self):
        self._assert_interval("seconds", "Thu Oct 7 16:35:09 2016", "Fri Oct 7 16:35:11 2016", "2")

    def test_node_microseconds(self):
        # Identical timestamps: zero-length interval.
        self._assert_interval("microseconds", "Thu Oct 7 16:35:09 2016", "Fri Oct 7 16:35:09 2016", "0")

    def test_node_ymd(self):
        self._assert_interval("ymd", "Thu Jul 14 16:33:09 2014", "Fri Oct 7 16:35:11 2016", "2 years, 2 months, 23 days")

    def test_node_hms(self):
        self._assert_interval("hms", "Thu Jul 14 16:33:09 2014", "Fri Oct 7 16:35:11 2016", "0 hours, 2 minutes, 2 seconds")

    def test_node_unknown(self):
        # An unrecognized style renders as an empty string.
        self._assert_interval("unknown", "Thu Jul 14 16:33:09 2014", "Fri Oct 7 16:35:11 2016", "")

    def test_node_ymdhms(self):
        self._assert_interval("ymdhms", "Thu Jul 14 16:33:09 2014", "Fri Oct 7 16:35:11 2016",
                              "2 years, 2 months, 23 days, 0 hours, 2 minutes, 2 seconds")

    def test_to_xml(self):
        """A configured interval node serializes back to its AIML XML form."""
        root = TemplateNode()
        node = TemplateIntervalNode()
        node.interval_format = TemplateWordNode("%c")
        node.style = TemplateWordNode("years")
        node.interval_from = TemplateWordNode("Thu Oct 6 16:35:11 2014")
        node.interval_to = TemplateWordNode("Fri Oct 7 16:35:11 2016")
        root.append(node)

        xml = root.xml_tree(self.bot, self.clientid)
        self.assertIsNotNone(xml)
        xml_str = ET.tostring(xml, "utf-8").decode("utf-8")
        self.assertEqual('<template><interval format="%c" style="years"><from>Thu Oct 6 16:35:11 2014</from><to>Fri Oct 7 16:35:11 2016</to></interval></template>', xml_str)

    def test_node_exception_handling(self):
        """A node that raises during resolution yields an empty response."""
        root = TemplateNode()
        node = MockTemplateIntervalNode()
        root.append(node)

        result = root.resolve(self.bot, self.clientid)
        self.assertIsNotNone(result)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual("", result)
29204f6231406c7e3ab126ac77b7bf9e697b6b18 | 135 | py | Python | Factorial/Python/aesptux/factorial_test.py | Mynogs/Algorithm-Implementations | 13a74821fc1f0f7becaa9fb63b98e94134936bdb | [
"MIT"
] | 1,184 | 2015-01-01T14:11:29.000Z | 2022-03-21T19:40:47.000Z | Factorial/Python/aesptux/factorial_test.py | Mynogs/Algorithm-Implementations | 13a74821fc1f0f7becaa9fb63b98e94134936bdb | [
"MIT"
] | 89 | 2015-01-01T15:49:17.000Z | 2021-12-05T19:11:38.000Z | Factorial/Python/aesptux/factorial_test.py | Mynogs/Algorithm-Implementations | 13a74821fc1f0f7becaa9fb63b98e94134936bdb | [
"MIT"
] | 388 | 2015-01-02T03:26:17.000Z | 2022-03-24T14:36:10.000Z | import factorial
print factorial.factorial_recursive(0)
print factorial.factorial_recursive(1)
print factorial.factorial_recursive(5)
| 22.5 | 38 | 0.866667 | 17 | 135 | 6.705882 | 0.411765 | 0.368421 | 0.605263 | 0.842105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02381 | 0.066667 | 135 | 5 | 39 | 27 | 0.880952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.25 | null | null | 0.75 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
293634d6bcd4a4035e850f96de7d9956f72098f4 | 205 | py | Python | mobiles/admin.py | saikatsingha/crawl_amazon | 0688e74a3ceef91abe805780d5f951f66ced75c4 | [
"MIT"
] | null | null | null | mobiles/admin.py | saikatsingha/crawl_amazon | 0688e74a3ceef91abe805780d5f951f66ced75c4 | [
"MIT"
] | null | null | null | mobiles/admin.py | saikatsingha/crawl_amazon | 0688e74a3ceef91abe805780d5f951f66ced75c4 | [
"MIT"
] | null | null | null | from django.contrib import admin
# Register your models here.
from .models import Product_type
from .models import Product_details
admin.site.register(Product_type)
admin.site.register(Product_details)
| 20.5 | 36 | 0.829268 | 29 | 205 | 5.724138 | 0.448276 | 0.120482 | 0.192771 | 0.277108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.107317 | 205 | 9 | 37 | 22.777778 | 0.907104 | 0.126829 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
293f0854e52662ebc93aa398a10782cb11a4f356 | 49,564 | py | Python | DMPage.py | JunHong-1998/Tkinter-MCG-Calculator | 3edecd1d7fb9c9ee1207d99081bf4100c207a94f | [
"CC0-1.0"
] | null | null | null | DMPage.py | JunHong-1998/Tkinter-MCG-Calculator | 3edecd1d7fb9c9ee1207d99081bf4100c207a94f | [
"CC0-1.0"
] | null | null | null | DMPage.py | JunHong-1998/Tkinter-MCG-Calculator | 3edecd1d7fb9c9ee1207d99081bf4100c207a94f | [
"CC0-1.0"
] | null | null | null | from tkinter import *
from DMExp import*
import pygame
import time
import random
class DMPage(Frame):
MUTE = False
INFO = False
DIM = 0
def __init__(self, parent, controller):
Frame.__init__(self, parent)
pygame.mixer.init()
DBMWall = PhotoImage(file="DoubleMatrix.png")
DBLabel = Label(self, image=DBMWall)
DBLabel.image = DBMWall
DBLabel.place(x=-2, y=-2)
Info = PhotoImage(file="FrozenPop.png")
InfoPop = Label(self, image=Info)
InfoPop.image = Info
InfoPop.place(x=-2, y=-2)
InfoPop.lower()
Back = PhotoImage(file="FrozenBack.png")
BackBtn = Button(self, image=Back, bd=0, bg='#2357b5', command=lambda: BackAct())
BackBtn.image = Back
BackBtn.place(x=-2, y=-2)
Info = PhotoImage(file="FrozenInfo.png")
InfoBtn = Button(self, image=Info, bd=0, bg='#2357b5', command=lambda: InfoAct())
InfoBtn.image = Info
InfoBtn.place(x=48, y=-2)
Music = PhotoImage(file="FrozenMusic.png")
MusicBtn = Button(self, image=Music, bd=0, bg='#2357b5', command=lambda: MuteAct())
MusicBtn.image = Music
MusicBtn.place(x=98, y=-2)
MusicOff = PhotoImage(file="FrozenMusicOff.png")
MuteOff = Button(self, image=MusicOff, bd=0, bg='#2357b5', command=lambda: MuteAct())
MuteOff.image = MusicOff
MuteOff.place(x=98, y=-2)
MuteOff.lower()
Random = PhotoImage(file="FrozenRandom.png")
RandBtn = Button(self, image=Random, bd=0, bg="#2357b5", command=lambda: RandAct())
RandBtn.image = Random
RandBtn.place(x=148, y=-2)
Reset = PhotoImage(file="FrozenClear.png")
ResetBtn = Button(self, image=Reset, bd=0, bg="#2357b5", command=lambda: ResetAct())
ResetBtn.image = Reset
ResetBtn.place(x=198, y=-2)
Multiply = PhotoImage(file="MultiplyButton.png")
MultiplyBtn = Button(self, image=Multiply, bd=0, bg="#ff82fe", command=lambda: MultAct())
MultiplyBtn.image = Multiply
MultiplyBtn.place(x=572, y=149)
Plus = PhotoImage(file="PlusButton.png")
PlusBtn = Button(self, image=Plus, bd=0, bg="#ff82fe", command=lambda: PlusAct())
PlusBtn.image = Plus
PlusBtn.place(x=572, y=240)
Minus = PhotoImage(file="MinusButton.png")
MinusBtn = Button(self, image=Minus, bd=0, bg="#ff82fe", command=lambda: MinuAct())
MinusBtn.image = Minus
MinusBtn.place(x=572, y=331)
Reverse = PhotoImage(file="ReverseButton.png")
ReverseBtn = Button(self, image=Reverse, bd=0, bg="#8cb5ea", command=lambda: RevrAct())
ReverseBtn.image = Reverse
ReverseBtn.place(x=646, y=59)
def validate(string):
regex = re.compile(r"(\+|\-)?[0-9.]*$")
result = regex.match(string)
return (string == ""
or (string.count('+') <= 1
and string.count('-') <= 1
and string.count('.') <= 1
and result is not None
and result.group(0) != ""))
def on_validate(P):
return validate(P)
AEnt = Entry(self, background="#8cb5ea", font="-family {Segoe Print} -size 16", justify="center",validate="key")
AEnt.config(validatecommand=(AEnt.register(on_validate), '%P'))
AEnt.place(x=573, y=73, width=50, height=50)
BEnt = Entry(self, background="#8cb5ea", font="-family {Segoe Print} -size 16", justify="center",validate="key")
BEnt.config(validatecommand=(AEnt.register(on_validate), '%P'))
BEnt.place(x=741, y=73, width=50, height=50)
MA1aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA1aEnt.config(validatecommand=(MA1aEnt.register(on_validate), '%P'))
MA1aEnt.place(x=193, y=116, width=50, height=50)
MA1bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA1bEnt.config(validatecommand=(MA1bEnt.register(on_validate), '%P'))
MA1bEnt.place(x=273, y=116, width=50, height=50)
MA1cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA1cEnt.config(validatecommand=(MA1cEnt.register(on_validate), '%P'))
MA1cEnt.place(x=353, y=116, width=50, height=50)
MA1dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA1dEnt.config(validatecommand=(MA1dEnt.register(on_validate), '%P'))
MA1dEnt.place(x=433, y=116, width=50, height=50)
MA2aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA2aEnt.config(validatecommand=(MA2aEnt.register(on_validate), '%P'))
MA2aEnt.place(x=193, y=201, width=50, height=50)
MA2bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA2bEnt.config(validatecommand=(MA2aEnt.register(on_validate), '%P'))
MA2bEnt.place(x=273, y=201, width=50, height=50)
MA2cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA2cEnt.config(validatecommand=(MA2cEnt.register(on_validate), '%P'))
MA2cEnt.place(x=353, y=201, width=50, height=50)
MA2dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA2dEnt.config(validatecommand=(MA2dEnt.register(on_validate), '%P'))
MA2dEnt.place(x=433, y=201, width=50, height=50)
MA3aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA3aEnt.config(validatecommand=(MA3aEnt.register(on_validate), '%P'))
MA3aEnt.place(x=193, y=286, width=50, height=50)
MA3bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA3bEnt.config(validatecommand=(MA3aEnt.register(on_validate), '%P'))
MA3bEnt.place(x=273, y=286, width=50, height=50)
MA3cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA3cEnt.config(validatecommand=(MA3cEnt.register(on_validate), '%P'))
MA3cEnt.place(x=353, y=286, width=50, height=50)
MA3dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA3dEnt.config(validatecommand=(MA3dEnt.register(on_validate), '%P'))
MA3dEnt.place(x=433, y=286, width=50, height=50)
MA4aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA4aEnt.config(validatecommand=(MA4aEnt.register(on_validate), '%P'))
MA4aEnt.place(x=193, y=371, width=50, height=50)
MA4bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA4bEnt.config(validatecommand=(MA4aEnt.register(on_validate), '%P'))
MA4bEnt.place(x=273, y=371, width=50, height=50)
MA4cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA4cEnt.config(validatecommand=(MA4cEnt.register(on_validate), '%P'))
MA4cEnt.place(x=353, y=371, width=50, height=50)
MA4dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MA4dEnt.config(validatecommand=(MA4dEnt.register(on_validate), '%P'))
MA4dEnt.place(x=433, y=371, width=50, height=50)
MB1aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB1aEnt.config(validatecommand=(MB1aEnt.register(on_validate), '%P'))
MB1aEnt.place(x=884, y=116, width=50, height=50)
MB1bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB1bEnt.config(validatecommand=(MB1bEnt.register(on_validate), '%P'))
MB1bEnt.place(x=964, y=116, width=50, height=50)
MB1cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB1cEnt.config(validatecommand=(MB1cEnt.register(on_validate), '%P'))
MB1cEnt.place(x=1044, y=116, width=50, height=50)
MB1dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB1dEnt.config(validatecommand=(MB1dEnt.register(on_validate), '%P'))
MB1dEnt.place(x=1124, y=116, width=50, height=50)
MB2aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB2aEnt.config(validatecommand=(MB2aEnt.register(on_validate), '%P'))
MB2aEnt.place(x=884, y=201, width=50, height=50)
MB2bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB2bEnt.config(validatecommand=(MB2aEnt.register(on_validate), '%P'))
MB2bEnt.place(x=964, y=201, width=50, height=50)
MB2cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB2cEnt.config(validatecommand=(MB2cEnt.register(on_validate), '%P'))
MB2cEnt.place(x=1044, y=201, width=50, height=50)
MB2dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB2dEnt.config(validatecommand=(MB2dEnt.register(on_validate), '%P'))
MB2dEnt.place(x=1124, y=201, width=50, height=50)
MB3aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB3aEnt.config(validatecommand=(MB3aEnt.register(on_validate), '%P'))
MB3aEnt.place(x=884, y=286, width=50, height=50)
MB3bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB3bEnt.config(validatecommand=(MB3aEnt.register(on_validate), '%P'))
MB3bEnt.place(x=964, y=286, width=50, height=50)
MB3cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB3cEnt.config(validatecommand=(MB3cEnt.register(on_validate), '%P'))
MB3cEnt.place(x=1044, y=286, width=50, height=50)
MB3dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB3dEnt.config(validatecommand=(MB3dEnt.register(on_validate), '%P'))
MB3dEnt.place(x=1124, y=286, width=50, height=50)
MB4aEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB4aEnt.config(validatecommand=(MB4aEnt.register(on_validate), '%P'))
MB4aEnt.place(x=884, y=371, width=50, height=50)
MB4bEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB4bEnt.config(validatecommand=(MB4aEnt.register(on_validate), '%P'))
MB4bEnt.place(x=964, y=371, width=50, height=50)
MB4cEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB4cEnt.config(validatecommand=(MB4cEnt.register(on_validate), '%P'))
MB4cEnt.place(x=1044, y=371, width=50, height=50)
MB4dEnt = Entry(self, background="white", font="-family {Segoe Print} -size 16", justify="center",validate="key")
MB4dEnt.config(validatecommand=(MB4dEnt.register(on_validate), '%P'))
MB4dEnt.place(x=1124, y=371, width=50, height=50)
Ans1 = Label(self, bg='#79bcc5', anchor='c', justify="center")
Ans1.place(x=233, y=552, width=154, height=175)
Ans2 = Label(self, bg='#79bcc5', anchor='c', justify="center")
Ans2.place(x=438, y=552, width=154, height=175)
Ans3 = Label(self, bg='#79bcc5', anchor='c', justify="center")
Ans3.place(x=643, y=552, width=343, height=177)
Ans4 = Label(self, bg='#79bcc5', anchor='c', justify="center")
Ans4.place(x=1036, y=552, width=194, height=175)
AddBox = PhotoImage(file="PlusResult.png")
PlusR = Label(self, image=AddBox)
PlusR.image = AddBox
PlusR.place(x=-2, y=490)
MinusBox = PhotoImage(file="MinusResult.png")
MinusR = Label(self, image=MinusBox)
MinusR.image = MinusBox
MinusR.place(x=-2, y=490)
MultiBox = PhotoImage(file="MultiplyResult.png")
MultiR = Label(self, image=MultiBox)
MultiR.image = MultiBox
MultiR.place(x=-2, y=490)
def ResultLower():
r = (Ans1, Ans2, Ans3, Ans4, PlusR, MinusR, MultiR)
for i in range(7):
r[i].lower()
ResultLower()
def RevrAct():
MatA = (MA1aEnt.get(), MA1bEnt.get(), MA1cEnt.get(), MA1dEnt.get(),MA2aEnt.get(), MA2bEnt.get(), MA2cEnt.get(), MA2dEnt.get(),MA3aEnt.get(), MA3bEnt.get(), MA3cEnt.get(), MA3dEnt.get(),MA4aEnt.get(), MA4bEnt.get(), MA4cEnt.get(), MA4dEnt.get())
MatB = (MB1aEnt.get(), MB1bEnt.get(), MB1cEnt.get(), MB1dEnt.get(),MB2aEnt.get(), MB2bEnt.get(), MB2cEnt.get(), MB2dEnt.get(),MB3aEnt.get(), MB3bEnt.get(), MB3cEnt.get(), MB3dEnt.get(),MB4aEnt.get(), MB4bEnt.get(), MB4cEnt.get(), MB4dEnt.get())
EntA = (MA1aEnt, MA1bEnt, MA1cEnt, MA1dEnt, MA2aEnt, MA2bEnt, MA2cEnt, MA2dEnt,MA3aEnt, MA3bEnt, MA3cEnt, MA3dEnt,MA4aEnt, MA4bEnt, MA4cEnt, MA4dEnt)
EntB = (MB1aEnt, MB1bEnt, MB1cEnt, MB1dEnt, MB2aEnt, MB2bEnt, MB2cEnt, MB2dEnt,MB3aEnt, MB3bEnt, MB3cEnt, MB3dEnt,MB4aEnt, MB4bEnt, MB4cEnt, MB4dEnt)
value = (AEnt.get(), BEnt.get())
for i in range (16):
EntA[i].delete(0, END)
EntB[i].delete(0, END)
EntA[i].insert(1, MatB[i])
EntB[i].insert(1, MatA[i])
AEnt.delete(0, END)
BEnt.delete(0, END)
AEnt.insert(1, value[1])
BEnt.insert(1, value[0])
def FTest(x):
return x.lstrip('-').lstrip('+').replace('.', '', 1).isdigit()
def Avalue():
if FTest(AEnt.get()):
value = float(AEnt.get())
else:
value = 1
return value
def Bvalue():
if FTest(BEnt.get()):
value = float(BEnt.get())
else:
value = 1
return value
def TM():
if FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA2dEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MA3dEnt.get()) and FTest(MA4aEnt.get()) and FTest(MA4bEnt.get()) and FTest(MA4cEnt.get()) and FTest(MA4dEnt.get()) and \
FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB2dEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and FTest(MB3dEnt.get()) and FTest(MB4aEnt.get()) and FTest(MB4bEnt.get()) and FTest(MB4cEnt.get()) and FTest(MB4dEnt.get()):
DMPage.DIM = 44
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA2dEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()),float(MA3cEnt.get()), float(MA3dEnt.get()), float(MA4aEnt.get()), float(MA4bEnt.get()), float(MA4cEnt.get()), float(MA4dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB2dEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()), float(MB3dEnt.get()),float(MB4aEnt.get()), float(MB4bEnt.get()), float(MB4cEnt.get()), float(MB4dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MA4aEnt.get()) and FTest(MA4bEnt.get()) and FTest(MA4cEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MA3dEnt.get()) and not FTest(MA4dEnt.get()) and\
FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and FTest(MB4aEnt.get()) and FTest(MB4bEnt.get()) and FTest(MB4cEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2dEnt.get()) and not FTest(MB3dEnt.get()) and not FTest(MB4dEnt.get()):
DMPage.DIM = 43
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA3cEnt.get()), float(MA4aEnt.get()), float(MA4bEnt.get()), float(MA4cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()), float(MB4aEnt.get()), float(MB4bEnt.get()), float(MB4cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA4aEnt.get()) and FTest(MA4bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB4aEnt.get()) and FTest(MB4bEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MA4cEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB3cEnt.get()) and not FTest(MB4cEnt.get()):
DMPage.DIM = 42
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA4aEnt.get()), float(MA4bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB4aEnt.get()), float(MB4bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA4aEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB4aEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MB1bEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB4bEnt.get()):
DMPage.DIM = 41
MatA = (float(MA1aEnt.get()), float(MA2aEnt.get()), float(MA3aEnt.get()), float(MA4aEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB2aEnt.get()), float(MB3aEnt.get()), float(MB4aEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA2dEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MA3dEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB2dEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and FTest(MB3dEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MA4cEnt.get()) and not FTest(MA4dEnt.get()) and not FTest(MB4aEnt.get()) and not FTest(MB4bEnt.get()) and not FTest(MB4cEnt.get()) and not FTest(MB4dEnt.get()):
DMPage.DIM = 34
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA2dEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA3cEnt.get()), float(MA3dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB2dEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()), float(MB3dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MA3dEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MA4cEnt.get()) and not FTest(MA4dEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2dEnt.get()) and not FTest(MB3dEnt.get()) and not FTest(MB4aEnt.get()) and not FTest(MB4bEnt.get()) and not FTest(MB4cEnt.get()) and not FTest(MB4dEnt.get()):
DMPage.DIM = 33
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA3cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB3cEnt.get()) and not FTest(MB4aEnt.get()) and not FTest(MB4bEnt.get()):
DMPage.DIM = 32
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA3aEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB3aEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MB1bEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB4aEnt.get()):
DMPage.DIM = 31
MatA = (float(MA1aEnt.get()), float(MA2aEnt.get()), float(MA3aEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB2aEnt.get()), float(MB3aEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA2dEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB2dEnt.get()) and not FTest(MA3aEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MA3dEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB3cEnt.get()) and not FTest(MB3dEnt.get()):
DMPage.DIM = 24
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA2dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB2dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MA3aEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2dEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB3cEnt.get()):
DMPage.DIM = 23
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA3aEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB3bEnt.get()):
DMPage.DIM = 22
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA2aEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB2aEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB1bEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB3aEnt.get()):
DMPage.DIM = 21
MatA = (float(MA1aEnt.get()), float(MA2aEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB2aEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and not FTest(MA2aEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MB2aEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB2dEnt.get()):
DMPage.DIM = 14
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2aEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2aEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB2cEnt.get()):
DMPage.DIM = 13
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2aEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2aEnt.get()) and not FTest(MB2bEnt.get()):
DMPage.DIM = 12
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MB1aEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2aEnt.get()):
DMPage.DIM = 11
MatA = float(MA1aEnt.get())
MatB = float(MB1aEnt.get())
else:
DMPage.DIM = 0
MatA = 0
MatB = 0
TMat = DoubleMatrix(MatA, MatB, DMPage.DIM, Avalue(), Bvalue())
return TMat
def TMM():
if FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA2dEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MA3dEnt.get()) and FTest(MA4aEnt.get()) and FTest(MA4bEnt.get()) and FTest(MA4cEnt.get()) and FTest(MA4dEnt.get()) and \
FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB2dEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and FTest(MB3dEnt.get()) and FTest(MB4aEnt.get()) and FTest(MB4bEnt.get()) and FTest(MB4cEnt.get()) and FTest(MB4dEnt.get()):
DMPage.DIM = 44
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA2dEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()),float(MA3cEnt.get()), float(MA3dEnt.get()), float(MA4aEnt.get()), float(MA4bEnt.get()), float(MA4cEnt.get()), float(MA4dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB2dEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()), float(MB3dEnt.get()),float(MB4aEnt.get()), float(MB4bEnt.get()), float(MB4cEnt.get()), float(MB4dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MA4aEnt.get()) and FTest(MA4bEnt.get()) and FTest(MA4cEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MA3dEnt.get()) and not FTest(MA4dEnt.get()) and\
FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB2dEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and FTest(MB3dEnt.get()) and not FTest(MB4aEnt.get()) and not FTest(MB4bEnt.get()) and not FTest(MB4cEnt.get()) and not FTest(MB4dEnt.get()):
DMPage.DIM = 43
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA3cEnt.get()), float(MA4aEnt.get()), float(MA4bEnt.get()), float(MA4cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB2dEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()), float(MB3dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA4aEnt.get()) and FTest(MA4bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB2dEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MA4cEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB3cEnt.get()) and not FTest(MB3dEnt.get()):
DMPage.DIM = 42
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA4aEnt.get()), float(MA4bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB2dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA4aEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB1dEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MB2aEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB2dEnt.get()):
DMPage.DIM = 41
MatA = (float(MA1aEnt.get()), float(MA2aEnt.get()), float(MA3aEnt.get()), float(MA4aEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB1dEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA2dEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MA3dEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and FTest(MB4aEnt.get()) and FTest(MB4bEnt.get()) and FTest(MB4cEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MA4cEnt.get()) and not FTest(MA4dEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2dEnt.get()) and not FTest(MB3dEnt.get()) and not FTest(MB4dEnt.get()):
DMPage.DIM = 34
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA2dEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA3cEnt.get()), float(MA3dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()), float(MB4aEnt.get()), float(MB4bEnt.get()), float(MB4cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MA3cEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB3cEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MA3dEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MA4cEnt.get()) and not FTest(MA4dEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2dEnt.get()) and not FTest(MB3dEnt.get()) and not FTest(MB4aEnt.get()) and not FTest(MB4bEnt.get()) and not FTest(MB4cEnt.get()) and not FTest(MB4dEnt.get()):
DMPage.DIM = 33
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()), float(MA3cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB3cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA3aEnt.get()) and FTest(MA3bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB2cEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MA4bEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2dEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB3cEnt.get()):
DMPage.DIM = 32
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA3aEnt.get()), float(MA3bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB2cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA3aEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB1cEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA4aEnt.get()) and not FTest(MB1dEnt.get()) and not FTest(MB2aEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB2cEnt.get()):
DMPage.DIM = 31
MatA = (float(MA1aEnt.get()), float(MA2aEnt.get()), float(MA3aEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB1cEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MA2dEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and FTest(MB4aEnt.get()) and FTest(MB4bEnt.get()) and not FTest(MA3aEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MA3dEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB3cEnt.get()) and not FTest(MB4cEnt.get()):
DMPage.DIM = 24
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()), float(MA2dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()), float(MB4aEnt.get()), float(MB4bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MA2cEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB3bEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MA3aEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MA3cEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB3cEnt.get()) and not FTest(MB4aEnt.get()) and not FTest(MB4bEnt.get()):
DMPage.DIM = 23
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()), float(MA2cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()), float(MB3aEnt.get()), float(MB3bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA2aEnt.get()) and FTest(MA2bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB2bEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA3aEnt.get()) and not FTest(MA3bEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2cEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB3bEnt.get()):
DMPage.DIM = 22
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA2aEnt.get()), float(MA2bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()), float(MB2aEnt.get()), float(MB2bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA2aEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB1bEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MB3aEnt.get()) and not FTest(MB1cEnt.get()) and not FTest(MB2aEnt.get()) and not FTest(MB2bEnt.get()):
DMPage.DIM = 21
MatA = (float(MA1aEnt.get()), float(MA2aEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB1bEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MA1dEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB3aEnt.get()) and FTest(MB4aEnt.get()) and not FTest(MA2aEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MA2dEnt.get()) and not FTest(MB1bEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB4bEnt.get()):
DMPage.DIM = 14
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()), float(MA1dEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB2aEnt.get()), float(MB3aEnt.get()), float(MB4aEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MA1cEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB2aEnt.get()) and FTest(MB3aEnt.get()) and not FTest(MA1dEnt.get()) and not FTest(MA2aEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MA2cEnt.get()) and not FTest(MB1bEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB3bEnt.get()) and not FTest(MB4aEnt.get()):
DMPage.DIM = 13
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()), float(MA1cEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB2aEnt.get()), float(MB3aEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MA1bEnt.get()) and FTest(MB1aEnt.get()) and FTest(MB2aEnt.get()) and not FTest(MA1cEnt.get()) and not FTest(MA2aEnt.get()) and not FTest(MA2bEnt.get()) and not FTest(MB1bEnt.get()) and not FTest(MB2bEnt.get()) and not FTest(MB3aEnt.get()):
DMPage.DIM = 12
MatA = (float(MA1aEnt.get()), float(MA1bEnt.get()))
MatB = (float(MB1aEnt.get()), float(MB2aEnt.get()))
elif FTest(MA1aEnt.get()) and FTest(MB1aEnt.get()) and not FTest(MA1bEnt.get()) and not FTest(MA2aEnt.get()):
DMPage.DIM = 11
MatA = float(MA1aEnt.get())
MatB = float(MB1aEnt.get())
else:
DMPage.DIM = 0
MatA = 0
MatB = 0
TMat = DoubleMatrix(MatA, MatB, DMPage.DIM, Avalue(), Bvalue())
return TMat
def UpdateAns():
Ans1.update()
Ans2.update()
Ans3.update()
Ans4.update()
def AnsLift():
a = (Ans1, Ans2, Ans3, Ans4)
for i in range(4):
a[i].lift()
def PlusAct():
ResultLower()
PlusR.lift()
AnsLift()
Ans1.configure(font=("Segoe Print", 14), anchor='c', text=TM().MatA())
Ans2.configure(font=("Segoe Print", 14), anchor='c', text=TM().MatB())
Ans3.configure(font=("Segoe Print", 10), anchor='c', text=TM().AddEXP())
Ans4.configure(font=("Segoe Print", 12), anchor='c', text=TM().Add())
UpdateAns()
def MinuAct():
ResultLower()
MinusR.lift()
AnsLift()
Ans1.configure(font=("Segoe Print", 14), anchor='c', text=TM().MatA())
Ans2.configure(font=("Segoe Print", 14), anchor='c', text=TM().MatB())
Ans3.configure(font=("Segoe Print", 10), anchor='c', text=TM().MinusEXP())
Ans4.configure(font=("Segoe Print", 12), anchor='c', text=TM().Minus())
UpdateAns()
def MultAct():
ResultLower()
MultiR.lift()
AnsLift()
Ans1.configure(font=("Segoe Print", 14), anchor='c', text=TMM().MatA())
Ans2.configure(font=("Segoe Print", 14), anchor='c', text=TMM().MatBM())
Ans3.configure(font=("Arial Narrow", 7), anchor='c', text=TMM().MultiplyEXP())
Ans4.configure(font=("Menlo", 10), anchor='c', text=TMM().Multiply())
UpdateAns()
def BackAct():
BackS = pygame.mixer.Sound("FrozenBack.wav")
BackS.play()
pygame.mixer.music.load("MenuBG.ogg")
pygame.mixer.music.play(-1)
controller.show_frame("MatrixPage")
def InfoAct():
if DMPage.INFO == False:
DMPage.INFO = True
if DMPage.MUTE == False:
InfoS = pygame.mixer.Sound("FrozenInfo.wav")
InfoS.play()
InfoPop.lift()
InfoBtn.lift()
else:
DMPage.INFO = False
InfoPop.lower()
def ResetAct():
if DMPage.MUTE == False:
ClearS = pygame.mixer.Sound("FrozenReset.wav")
ClearS.play()
time.sleep(2)
EntA = (MA1aEnt, MA1bEnt, MA1cEnt, MA1dEnt, MA2aEnt, MA2bEnt, MA2cEnt, MA2dEnt, MA3aEnt, MA3bEnt, MA3cEnt, MA3dEnt,MA4aEnt, MA4bEnt, MA4cEnt, MA4dEnt)
EntB = (MB1aEnt, MB1bEnt, MB1cEnt, MB1dEnt, MB2aEnt, MB2bEnt, MB2cEnt, MB2dEnt, MB3aEnt, MB3bEnt, MB3cEnt, MB3dEnt,MB4aEnt, MB4bEnt, MB4cEnt, MB4dEnt)
for i in range(16):
EntA[i].delete(0, END)
EntB[i].delete(0, END)
AEnt.delete(0, END)
BEnt.delete(0, END)
def MuteAct():
if DMPage.MUTE == True:
DMPage.MUTE = False
pygame.mixer.music.load("FrozenBackground.ogg")
pygame.mixer.music.play(-1)
MuteOff.lower()
else:
DMPage.MUTE = True
pygame.mixer.music.stop()
MuteOff.lift()
def RandAct():
if DMPage.MUTE == False:
RandomS = pygame.mixer.Sound("FrozenRandom.wav")
RandomS.play()
runRandAct()
def runRandAct():
time.sleep(2)
EntA = (MA1aEnt, MA1bEnt, MA1cEnt, MA1dEnt, MA2aEnt, MA2bEnt, MA2cEnt, MA2dEnt, MA3aEnt, MA3bEnt, MA3cEnt, MA3dEnt,MA4aEnt, MA4bEnt, MA4cEnt, MA4dEnt)
EntB = (MB1aEnt, MB1bEnt, MB1cEnt, MB1dEnt, MB2aEnt, MB2bEnt, MB2cEnt, MB2dEnt, MB3aEnt, MB3bEnt, MB3cEnt, MB3dEnt,MB4aEnt, MB4bEnt, MB4cEnt, MB4dEnt)
for i in range(16):
EntA[i].delete(0, END)
EntB[i].delete(0, END)
x = random.randrange(1,4)
if x==1:
for i in range(2):
EntA[i].insert(1, random.randrange(-9, 10))
EntB[i].insert(1, random.randrange(-9, 10))
for i in range(4,6):
EntA[i].insert(1, random.randrange(-9, 10))
EntB[i].insert(1, random.randrange(-9, 10))
elif x==2:
for i in range(3):
EntA[i].insert(1, random.randrange(-9, 10))
EntB[i].insert(1, random.randrange(-9, 10))
for i in range(4,7):
EntA[i].insert(1, random.randrange(-9, 10))
EntB[i].insert(1, random.randrange(-9, 10))
for i in range(8,11):
EntA[i].insert(1, random.randrange(-9, 10))
EntB[i].insert(1, random.randrange(-9, 10))
else:
for i in range(16):
EntA[i].insert(1, random.randrange(-9, 10))
EntB[i].insert(1, random.randrange(-9, 10)) | 102.404959 | 870 | 0.620975 | 6,593 | 49,564 | 4.661611 | 0.043683 | 0.118696 | 0.131711 | 0.109325 | 0.824266 | 0.821859 | 0.79251 | 0.767098 | 0.75724 | 0.757044 | 0 | 0.052818 | 0.195142 | 49,564 | 484 | 871 | 102.404959 | 0.717613 | 0 | 0 | 0.384298 | 0 | 0 | 0.044538 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043388 | false | 0 | 0.010331 | 0.004132 | 0.076446 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2954d0695f1e67671f39fbd097c6641a727b6af8 | 40 | py | Python | compass/__init__.py | piecakes/compass | bd795d74154f4896e23c44e9d5bf98b5c9c7470d | [
"MIT"
] | null | null | null | compass/__init__.py | piecakes/compass | bd795d74154f4896e23c44e9d5bf98b5c9c7470d | [
"MIT"
] | null | null | null | compass/__init__.py | piecakes/compass | bd795d74154f4896e23c44e9d5bf98b5c9c7470d | [
"MIT"
] | null | null | null | from compass.compass import get_bearing
| 20 | 39 | 0.875 | 6 | 40 | 5.666667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 40 | 1 | 40 | 40 | 0.944444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
2954d824373c774274861235b4a87e7c137226b7 | 9,445 | py | Python | test/dlc_tests/ecs/tensorflow/inference/test_ecs_tensorflow_inference.py | whn09/deep-learning-containers | 45493c141ad7bcadc4293f3efa5b1ad983f942b7 | [
"Apache-2.0"
] | 1 | 2022-03-17T00:41:27.000Z | 2022-03-17T00:41:27.000Z | test/dlc_tests/ecs/tensorflow/inference/test_ecs_tensorflow_inference.py | nam-nd-d3/deep-learning-containers | 4c03f5cf7eda3276c6a03516e6823872bd8eeb42 | [
"Apache-2.0"
] | 19 | 2022-01-12T16:00:45.000Z | 2022-03-31T14:33:29.000Z | test/dlc_tests/ecs/tensorflow/inference/test_ecs_tensorflow_inference.py | ashahba/deep-learning-containers | 48c3948b3d11f4fe2aac6bb25e5d82230d777076 | [
"Apache-2.0"
] | 1 | 2021-03-03T23:51:53.000Z | 2021-03-03T23:51:53.000Z | import pytest
import test.test_utils.ecs as ecs_utils
import test.test_utils.ec2 as ec2_utils
from test.test_utils import (
get_tensorflow_model_name,
request_tensorflow_inference,
request_tensorflow_inference_nlp,
is_nightly_context,
)
from test.test_utils import ECS_AML2_CPU_USWEST2, ECS_AML2_GPU_USWEST2, ECS_AML2_NEURON_USWEST2, ECS_AML2_GRAVITON_CPU_USWEST2
@pytest.mark.model("half_plus_two")
@pytest.mark.parametrize("ecs_instance_type", ["c5.4xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_CPU_USWEST2], indirect=True)
def test_ecs_tensorflow_inference_cpu(tensorflow_inference, ecs_container_instance, region, cpu_only):
__ecs_tensorflow_inference_cpu(tensorflow_inference, ecs_container_instance, region)
@pytest.mark.model("half_plus_two")
@pytest.mark.parametrize("ecs_instance_type", ["c6g.4xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_GRAVITON_CPU_USWEST2], indirect=True)
def test_ecs_tensorflow_inference_graviton_cpu(tensorflow_inference_graviton, ecs_container_instance, region, cpu_only):
__ecs_tensorflow_inference_cpu(tensorflow_inference_graviton, ecs_container_instance, region)
def __ecs_tensorflow_inference_cpu(tensorflow_inference, ecs_container_instance, region):
worker_instance_id, ecs_cluster_arn = ecs_container_instance
public_ip_address = ec2_utils.get_public_ip(worker_instance_id, region=region)
model_name = "saved_model_half_plus_two"
service_name = task_family = revision = None
try:
service_name, task_family, revision = ecs_utils.setup_ecs_inference_service(
tensorflow_inference, "tensorflow", ecs_cluster_arn, model_name, worker_instance_id, region=region
)
model_name = get_tensorflow_model_name("cpu", model_name)
inference_result = request_tensorflow_inference(model_name, ip_address=public_ip_address)
assert inference_result, f"Failed to perform inference at IP address: {public_ip_address}"
finally:
ecs_utils.tear_down_ecs_inference_service(ecs_cluster_arn, service_name, task_family, revision)
@pytest.mark.integration("elastic_inference")
@pytest.mark.model("half_plus_two")
@pytest.mark.parametrize("ecs_instance_type", ["c5.4xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_CPU_USWEST2], indirect=True)
@pytest.mark.parametrize("ei_accelerator_type", ["eia1.large"], indirect=True)
def test_ecs_tensorflow_inference_eia(
tensorflow_inference_eia, ecs_container_instance, ei_accelerator_type, region
):
worker_instance_id, ecs_cluster_arn = ecs_container_instance
public_ip_address = ec2_utils.get_public_ip(worker_instance_id, region=region)
model_name = "saved_model_half_plus_two"
service_name = task_family = revision = None
try:
service_name, task_family, revision = ecs_utils.setup_ecs_inference_service(
tensorflow_inference_eia,
"tensorflow",
ecs_cluster_arn,
model_name,
worker_instance_id,
ei_accelerator_type,
region=region,
)
model_name = get_tensorflow_model_name("eia", model_name)
inference_result = request_tensorflow_inference(model_name, ip_address=public_ip_address)
assert inference_result, f"Failed to perform inference at IP address: {public_ip_address}"
finally:
ecs_utils.tear_down_ecs_inference_service(ecs_cluster_arn, service_name, task_family, revision)
@pytest.mark.model("simple")
@pytest.mark.parametrize("ecs_instance_type", ["inf1.2xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_NEURON_USWEST2], indirect=True)
def test_ecs_tensorflow_inference_neuron(tensorflow_inference_neuron, ecs_container_instance, region):
worker_instance_id, ecs_cluster_arn = ecs_container_instance
public_ip_address = ec2_utils.get_public_ip(worker_instance_id, region=region)
num_neurons = ec2_utils.get_instance_num_inferentias(worker_instance_id)
model_name = "simple"
service_name = task_family = revision = None
try:
service_name, task_family, revision = ecs_utils.setup_ecs_inference_service(
tensorflow_inference_neuron,
"tensorflow",
ecs_cluster_arn,
model_name,
worker_instance_id,
num_neurons=num_neurons,
region=region,
)
model_name = get_tensorflow_model_name("neuron", model_name)
inference_result = request_tensorflow_inference(model_name, ip_address=public_ip_address, inference_string="'{\"instances\": [[1.0, 2.0, 5.0]]}'")
assert inference_result, f"Failed to perform inference at IP address: {public_ip_address}"
finally:
ecs_utils.tear_down_ecs_inference_service(ecs_cluster_arn, service_name, task_family, revision)
@pytest.mark.model("half_plus_two")
@pytest.mark.parametrize("ecs_instance_type", ["p3.8xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_GPU_USWEST2], indirect=True)
def test_ecs_tensorflow_inference_gpu(tensorflow_inference, ecs_container_instance, region, gpu_only):
worker_instance_id, ecs_cluster_arn = ecs_container_instance
public_ip_address = ec2_utils.get_public_ip(worker_instance_id, region=region)
num_gpus = ec2_utils.get_instance_num_gpus(worker_instance_id)
model_name = "saved_model_half_plus_two"
service_name = task_family = revision = None
try:
service_name, task_family, revision = ecs_utils.setup_ecs_inference_service(
tensorflow_inference,
"tensorflow",
ecs_cluster_arn,
model_name,
worker_instance_id,
num_gpus=num_gpus,
region=region,
)
model_name = get_tensorflow_model_name("gpu", model_name)
inference_result = request_tensorflow_inference(model_name, ip_address=public_ip_address)
assert inference_result, f"Failed to perform inference at IP address: {public_ip_address}"
finally:
ecs_utils.tear_down_ecs_inference_service(ecs_cluster_arn, service_name, task_family, revision)
@pytest.mark.skipif(not is_nightly_context(), reason="Running additional model in nightly context only")
@pytest.mark.model("albert")
@pytest.mark.parametrize("ecs_instance_type", ["c5.4xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_CPU_USWEST2], indirect=True)
def test_ecs_tensorflow_inference_cpu_nlp(tensorflow_inference, ecs_container_instance, region, cpu_only):
__ecs_tensorflow_inference_cpu_nlp(tensorflow_inference, ecs_container_instance, region)
#@pytest.mark.skipif(not is_nightly_context(), reason="Running additional model in nightly context only")
@pytest.mark.model("albert")
@pytest.mark.parametrize("ecs_instance_type", ["c6g.4xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_GRAVITON_CPU_USWEST2], indirect=True)
def test_ecs_tensorflow_inference_graviton_cpu_nlp(tensorflow_inference_graviton, ecs_container_instance, region, cpu_only):
__ecs_tensorflow_inference_cpu_nlp(tensorflow_inference_graviton, ecs_container_instance, region)
def __ecs_tensorflow_inference_cpu_nlp(tensorflow_inference, ecs_container_instance, region):
worker_instance_id, ecs_cluster_arn = ecs_container_instance
public_ip_address = ec2_utils.get_public_ip(worker_instance_id, region=region)
model_name = "albert"
service_name = task_family = revision = None
try:
service_name, task_family, revision = ecs_utils.setup_ecs_inference_service(
tensorflow_inference, "tensorflow", ecs_cluster_arn, model_name, worker_instance_id, region=region
)
model_name = get_tensorflow_model_name("cpu", model_name)
inference_result = request_tensorflow_inference_nlp(model_name, ip_address=public_ip_address)
assert inference_result, f"Failed to perform inference at IP address: {public_ip_address}"
finally:
ecs_utils.tear_down_ecs_inference_service(ecs_cluster_arn, service_name, task_family, revision)
@pytest.mark.skipif(not is_nightly_context(), reason="Running additional model in nightly context only")
@pytest.mark.model("albert")
@pytest.mark.parametrize("ecs_instance_type", ["p3.8xlarge"], indirect=True)
@pytest.mark.parametrize("ecs_ami", [ECS_AML2_GPU_USWEST2], indirect=True)
def test_ecs_tensorflow_inference_gpu_nlp(tensorflow_inference, ecs_container_instance, region, gpu_only):
worker_instance_id, ecs_cluster_arn = ecs_container_instance
public_ip_address = ec2_utils.get_public_ip(worker_instance_id, region=region)
num_gpus = ec2_utils.get_instance_num_gpus(worker_instance_id)
model_name = "albert"
service_name = task_family = revision = None
try:
service_name, task_family, revision = ecs_utils.setup_ecs_inference_service(
tensorflow_inference,
"tensorflow",
ecs_cluster_arn,
model_name,
worker_instance_id,
num_gpus=num_gpus,
region=region,
)
model_name = get_tensorflow_model_name("gpu", model_name)
inference_result = request_tensorflow_inference_nlp(model_name, ip_address=public_ip_address)
assert inference_result, f"Failed to perform inference at IP address: {public_ip_address}"
finally:
ecs_utils.tear_down_ecs_inference_service(ecs_cluster_arn, service_name, task_family, revision)
| 49.710526 | 154 | 0.770566 | 1,231 | 9,445 | 5.441105 | 0.077173 | 0.11914 | 0.050164 | 0.056435 | 0.925649 | 0.904897 | 0.899522 | 0.892953 | 0.872947 | 0.858465 | 0 | 0.007199 | 0.146956 | 9,445 | 189 | 155 | 49.973545 | 0.824128 | 0.011011 | 0 | 0.710692 | 0 | 0 | 0.113919 | 0.00803 | 0 | 0 | 0 | 0 | 0.037736 | 1 | 0.062893 | false | 0 | 0.031447 | 0 | 0.09434 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
46765300472599a6421c30499dacd5ed9bef589f | 30,176 | py | Python | pinball/master/thrift_lib/ttypes.py | DotModus/pinball | deeb4ec20bbd000ad44f7b44e6a7c0fa900dbbea | [
"Apache-2.0"
] | 1,143 | 2015-03-06T22:10:53.000Z | 2022-02-23T21:16:47.000Z | pinball/master/thrift_lib/ttypes.py | DotModus/pinball | deeb4ec20bbd000ad44f7b44e6a7c0fa900dbbea | [
"Apache-2.0"
] | 70 | 2015-03-06T00:44:39.000Z | 2019-05-01T13:15:10.000Z | pinball/master/thrift_lib/ttypes.py | Betterment/pinball | 11120b54fcc25b2857631a5de65a1195ffcffb5c | [
"Apache-2.0"
] | 169 | 2015-03-09T21:27:12.000Z | 2022-03-19T08:09:13.000Z | #
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class ErrorCode:
UNKNOWN = 0
VERSION_CONFLICT = 1
NOT_FOUND = 2
INPUT_ERROR = 3
_VALUES_TO_NAMES = {
0: "UNKNOWN",
1: "VERSION_CONFLICT",
2: "NOT_FOUND",
3: "INPUT_ERROR",
}
_NAMES_TO_VALUES = {
"UNKNOWN": 0,
"VERSION_CONFLICT": 1,
"NOT_FOUND": 2,
"INPUT_ERROR": 3,
}
class Token:
"""
Attributes:
- version
- name
- owner
- expirationTime
- priority
- data
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'version', None, None, ), # 1
(2, TType.STRING, 'name', None, None, ), # 2
(3, TType.STRING, 'owner', None, None, ), # 3
(4, TType.I64, 'expirationTime', None, None, ), # 4
(5, TType.DOUBLE, 'priority', None, 0, ), # 5
(6, TType.STRING, 'data', None, None, ), # 6
)
def __init__(self, version=None, name=None, owner=None, expirationTime=None, priority=thrift_spec[5][4], data=None,):
self.version = version
self.name = name
self.owner = owner
self.expirationTime = expirationTime
self.priority = priority
self.data = data
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.version = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.owner = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.expirationTime = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.DOUBLE:
self.priority = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.data = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Token')
if self.version is not None:
oprot.writeFieldBegin('version', TType.I64, 1)
oprot.writeI64(self.version)
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 2)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.owner is not None:
oprot.writeFieldBegin('owner', TType.STRING, 3)
oprot.writeString(self.owner)
oprot.writeFieldEnd()
if self.expirationTime is not None:
oprot.writeFieldBegin('expirationTime', TType.I64, 4)
oprot.writeI64(self.expirationTime)
oprot.writeFieldEnd()
if self.priority is not None:
oprot.writeFieldBegin('priority', TType.DOUBLE, 5)
oprot.writeDouble(self.priority)
oprot.writeFieldEnd()
if self.data is not None:
oprot.writeFieldBegin('data', TType.STRING, 6)
oprot.writeString(self.data)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.version is None:
raise TProtocol.TProtocolException(message='Required field version is unset!')
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TokenMasterException(TException):
"""
Attributes:
- errorCode
- errorMessage
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'errorCode', None, None, ), # 1
(2, TType.STRING, 'errorMessage', None, None, ), # 2
)
def __init__(self, errorCode=None, errorMessage=None,):
self.errorCode = errorCode
self.errorMessage = errorMessage
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.errorCode = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.errorMessage = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TokenMasterException')
if self.errorCode is not None:
oprot.writeFieldBegin('errorCode', TType.I32, 1)
oprot.writeI32(self.errorCode)
oprot.writeFieldEnd()
if self.errorMessage is not None:
oprot.writeFieldBegin('errorMessage', TType.STRING, 2)
oprot.writeString(self.errorMessage)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ArchiveRequest:
"""
Attributes:
- tokens
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'tokens', (TType.STRUCT,(Token, Token.thrift_spec)), None, ), # 1
)
def __init__(self, tokens=None,):
self.tokens = tokens
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.tokens = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in xrange(_size0):
_elem5 = Token()
_elem5.read(iprot)
self.tokens.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ArchiveRequest')
if self.tokens is not None:
oprot.writeFieldBegin('tokens', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.tokens))
for iter6 in self.tokens:
iter6.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GroupRequest:
"""
Attributes:
- namePrefix
- groupSuffix
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'namePrefix', None, None, ), # 1
(2, TType.STRING, 'groupSuffix', None, None, ), # 2
)
def __init__(self, namePrefix=None, groupSuffix=None,):
self.namePrefix = namePrefix
self.groupSuffix = groupSuffix
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.namePrefix = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.groupSuffix = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GroupRequest')
if self.namePrefix is not None:
oprot.writeFieldBegin('namePrefix', TType.STRING, 1)
oprot.writeString(self.namePrefix)
oprot.writeFieldEnd()
if self.groupSuffix is not None:
oprot.writeFieldBegin('groupSuffix', TType.STRING, 2)
oprot.writeString(self.groupSuffix)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GroupResponse:
"""
Attributes:
- counts
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'counts', (TType.STRING,None,TType.I32,None), None, ), # 1
)
def __init__(self, counts=None,):
self.counts = counts
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.counts = {}
(_ktype8, _vtype9, _size7 ) = iprot.readMapBegin()
for _i11 in xrange(_size7):
_key12 = iprot.readString();
_val13 = iprot.readI32();
self.counts[_key12] = _val13
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GroupResponse')
if self.counts is not None:
oprot.writeFieldBegin('counts', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.I32, len(self.counts))
for kiter14,viter15 in self.counts.items():
oprot.writeString(kiter14)
oprot.writeI32(viter15)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ModifyRequest:
"""
Attributes:
- updates
- deletes
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'updates', (TType.STRUCT,(Token, Token.thrift_spec)), None, ), # 1
(2, TType.LIST, 'deletes', (TType.STRUCT,(Token, Token.thrift_spec)), None, ), # 2
)
def __init__(self, updates=None, deletes=None,):
self.updates = updates
self.deletes = deletes
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.updates = []
(_etype19, _size16) = iprot.readListBegin()
for _i20 in xrange(_size16):
_elem21 = Token()
_elem21.read(iprot)
self.updates.append(_elem21)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.deletes = []
(_etype25, _size22) = iprot.readListBegin()
for _i26 in xrange(_size22):
_elem27 = Token()
_elem27.read(iprot)
self.deletes.append(_elem27)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ModifyRequest')
if self.updates is not None:
oprot.writeFieldBegin('updates', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.updates))
for iter28 in self.updates:
iter28.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.deletes is not None:
oprot.writeFieldBegin('deletes', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.deletes))
for iter29 in self.deletes:
iter29.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ModifyResponse:
"""
Attributes:
- updates
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'updates', (TType.STRUCT,(Token, Token.thrift_spec)), None, ), # 1
)
def __init__(self, updates=None,):
self.updates = updates
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.updates = []
(_etype33, _size30) = iprot.readListBegin()
for _i34 in xrange(_size30):
_elem35 = Token()
_elem35.read(iprot)
self.updates.append(_elem35)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ModifyResponse')
if self.updates is not None:
oprot.writeFieldBegin('updates', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.updates))
for iter36 in self.updates:
iter36.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Query:
"""
Attributes:
- namePrefix
- maxTokens
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'namePrefix', None, None, ), # 1
(2, TType.I32, 'maxTokens', None, None, ), # 2
)
def __init__(self, namePrefix=None, maxTokens=None,):
self.namePrefix = namePrefix
self.maxTokens = maxTokens
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.namePrefix = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.maxTokens = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Query')
if self.namePrefix is not None:
oprot.writeFieldBegin('namePrefix', TType.STRING, 1)
oprot.writeString(self.namePrefix)
oprot.writeFieldEnd()
if self.maxTokens is not None:
oprot.writeFieldBegin('maxTokens', TType.I32, 2)
oprot.writeI32(self.maxTokens)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class QueryRequest:
"""
Attributes:
- queries
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'queries', (TType.STRUCT,(Query, Query.thrift_spec)), None, ), # 1
)
def __init__(self, queries=None,):
self.queries = queries
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.queries = []
(_etype40, _size37) = iprot.readListBegin()
for _i41 in xrange(_size37):
_elem42 = Query()
_elem42.read(iprot)
self.queries.append(_elem42)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('QueryRequest')
if self.queries is not None:
oprot.writeFieldBegin('queries', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.queries))
for iter43 in self.queries:
iter43.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class QueryResponse:
"""
Attributes:
- tokens
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'tokens', (TType.LIST,(TType.STRUCT,(Token, Token.thrift_spec))), None, ), # 1
)
def __init__(self, tokens=None,):
self.tokens = tokens
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.tokens = []
(_etype47, _size44) = iprot.readListBegin()
for _i48 in xrange(_size44):
_elem49 = []
(_etype53, _size50) = iprot.readListBegin()
for _i54 in xrange(_size50):
_elem55 = Token()
_elem55.read(iprot)
_elem49.append(_elem55)
iprot.readListEnd()
self.tokens.append(_elem49)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('QueryResponse')
if self.tokens is not None:
oprot.writeFieldBegin('tokens', TType.LIST, 1)
oprot.writeListBegin(TType.LIST, len(self.tokens))
for iter56 in self.tokens:
oprot.writeListBegin(TType.STRUCT, len(iter56))
for iter57 in iter56:
iter57.write(oprot)
oprot.writeListEnd()
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class QueryAndOwnRequest:
    """Thrift-generated request struct: a Query plus an ownership claim.

    Attributes:
    - owner: string id of the claiming owner
    - expirationTime: i64 claim expiration (units not stated here — TODO confirm)
    - query: nested Query struct
    """
    # Field spec consumed by fastbinary: (id, type, name, type-args, default).
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'owner', None, None, ), # 1
        (2, TType.I64, 'expirationTime', None, None, ), # 2
        (3, TType.STRUCT, 'query', (Query, Query.thrift_spec), None, ), # 3
    )

    def __init__(self, owner=None, expirationTime=None, query=None,):
        self.owner = owner
        self.expirationTime = expirationTime
        self.query = query

    def read(self, iprot):
        """Deserialize from *iprot*; unknown field ids are skipped (forward compat)."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.owner = iprot.readString();
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.expirationTime = iprot.readI64();
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.query = Query()
                    self.query.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to *oprot* in field-id order."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('QueryAndOwnRequest')
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 1)
            oprot.writeString(self.owner)
            oprot.writeFieldEnd()
        if self.expirationTime is not None:
            oprot.writeFieldBegin('expirationTime', TType.I64, 2)
            oprot.writeI64(self.expirationTime)
            oprot.writeFieldEnd()
        if self.query is not None:
            oprot.writeFieldBegin('query', TType.STRUCT, 3)
            self.query.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields; no-op.
        return

    def __repr__(self):
        # Python 2 iteritems.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class QueryAndOwnResponse:
    """Thrift-generated response struct: the list of Tokens now owned.

    Attributes:
    - tokens: list<Token>
    """
    # Field spec consumed by fastbinary: (id, type, name, type-args, default).
    thrift_spec = (
        None, # 0
        (1, TType.LIST, 'tokens', (TType.STRUCT,(Token, Token.thrift_spec)), None, ), # 1
    )

    def __init__(self, tokens=None,):
        self.tokens = tokens

    def read(self, iprot):
        """Deserialize from *iprot*; unknown field ids are skipped (forward compat)."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.tokens = []
                    (_etype61, _size58) = iprot.readListBegin()
                    for _i62 in xrange(_size58):
                        _elem63 = Token()
                        _elem63.read(iprot)
                        self.tokens.append(_elem63)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize the tokens list (if set) to *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('QueryAndOwnResponse')
        if self.tokens is not None:
            oprot.writeFieldBegin('tokens', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.tokens))
            for iter64 in self.tokens:
                iter64.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields; no-op.
        return

    def __repr__(self):
        # Python 2 iteritems.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 30.791837 | 188 | 0.643425 | 3,453 | 30,176 | 5.377063 | 0.061685 | 0.01912 | 0.034416 | 0.033931 | 0.805569 | 0.761135 | 0.740938 | 0.729466 | 0.712178 | 0.705822 | 0 | 0.014718 | 0.236711 | 30,176 | 979 | 189 | 30.823289 | 0.791386 | 0.020248 | 0 | 0.70828 | 1 | 0 | 0.027909 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.10828 | false | 0 | 0.005096 | 0.04586 | 0.24586 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d3bed3943b0763f7a5cf6f6d811317f531ecb746 | 1,847 | py | Python | codes/lab-edwardzcn/modern-rsa-chosen-cipher/3_3_2.py | CSU-CS-WIKI/cryptology | 1ab0a5d0548806659773878a694fcdf6bc809b38 | [
"MIT"
] | null | null | null | codes/lab-edwardzcn/modern-rsa-chosen-cipher/3_3_2.py | CSU-CS-WIKI/cryptology | 1ab0a5d0548806659773878a694fcdf6bc809b38 | [
"MIT"
] | null | null | null | codes/lab-edwardzcn/modern-rsa-chosen-cipher/3_3_2.py | CSU-CS-WIKI/cryptology | 1ab0a5d0548806659773878a694fcdf6bc809b38 | [
"MIT"
] | null | null | null | # Your code
if __name__ == "__main__":
# n = 365943683662593036114951067995682721517067555047322405005561707577808973352384148654059210398249026104057099508844294973796827515301553824087584375017323021871380571271454677988083231822824922251476778725940546778171084237860433593104487410611333801230696041295956261651266146677879782816091639230440231434675643196468292496482129270479939685639031652822357396020385871745450095727977893787860446521391509835372146658793817105666845359355692342161933263550700967709917895920462740484324174000477716806953417727083566519055902293572993855908591604483403579864863223345121195550240175643003622139838894357293215224677361753055036945578247360182306789545857306635399938740959619424740843356712230243710061905602692985777301216579661735286918574099056550122685182408403686023
# e = 65537
# c = 69450402210900728575226161253116245567260530990709625122118756925023116826502143177771098156518730184714440037106701892924986870455719289263496322484502132098146442727676261732288871499106594292140125893907420888125037625910479378978264157082302461737134670776486295600297461647462144573540270130426057443902731064901020626138779020374579158874134257978636853138476354803435486040011823218379981000495525369200419783552001835237404973297618283040677951920346680179216164028923970502543841322605053167459879604000156181690628553756061314027425038852818500415743803687220268296329994436541032246861726007670511621803963578334460936324096537696079293868054629721556154057552043471462267834671012227614276944451951423856664367847922623625358633806254798800919267246077555398
# Your code
print(c)
# print(8081701021792478827192341361679241579138945149849530786194925914828179940392//2)
# 根据反馈然后计算
print(8081701021792478827192341361679241579138945149849530786194925914828179940392//2)
| 153.916667 | 781 | 0.953438 | 22 | 1,847 | 79.681818 | 0.681818 | 0.009127 | 0.093554 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.951848 | 0.033027 | 1,847 | 11 | 782 | 167.909091 | 0.029675 | 0.907417 | 0 | 0 | 0 | 0 | 0.04908 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
d3c7a5bccafdc0d1494a48d4ffd5db23ea97d049 | 37,954 | py | Python | hpat/tests/test_dataframe.py | AlexanderKalistratov/hpat | be1c9cdbd26c55162bad4bb6dfe77af176584d40 | [
"BSD-2-Clause"
] | 1 | 2022-02-21T06:49:03.000Z | 2022-02-21T06:49:03.000Z | hpat/tests/test_dataframe.py | kozlov-alexey/sdc | f1a48b3388713da2f96719d7003e7a400953f21e | [
"BSD-2-Clause"
] | 2 | 2019-10-11T16:49:03.000Z | 2019-10-14T22:05:50.000Z | hpat/tests/test_dataframe.py | kozlov-alexey/sdc | f1a48b3388713da2f96719d7003e7a400953f21e | [
"BSD-2-Clause"
] | null | null | null | import unittest
import platform
import random
import string
import platform
import pandas as pd
import numpy as np
import numba
import hpat
from hpat.tests.test_utils import (count_array_REPs, count_parfor_REPs, count_parfor_OneDs,
count_array_OneDs, dist_IR_contains, get_start_end)
from hpat.tests.gen_test_data import ParquetGenerator
from numba.config import IS_32BITS
@hpat.jit
def inner_get_column(df):
    """JIT-compiled helper: return column A of *df* (cross-function test target)."""
    # df2 = df[['A', 'C']]
    # df2['D'] = np.ones(3)
    return df.A
COL_IND = 0
class TestDataFrame(unittest.TestCase):
def test_create1(self):
    """A DataFrame built inside jitted code must round-trip its column."""
    def pyfunc(n):
        frame = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
        return frame.A

    jitted = hpat.jit(pyfunc)
    size = 11
    pd.testing.assert_series_equal(jitted(size), pyfunc(size))
def test_create_cond1(self):
    """DataFrame creation on both sides of a branch; both paths checked."""
    def test_impl(A, B, c):
        if c:
            df = pd.DataFrame({'A': A})
        else:
            df = pd.DataFrame({'A': B})
        return df.A
    hpat_func = hpat.jit(test_impl)

    n = 11
    A = np.ones(n)
    B = np.arange(n) + 1.0
    c = 0
    pd.testing.assert_series_equal(hpat_func(A, B, c), test_impl(A, B, c))
    c = 2
    pd.testing.assert_series_equal(hpat_func(A, B, c), test_impl(A, B, c))
@unittest.skip('Implement feature to create DataFrame without column names')
def test_create_without_column_names(self):
    """DataFrame from a bare list (no column names) — unimplemented, skipped."""
    def test_impl():
        df = pd.DataFrame([100, 200, 300, 400, 200, 100])
        return df
    hpat_func = hpat.jit(test_impl)

    pd.testing.assert_frame_equal(hpat_func(), test_impl())
def test_unbox1(self):
    """A DataFrame passed in from Python is unboxed and a column read back."""
    def pyfunc(frame):
        return frame.A

    jitted = hpat.jit(pyfunc)
    num = 11
    data = pd.DataFrame({'A': np.arange(num), 'B': np.random.ranf(num)})
    pd.testing.assert_series_equal(jitted(data), pyfunc(data))
@unittest.skip("needs properly refcounted dataframes")
def test_unbox2(self):
def test_impl(df, cond):
n = len(df)
if cond:
df['A'] = np.arange(n) + 2.0
return df.A
hpat_func = hpat.jit(test_impl)
n = 11
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
pd.testing.assert_series_equal(hpat_func(df.copy(), True), test_impl(df.copy(), True))
pd.testing.assert_series_equal(hpat_func(df.copy(), False), test_impl(df.copy(), False))
@unittest.skip('Implement feature to create DataFrame without column names')
def test_unbox_without_column_names(self):
    """Unbox a DataFrame with default (integer) column labels — skipped."""
    def test_impl(df):
        return df

    df = pd.DataFrame([100, 200, 300, 400, 200, 100])
    hpat_func = hpat.jit(test_impl)

    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_box1(self):
    """Box a jit-created DataFrame back to Python."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n)})
        return df
    hpat_func = hpat.jit(test_impl)

    n = 11
    # On 64-bit Windows numpy default int differs, so dtype check is relaxed.
    do_check = False if platform.system() == 'Windows' and not IS_32BITS else True
    pd.testing.assert_frame_equal(hpat_func(n), test_impl(n), check_dtype=do_check)
def test_box2(self):
    """Box a DataFrame containing a string column."""
    def test_impl():
        df = pd.DataFrame({'A': [1, 2, 3], 'B': ['a', 'bb', 'ccc']})
        return df
    hpat_func = hpat.jit(test_impl)

    pd.testing.assert_frame_equal(hpat_func(), test_impl())
@unittest.skip("pending df filter support")
def test_box3(self):
def test_impl(df):
df = df[df.A != 'dd']
return df
hpat_func = hpat.jit(test_impl)
df = pd.DataFrame({'A': ['aa', 'bb', 'cc']})
pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_box_categorical(self):
    """A categorical column must survive unbox/box around a mutation."""
    def test_impl(df):
        df['A'] = df['A'] + 1
        return df
    hpat_func = hpat.jit(test_impl)

    df = pd.DataFrame({'A': [1, 2, 3],
                       'B': pd.Series(['N', 'Y', 'Y'],
                                      dtype=pd.api.types.CategoricalDtype(['N', 'Y']))})
    pd.testing.assert_frame_equal(hpat_func(df.copy(deep=True)), test_impl(df))
def test_box_dist_return(self):
    """Return a distributed DataFrame; verify 1D distribution and global sums."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n)})
        return df
    hpat_func = hpat.jit(distributed={'df'})(test_impl)

    n = 11
    hres, res = hpat_func(n), test_impl(n)
    self.assertEqual(count_array_OneDs(), 3)
    self.assertEqual(count_parfor_OneDs(), 2)
    # Reduce the distributed result across ranks before comparing to pandas.
    dist_sum = hpat.jit(
        lambda a: hpat.distributed_api.dist_reduce(
            a, np.int32(hpat.distributed_api.Reduce_Type.Sum.value)))
    dist_sum(1)  # run to compile
    np.testing.assert_allclose(dist_sum(hres.A.sum()), res.A.sum())
    np.testing.assert_allclose(dist_sum(hres.B.sum()), res.B.sum())
def test_len1(self):
    """len(df) inside jitted code; no replicated arrays/parfors expected."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n, np.int64), 'B': np.random.ranf(n)})
        return len(df)
    hpat_func = hpat.jit(test_impl)

    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
def test_shape1(self):
    """df.shape inside jitted code; no replicated arrays/parfors expected."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n, np.int64), 'B': np.random.ranf(n)})
        return df.shape
    hpat_func = hpat.jit(test_impl)

    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
def test_column_getitem1(self):
    """df['A'].values access; the sum reduction should be a 1D parfor."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
        Ac = df['A'].values
        return Ac.sum()
    hpat_func = hpat.jit(test_impl)

    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
    self.assertEqual(count_parfor_OneDs(), 1)
def test_column_list_getitem1(self):
    """Column-list selection df[['A', 'C']]."""
    def test_impl(df):
        return df[['A', 'C']]
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame(
        {'A': np.arange(n), 'B': np.ones(n), 'C': np.random.ranf(n)})
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_filter1(self):
    """Boolean filtering via df[df.A > .5]; result must stay distributed."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + n, 'B': np.arange(n)**2})
        df1 = df[df.A > .5]
        return df1.B.sum()
    hpat_func = hpat.jit(test_impl)

    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
def test_filter2(self):
    """Boolean filtering via df.loc[mask]; result must stay distributed."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + n, 'B': np.arange(n)**2})
        df1 = df.loc[df.A > .5]
        return np.sum(df1.B)
    hpat_func = hpat.jit(test_impl)

    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
def test_filter3(self):
    """Boolean filtering via df.iloc[mask.values]; result must stay distributed."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + n, 'B': np.arange(n)**2})
        df1 = df.iloc[(df.A > .5).values]
        return np.sum(df1.B)
    hpat_func = hpat.jit(test_impl)

    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
def test_iloc1(self):
    """iloc with a slice: df.iloc[1:n]."""
    def test_impl(df, n):
        return df.iloc[1:n].B.values
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
    np.testing.assert_array_equal(hpat_func(df, n), test_impl(df, n))
def test_iloc2(self):
    """iloc with an ndarray of positions."""
    def test_impl(df, n):
        return df.iloc[np.array([1, 4, 9])].B.values
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
    np.testing.assert_array_equal(hpat_func(df, n), test_impl(df, n))
def test_iloc3(self):
    """iloc column selection: df.iloc[:, 1]."""
    def test_impl(df):
        return df.iloc[:, 1].values
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
    np.testing.assert_array_equal(hpat_func(df), test_impl(df))
@unittest.skip("TODO: support A[[1,2,3]] in Numba")
def test_iloc4(self):
def test_impl(df, n):
return df.iloc[[1, 4, 9]].B.values
hpat_func = hpat.jit(test_impl)
n = 11
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
np.testing.assert_array_equal(hpat_func(df, n), test_impl(df, n))
def test_iloc5(self):
    # test iloc with global value
    def test_impl(df):
        return df.iloc[:, COL_IND].values
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
    np.testing.assert_array_equal(hpat_func(df), test_impl(df))
def test_loc1(self):
    """loc label-based column selection: df.loc[:, 'B']."""
    def test_impl(df):
        return df.loc[:, 'B'].values
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
    np.testing.assert_array_equal(hpat_func(df), test_impl(df))
def test_iat1(self):
    """Scalar iat read on a jit-created DataFrame."""
    def test_impl(n):
        df = pd.DataFrame({'B': np.ones(n), 'A': np.arange(n) + n})
        return df.iat[3, 1]
    hpat_func = hpat.jit(test_impl)

    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
def test_iat2(self):
    """Scalar iat read on an unboxed DataFrame."""
    def test_impl(df):
        return df.iat[3, 1]
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'B': np.ones(n), 'A': np.arange(n) + n})
    self.assertEqual(hpat_func(df), test_impl(df))
def test_iat3(self):
    """iat read with a runtime-computed row index."""
    def test_impl(df, n):
        return df.iat[n - 1, 1]
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'B': np.ones(n), 'A': np.arange(n) + n})
    self.assertEqual(hpat_func(df, n), test_impl(df, n))
def test_iat_set1(self):
    """iat write; returned column must reflect the mutation (aliasing check)."""
    def test_impl(df, n):
        df.iat[n - 1, 1] = n**2
        return df.A  # return the column to check column aliasing
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'B': np.ones(n), 'A': np.arange(n) + n})
    df2 = df.copy()
    pd.testing.assert_series_equal(hpat_func(df, n), test_impl(df2, n))
def test_iat_set2(self):
    """iat write; the whole DataFrame is returned (aliasing/boxing check)."""
    def test_impl(df, n):
        df.iat[n - 1, 1] = n**2
        return df  # check df aliasing/boxing
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'B': np.ones(n), 'A': np.arange(n) + n})
    df2 = df.copy()
    pd.testing.assert_frame_equal(hpat_func(df, n), test_impl(df2, n))
def test_set_column1(self):
    # set existing column
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n, np.int64), 'B': np.arange(n) + 3.0})
        df['A'] = np.arange(n)
        return df
    hpat_func = hpat.jit(test_impl)

    n = 11
    # On 64-bit Windows numpy default int differs, so dtype check is relaxed.
    do_check = False if platform.system() == 'Windows' and not IS_32BITS else True
    pd.testing.assert_frame_equal(hpat_func(n), test_impl(n), check_dtype=do_check)
def test_set_column_reflect4(self):
    # set existing column
    def test_impl(df, n):
        df['A'] = np.arange(n)

    hpat_func = hpat.jit(test_impl)

    n = 11
    df1 = pd.DataFrame({'A': np.ones(n, np.int64), 'B': np.arange(n) + 3.0})
    df2 = df1.copy()
    # Mutation happens through reflection: compare the two dfs afterwards.
    hpat_func(df1, n)
    test_impl(df2, n)
    do_check = False if platform.system() == 'Windows' and not IS_32BITS else True
    pd.testing.assert_frame_equal(df1, df2, check_dtype=do_check)
def test_set_column_new_type1(self):
    # set existing column with a new type
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n) + 3.0})
        df['A'] = np.arange(n)
        return df
    hpat_func = hpat.jit(test_impl)

    n = 11
    do_check = False if platform.system() == 'Windows' and not IS_32BITS else True
    pd.testing.assert_frame_equal(hpat_func(n), test_impl(n), check_dtype=do_check)
def test_set_column2(self):
    # create new column
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n) + 1.0})
        df['C'] = np.arange(n)
        return df
    hpat_func = hpat.jit(test_impl)

    n = 11
    do_check = False if platform.system() == 'Windows' and not IS_32BITS else True
    pd.testing.assert_frame_equal(hpat_func(n), test_impl(n), check_dtype=do_check)
def test_set_column_reflect3(self):
    # create new column
    def test_impl(df, n):
        df['C'] = np.arange(n)

    hpat_func = hpat.jit(test_impl)

    n = 11
    df1 = pd.DataFrame({'A': np.ones(n, np.int64), 'B': np.arange(n) + 3.0})
    df2 = df1.copy()
    # Mutation through reflection; compare side effects on both copies.
    hpat_func(df1, n)
    test_impl(df2, n)
    do_check = False if platform.system() == 'Windows' and not IS_32BITS else True
    pd.testing.assert_frame_equal(df1, df2, check_dtype=do_check)
def test_set_column_bool1(self):
    """Set a new column from boolean-mask indexing of another column."""
    def test_impl(df):
        df['C'] = df['A'][df['B']]

    hpat_func = hpat.jit(test_impl)
    df = pd.DataFrame({'A': [1, 2, 3], 'B': [True, False, True]})
    df2 = df.copy()
    test_impl(df2)
    hpat_func(df)
    pd.testing.assert_series_equal(df.C, df2.C)
def test_set_column_reflect1(self):
    """Column added inside jitted code must be visible on the Python-side df."""
    def test_impl(df, arr):
        df['C'] = arr
        return df.C.sum()

    hpat_func = hpat.jit(test_impl)
    n = 11
    arr = np.random.ranf(n)
    df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
    hpat_func(df, arr)
    self.assertIn('C', df)
    np.testing.assert_almost_equal(df.C.values, arr)
def test_set_column_reflect2(self):
    """Sum of a reflected new column matches the pure-Python result."""
    def test_impl(df, arr):
        df['C'] = arr
        return df.C.sum()

    hpat_func = hpat.jit(test_impl)
    n = 11
    arr = np.random.ranf(n)
    df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
    df2 = df.copy()
    np.testing.assert_almost_equal(hpat_func(df, arr), test_impl(df2, arr))
def test_df_values1(self):
    """df.values of a jit-created DataFrame matches pandas."""
    def pyfunc(n):
        frame = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n)})
        return frame.values

    jitted = hpat.jit(pyfunc)
    size = 11
    np.testing.assert_array_equal(jitted(size), pyfunc(size))
def test_df_values2(self):
    """df.values on an unboxed DataFrame."""
    def test_impl(df):
        return df.values
    hpat_func = hpat.jit(test_impl)

    n = 11
    df = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n)})
    np.testing.assert_array_equal(hpat_func(df), test_impl(df))
def test_df_values_parallel1(self):
    """df.values reduction must parallelize (no replicated arrays/parfors)."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n)})
        return df.values.sum()
    hpat_func = hpat.jit(test_impl)

    n = 11
    np.testing.assert_array_equal(hpat_func(n), test_impl(n))
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
def test_df_apply(self):
    """df.apply with a row lambda; result B is unused but must still compile."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)})
        B = df.apply(lambda r: r.A + r.B, axis=1)
        return df.B.sum()

    n = 121
    hpat_func = hpat.jit(test_impl)
    np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
def test_df_apply_branch(self):
    """df.apply whose lambda contains short-circuit boolean control flow."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)})
        B = df.apply(lambda r: r.A < 10 and r.B > 20, axis=1)
        return df.B.sum()

    n = 121
    hpat_func = hpat.jit(test_impl)
    np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
def test_df_describe(self):
    """df.describe compiles and stays distributed (output not yet validated)."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(0, n, 1, np.float32),
                           'B': np.arange(n)})
        #df.A[0:1] = np.nan
        return df.describe()
    hpat_func = hpat.jit(test_impl)

    n = 1001
    hpat_func(n)
    # XXX: test actual output
    self.assertEqual(count_array_REPs(), 0)
    self.assertEqual(count_parfor_REPs(), 0)
def test_sort_values(self):
    """In-place sort_values on one key; non-key column order checked."""
    def test_impl(df):
        df.sort_values('A', inplace=True)
        return df.B.values

    n = 1211
    np.random.seed(2)  # fixed seed keeps key ties stable between runs
    df = pd.DataFrame({'A': np.random.ranf(n), 'B': np.arange(n), 'C': np.random.ranf(n)})
    hpat_func = hpat.jit(test_impl)
    np.testing.assert_almost_equal(hpat_func(df.copy()), test_impl(df))
def test_sort_values_copy(self):
    """sort_values returning a new DataFrame (inplace=False path)."""
    def test_impl(df):
        df2 = df.sort_values('A')
        return df2.B.values

    n = 1211
    np.random.seed(2)
    df = pd.DataFrame({'A': np.random.ranf(n), 'B': np.arange(n), 'C': np.random.ranf(n)})
    hpat_func = hpat.jit(test_impl)
    np.testing.assert_almost_equal(hpat_func(df.copy()), test_impl(df))
def test_sort_values_single_col(self):
    """sort_values on a single-column DataFrame."""
    def test_impl(df):
        df.sort_values('A', inplace=True)
        return df.A.values

    n = 1211
    np.random.seed(2)
    df = pd.DataFrame({'A': np.random.ranf(n)})
    hpat_func = hpat.jit(test_impl)
    np.testing.assert_almost_equal(hpat_func(df.copy()), test_impl(df))
def test_sort_values_single_col_str(self):
    """sort_values on a single string column with random-length keys."""
    def test_impl(df):
        df.sort_values('A', inplace=True)
        return df.A.values

    n = 1211
    random.seed(2)
    str_vals = []

    for _ in range(n):
        k = random.randint(1, 30)
        val = ''.join(random.choices(string.ascii_uppercase + string.digits, k=k))
        str_vals.append(val)

    df = pd.DataFrame({'A': str_vals})
    hpat_func = hpat.jit(test_impl)
    self.assertTrue((hpat_func(df.copy()) == test_impl(df)).all())
def test_sort_values_str(self):
    """sort_values on a string key with a string payload column."""
    def test_impl(df):
        df.sort_values('A', inplace=True)
        return df.B.values

    n = 1211
    random.seed(2)
    str_vals = []
    str_vals2 = []

    for i in range(n):
        k = random.randint(1, 30)
        val = ''.join(random.choices(string.ascii_uppercase + string.digits, k=k))
        str_vals.append(val)
        val = ''.join(random.choices(string.ascii_uppercase + string.digits, k=k))
        str_vals2.append(val)

    df = pd.DataFrame({'A': str_vals, 'B': str_vals2})
    # use mergesort for stability, in str generation equal keys are more probable
    sorted_df = df.sort_values('A', inplace=False, kind='mergesort')
    hpat_func = hpat.jit(test_impl)
    self.assertTrue((hpat_func(df) == sorted_df.B.values).all())
def test_sort_parallel_single_col(self):
    """Distributed sort of one parquet column; only checks output is sorted."""
    # create `kde.parquet` file
    ParquetGenerator.gen_kde_pq()

    # TODO: better parallel sort test
    def test_impl():
        df = pd.read_parquet('kde.parquet')
        df.sort_values('points', inplace=True)
        res = df.points.values
        return res

    hpat_func = hpat.jit(locals={'res:return': 'distributed'})(test_impl)

    save_min_samples = hpat.hiframes.sort.MIN_SAMPLES
    try:
        # Lower sampling threshold so the parallel path is taken at this size.
        hpat.hiframes.sort.MIN_SAMPLES = 10
        res = hpat_func()
        self.assertTrue((np.diff(res) >= 0).all())
    finally:
        # restore global val
        hpat.hiframes.sort.MIN_SAMPLES = save_min_samples
def test_df_isna1(self):
    '''Verify DataFrame.isna implementation for various types of data'''
    def test_impl(df):
        return df.isna()
    hpat_func = hpat.jit(test_impl)

    # TODO: add column with datetime values when test_series_datetime_isna1 is fixed
    df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0],
                       'B': [np.inf, 5, np.nan, 6],
                       'C': ['aa', 'b', None, 'ccc'],
                       'D': [None, 'dd', '', None]})
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_df_astype_str1(self):
    '''Verifies DataFrame.astype implementation converting various types to string'''
    def test_impl(df):
        return df.astype(str)
    hpat_func = hpat.jit(test_impl)

    # TODO: add column with float values when test_series_astype_float_to_str1 is fixed
    df = pd.DataFrame({'A': [-1, 2, 11, 5, 0, -7],
                       'B': ['aa', 'bb', 'cc', 'dd', '', 'fff']
                       })
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_df_astype_float1(self):
    '''Verifies DataFrame.astype implementation converting various types to float'''
    def test_impl(df):
        return df.astype(np.float64)
    hpat_func = hpat.jit(test_impl)

    # TODO: uncomment column with string values when test_series_astype_str_to_float64 is fixed
    df = pd.DataFrame({'A': [-1, 2, 11, 5, 0, -7],
                       # 'B': ['3.24', '1E+05', '-1', '-1.3E-01', 'nan', 'inf'],
                       'C': [3.24, 1E+05, -1, -1.3E-01, np.nan, np.inf]
                       })
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_df_astype_int1(self):
    '''Verifies DataFrame.astype implementation converting various types to int'''
    def test_impl(df):
        return df.astype(np.int32)
    hpat_func = hpat.jit(test_impl)

    n = 6
    # TODO: uncomment column with string values when test_series_astype_str_to_int32 is fixed
    df = pd.DataFrame({'A': np.ones(n, dtype=np.int64),
                       'B': np.arange(n, dtype=np.int32),
                       # 'C': ['-1', '2', '3', '0', '-7', '99'],
                       'D': np.arange(float(n), dtype=np.float32)
                       })
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_sort_parallel(self):
    """Distributed sort with a derived column; only checks output is sorted."""
    # create `kde.parquet` file
    ParquetGenerator.gen_kde_pq()

    # TODO: better parallel sort test
    def test_impl():
        df = pd.read_parquet('kde.parquet')
        df['A'] = df.points.astype(np.float64)
        df.sort_values('points', inplace=True)
        res = df.A.values
        return res

    hpat_func = hpat.jit(locals={'res:return': 'distributed'})(test_impl)

    save_min_samples = hpat.hiframes.sort.MIN_SAMPLES
    try:
        # Lower sampling threshold so the parallel path is taken at this size.
        hpat.hiframes.sort.MIN_SAMPLES = 10
        res = hpat_func()
        self.assertTrue((np.diff(res) >= 0).all())
    finally:
        # restore global val
        hpat.hiframes.sort.MIN_SAMPLES = save_min_samples
def test_itertuples(self):
    """Accumulate a numeric field over df.itertuples (r[0] is the index)."""
    def test_impl(df):
        res = 0.0
        for r in df.itertuples():
            res += r[1]
        return res

    hpat_func = hpat.jit(test_impl)
    n = 11
    df = pd.DataFrame({'A': np.arange(n), 'B': np.ones(n, np.int64)})
    self.assertEqual(hpat_func(df), test_impl(df))
def test_itertuples_str(self):
    """Concatenate a string field over df.itertuples."""
    def test_impl(df):
        res = ""
        for r in df.itertuples():
            res += r[1]
        return res

    hpat_func = hpat.jit(test_impl)
    n = 3
    df = pd.DataFrame({'A': ['aa', 'bb', 'cc'], 'B': np.ones(n, np.int64)})
    self.assertEqual(hpat_func(df), test_impl(df))
def test_itertuples_order(self):
    """itertuples must preserve column declaration order (B before A here)."""
    def test_impl(n):
        res = 0.0
        df = pd.DataFrame({'B': np.arange(n), 'A': np.ones(n, np.int64)})
        for r in df.itertuples():
            res += r[1]
        return res

    hpat_func = hpat.jit(test_impl)
    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
def test_itertuples_analysis(self):
    """tests array analysis handling of generated tuples, shapes going
    through blocks and getting used in an array dimension
    """
    def test_impl(n):
        res = 0
        df = pd.DataFrame({'B': np.arange(n), 'A': np.ones(n, np.int64)})
        for r in df.itertuples():
            if r[1] == 2:
                A = np.ones(r[1])
                res += len(A)
        return res

    hpat_func = hpat.jit(test_impl)
    n = 11
    self.assertEqual(hpat_func(n), test_impl(n))
@unittest.skipIf(platform.system() == 'Windows', "Attribute 'dtype' are different int64 and int32")
def test_df_head1(self):
    """df.head(3) on a jit-created DataFrame."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.ones(n), 'B': np.arange(n)})
        return df.head(3)
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_frame_equal(hpat_func(n), test_impl(n))
def test_pct_change1(self):
    """df.pct_change with a 3-period shift."""
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.pct_change(3)
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_frame_equal(hpat_func(n), test_impl(n))
def test_mean1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.mean()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_median1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': 2 ** np.arange(n), 'B': np.arange(n) + 1.0})
        return df.median()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_std1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.std()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_var1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.var()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_max1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.max()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_min1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.min()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_sum1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.sum()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_prod1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.prod()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_count1(self):
    # TODO: non-numeric columns should be ignored automatically
    def test_impl(n):
        df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.arange(n) + 1})
        return df.count()
    hpat_func = hpat.jit(test_impl)

    n = 11
    pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
def test_df_fillna1(self):
    """fillna with a scalar replaces NaN in a float column."""
    def pyfunc(frame):
        return frame.fillna(5.0)

    data = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0]})
    jitted = hpat.jit(pyfunc)
    pd.testing.assert_frame_equal(jitted(data), pyfunc(data))
def test_df_fillna_str1(self):
    """fillna with a string on a column containing None."""
    def test_impl(df):
        return df.fillna("dd")

    df = pd.DataFrame({'A': ['aa', 'b', None, 'ccc']})
    hpat_func = hpat.jit(test_impl)
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_df_fillna_inplace1(self):
    """fillna with inplace=True; mutated df is returned for comparison."""
    def test_impl(A):
        A.fillna(11.0, inplace=True)
        return A

    df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0]})
    df2 = df.copy()
    hpat_func = hpat.jit(test_impl)
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df2))
def test_df_reset_index1(self):
    """reset_index(drop=True) on an unboxed DataFrame."""
    def test_impl(df):
        return df.reset_index(drop=True)

    df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0]})
    hpat_func = hpat.jit(test_impl)
    pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_df_reset_index_inplace1(self):
    """reset_index with inplace=True inside jitted code."""
    def test_impl():
        df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0]})
        df.reset_index(drop=True, inplace=True)
        return df

    hpat_func = hpat.jit(test_impl)
    pd.testing.assert_frame_equal(hpat_func(), test_impl())
def test_df_dropna1(self):
    """dropna on a two-column frame; pandas index is reset before comparison
    because hpat output uses a fresh RangeIndex."""
    def test_impl(df):
        return df.dropna()

    df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0], 'B': [4, 5, 6, 7]})
    hpat_func = hpat.jit(test_impl)
    out = test_impl(df).reset_index(drop=True)
    h_out = hpat_func(df)
    pd.testing.assert_frame_equal(out, h_out)
def test_df_dropna2(self):
    """dropna on a single-column frame (index reset for comparison)."""
    def test_impl(df):
        return df.dropna()

    df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0]})
    hpat_func = hpat.jit(test_impl)
    out = test_impl(df).reset_index(drop=True)
    h_out = hpat_func(df)
    pd.testing.assert_frame_equal(out, h_out)
def test_df_dropna_inplace1(self):
    # TODO: fix error when no df is returned
    def test_impl(df):
        df.dropna(inplace=True)
        return df

    df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0], 'B': [4, 5, 6, 7]})
    df2 = df.copy()
    hpat_func = hpat.jit(test_impl)
    out = test_impl(df).reset_index(drop=True)
    h_out = hpat_func(df2)
    pd.testing.assert_frame_equal(out, h_out)
def test_df_dropna_str1(self):
def test_impl(df):
return df.dropna()
df = pd.DataFrame({'A': [1.0, 2.0, 4.0, 1.0], 'B': ['aa', 'b', None, 'ccc']})
hpat_func = hpat.jit(test_impl)
out = test_impl(df).reset_index(drop=True)
h_out = hpat_func(df)
pd.testing.assert_frame_equal(out, h_out)
def test_df_drop1(self):
def test_impl(df):
return df.drop(columns=['A'])
df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0], 'B': [4, 5, 6, 7]})
hpat_func = hpat.jit(test_impl)
pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_df_drop_inplace2(self):
# test droping after setting the column
def test_impl(df):
df2 = df[['A', 'B']]
df2['D'] = np.ones(3)
df2.drop(columns=['D'], inplace=True)
return df2
df = pd.DataFrame({'A': [1, 2, 3], 'B': [2, 3, 4]})
hpat_func = hpat.jit(test_impl)
pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_df_drop_inplace1(self):
def test_impl(df):
df.drop('A', axis=1, inplace=True)
return df
df = pd.DataFrame({'A': [1.0, 2.0, np.nan, 1.0], 'B': [4, 5, 6, 7]})
df2 = df.copy()
hpat_func = hpat.jit(test_impl)
pd.testing.assert_frame_equal(hpat_func(df), test_impl(df2))
def test_isin_df1(self):
def test_impl(df, df2):
return df.isin(df2)
hpat_func = hpat.jit(test_impl)
n = 11
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
df2 = pd.DataFrame({'A': np.arange(n), 'C': np.arange(n)**2})
df2.A[n // 2:] = n
pd.testing.assert_frame_equal(hpat_func(df, df2), test_impl(df, df2))
@unittest.skip("needs dict typing in Numba")
def test_isin_dict1(self):
def test_impl(df):
vals = {'A': [2, 3, 4], 'C': [4, 5, 6]}
return df.isin(vals)
hpat_func = hpat.jit(test_impl)
n = 11
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_isin_list1(self):
def test_impl(df):
vals = [2, 3, 4]
return df.isin(vals)
hpat_func = hpat.jit(test_impl)
n = 11
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
pd.testing.assert_frame_equal(hpat_func(df), test_impl(df))
def test_append1(self):
def test_impl(df, df2):
return df.append(df2, ignore_index=True)
hpat_func = hpat.jit(test_impl)
n = 11
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
df2 = pd.DataFrame({'A': np.arange(n), 'C': np.arange(n)**2})
df2.A[n // 2:] = n
pd.testing.assert_frame_equal(hpat_func(df, df2), test_impl(df, df2))
def test_append2(self):
def test_impl(df, df2, df3):
return df.append([df2, df3], ignore_index=True)
hpat_func = hpat.jit(test_impl)
n = 11
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
df2 = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
df2.A[n // 2:] = n
df3 = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)**2})
pd.testing.assert_frame_equal(
hpat_func(df, df2, df3), test_impl(df, df2, df3))
def test_concat_columns1(self):
def test_impl(S1, S2):
return pd.concat([S1, S2], axis=1)
hpat_func = hpat.jit(test_impl)
S1 = pd.Series([4, 5])
S2 = pd.Series([6., 7.])
# TODO: support int as column name
pd.testing.assert_frame_equal(
hpat_func(S1, S2),
test_impl(S1, S2).rename(columns={0: '0', 1: '1'}))
def test_var_rename(self):
# tests df variable replacement in hiframes_untyped where inlining
# can cause extra assignments and definition handling errors
# TODO: inline freevar
def test_impl():
df = pd.DataFrame({'A': [1, 2, 3], 'B': [2, 3, 4]})
# TODO: df['C'] = [5,6,7]
df['C'] = np.ones(3)
return inner_get_column(df)
hpat_func = hpat.jit(test_impl)
pd.testing.assert_series_equal(hpat_func(), test_impl(), check_names=False)
@unittest.skip("Implement getting columns attribute")
def test_dataframe_columns_attribute(self):
def test_impl():
df = pd.DataFrame({'A': [1, 2, 3], 'B': [2, 3, 4]})
return df.columns
hpat_func = hpat.jit(test_impl)
np.testing.assert_array_equal(hpat_func(), test_impl())
@unittest.skip("Implement getting columns attribute")
def test_dataframe_columns_iterator(self):
def test_impl():
df = pd.DataFrame({'A': [1, 2, 3], 'B': [2, 3, 4]})
return [column for column in df.columns]
hpat_func = hpat.jit(test_impl)
np.testing.assert_array_equal(hpat_func(), test_impl())
@unittest.skip("Implement set_index for DataFrame")
def test_dataframe_set_index(self):
def test_impl():
df = pd.DataFrame({'month': [1, 4, 7, 10],
'year': [2012, 2014, 2013, 2014],
'sale': [55, 40, 84, 31]})
return df.set_index('month')
hpat_func = hpat.jit(test_impl)
pd.testing.assert_frame_equal(hpat_func(), test_impl())
@unittest.skip("Implement sort_index for DataFrame")
def test_dataframe_sort_index(self):
def test_impl():
df = pd.DataFrame({'A': [1, 2, 3, 4, 5]}, index=[100, 29, 234, 1, 150])
return df.sort_index()
hpat_func = hpat.jit(test_impl)
pd.testing.assert_frame_equal(hpat_func(), test_impl())
@unittest.skip("Implement iterrows for DataFrame")
def test_dataframe_iterrows(self):
def test_impl(df):
print(df.iterrows())
return [row for _, row in df.iterrows()]
df = pd.DataFrame({'A': [1, 2, 3], 'B': [0.2, 0.5, 0.001], 'C': ['a', 'bb', 'ccc']})
hpat_func = hpat.jit(test_impl)
np.testing.assert_array_equal(hpat_func(df), test_impl(df))
@unittest.skip("Support parameter axis=1")
def test_dataframe_axis_param(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n)})
return df.sum(axis=1)
n = 100
hpat_func = hpat.jit(test_impl)
pd.testing.assert_series_equal(hpat_func(n), test_impl(n))
if __name__ == "__main__":
unittest.main()
| 34.884191 | 103 | 0.561127 | 5,571 | 37,954 | 3.642075 | 0.070723 | 0.110005 | 0.052785 | 0.069492 | 0.82691 | 0.808477 | 0.784426 | 0.754411 | 0.722178 | 0.703401 | 0 | 0.029344 | 0.291563 | 37,954 | 1,087 | 104 | 34.916283 | 0.725268 | 0.05836 | 0 | 0.650416 | 0 | 0 | 0.025775 | 0 | 0 | 0 | 0 | 0.00184 | 0.137931 | 1 | 0.224732 | false | 0 | 0.014269 | 0.033294 | 0.349584 | 0.001189 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
310f715b4276c5d6433e2ece22e561b1e8c4801b | 191 | py | Python | cygraph/graph_/__init__.py | lol-cubes/cygraph | b8dbfdcfdb81579181a382311649d166b04c768e | [
"MIT"
] | 4 | 2020-08-28T21:33:59.000Z | 2020-12-28T17:20:14.000Z | cygraph/graph_/__init__.py | lol-cubes/cygraph | b8dbfdcfdb81579181a382311649d166b04c768e | [
"MIT"
] | null | null | null | cygraph/graph_/__init__.py | lol-cubes/cygraph | b8dbfdcfdb81579181a382311649d166b04c768e | [
"MIT"
] | 4 | 2020-10-03T13:14:56.000Z | 2021-06-09T03:31:20.000Z | """Graph data strucutre implementations.
"""
from cygraph.graph_.dynamic_graph import DynamicGraph
from cygraph.graph_.static_graph import StaticGraph
from cygraph.graph_.graph import Graph
| 27.285714 | 53 | 0.837696 | 24 | 191 | 6.458333 | 0.458333 | 0.212903 | 0.309677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094241 | 191 | 6 | 54 | 31.833333 | 0.895954 | 0.193717 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
31220511025bdf8a6ef052ce48002dd0c7cf8b6c | 134 | py | Python | test/iotest/basic/std-out-err.py | gwk/glossy | 6976ca4fd1efc09d9cd670b1fe37817c05b4b529 | [
"CC0-1.0"
] | 7 | 2019-05-04T00:51:38.000Z | 2021-12-10T15:36:31.000Z | test/iotest/basic/std-out-err.py | gwk/glossy | 6976ca4fd1efc09d9cd670b1fe37817c05b4b529 | [
"CC0-1.0"
] | 1 | 2016-08-12T19:09:43.000Z | 2016-08-12T19:09:43.000Z | test/basic/std-out-err.py | gwk/iotest | bb5386c8d2e96cf99ca840fc512008ef786c4805 | [
"CC0-1.0"
] | 1 | 2016-07-30T22:38:08.000Z | 2016-07-30T22:38:08.000Z | #!/usr/bin/env python3
from sys import stderr, stdout
print('this is std out.', file=stdout)
print('this is std err.', file=stderr)
| 19.142857 | 38 | 0.708955 | 23 | 134 | 4.130435 | 0.695652 | 0.231579 | 0.315789 | 0.357895 | 0.421053 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008696 | 0.141791 | 134 | 6 | 39 | 22.333333 | 0.817391 | 0.156716 | 0 | 0 | 0 | 0 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0.666667 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 8 |
315a2405d31ee763959d4f12bd19e0e8e00cadfa | 5,831 | py | Python | pynos/versions/ver_6/ver_6_0_1/yang/brocade_openflow.py | bdeetz/pynos | bd8a34e98f322de3fc06750827d8bbc3a0c00380 | [
"Apache-2.0"
] | 84 | 2017-10-25T15:49:21.000Z | 2021-11-28T21:25:54.000Z | data/train/python/315a2405d31ee763959d4f12bd19e0e8e00cadfabrocade_openflow.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 10 | 2016-09-15T19:03:27.000Z | 2017-07-17T23:38:01.000Z | data/train/python/315a2405d31ee763959d4f12bd19e0e8e00cadfabrocade_openflow.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 24 | 2017-11-22T08:31:00.000Z | 2022-03-27T01:22:31.000Z | #!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_openflow(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def openflow_controller_controller_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name = ET.SubElement(openflow_controller, "controller-name")
controller_name.text = kwargs.pop('controller_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def openflow_controller_connection_address_controller_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name_key = ET.SubElement(openflow_controller, "controller-name")
controller_name_key.text = kwargs.pop('controller_name')
connection_address = ET.SubElement(openflow_controller, "connection-address")
controller_address = ET.SubElement(connection_address, "controller-address")
controller_address.text = kwargs.pop('controller_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def openflow_controller_connection_address_connection_method(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name_key = ET.SubElement(openflow_controller, "controller-name")
controller_name_key.text = kwargs.pop('controller_name')
connection_address = ET.SubElement(openflow_controller, "connection-address")
connection_method = ET.SubElement(connection_address, "connection-method")
connection_method.text = kwargs.pop('connection_method')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def openflow_controller_connection_address_connection_port(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name_key = ET.SubElement(openflow_controller, "controller-name")
controller_name_key.text = kwargs.pop('controller_name')
connection_address = ET.SubElement(openflow_controller, "connection-address")
connection_port = ET.SubElement(connection_address, "connection-port")
connection_port.text = kwargs.pop('connection_port')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def openflow_controller_controller_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name = ET.SubElement(openflow_controller, "controller-name")
controller_name.text = kwargs.pop('controller_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def openflow_controller_connection_address_controller_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name_key = ET.SubElement(openflow_controller, "controller-name")
controller_name_key.text = kwargs.pop('controller_name')
connection_address = ET.SubElement(openflow_controller, "connection-address")
controller_address = ET.SubElement(connection_address, "controller-address")
controller_address.text = kwargs.pop('controller_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def openflow_controller_connection_address_connection_method(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name_key = ET.SubElement(openflow_controller, "controller-name")
controller_name_key.text = kwargs.pop('controller_name')
connection_address = ET.SubElement(openflow_controller, "connection-address")
connection_method = ET.SubElement(connection_address, "connection-method")
connection_method.text = kwargs.pop('connection_method')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def openflow_controller_connection_address_connection_port(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
openflow_controller = ET.SubElement(config, "openflow-controller", xmlns="urn:brocade.com:mgmt:brocade-openflow")
controller_name_key = ET.SubElement(openflow_controller, "controller-name")
controller_name_key.text = kwargs.pop('controller_name')
connection_address = ET.SubElement(openflow_controller, "connection-address")
connection_port = ET.SubElement(connection_address, "connection-port")
connection_port.text = kwargs.pop('connection_port')
callback = kwargs.pop('callback', self._callback)
return callback(config)
| 49.837607 | 121 | 0.698336 | 604 | 5,831 | 6.513245 | 0.06457 | 0.210473 | 0.097611 | 0.106762 | 0.965938 | 0.965938 | 0.965938 | 0.965938 | 0.965938 | 0.965938 | 0 | 0 | 0.190533 | 5,831 | 117 | 122 | 49.837607 | 0.833475 | 0.047848 | 0 | 0.948718 | 1 | 0 | 0.202651 | 0.05375 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.012821 | 0 | 0.24359 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
315c9a1add3e6a8633870d3976b9c121256ed6fd | 38 | py | Python | b.py | DILIREBALI/test | 386c6c5466ab48f596f85b411db23b4f6b0e5a82 | [
"Apache-2.0"
] | null | null | null | b.py | DILIREBALI/test | 386c6c5466ab48f596f85b411db23b4f6b0e5a82 | [
"Apache-2.0"
] | null | null | null | b.py | DILIREBALI/test | 386c6c5466ab48f596f85b411db23b4f6b0e5a82 | [
"Apache-2.0"
] | null | null | null | def like():
pass
return 'like'
| 9.5 | 16 | 0.552632 | 5 | 38 | 4.2 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.315789 | 38 | 3 | 17 | 12.666667 | 0.807692 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0.333333 | 0 | 0 | 0.666667 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
317e9c2461130b2becd6630c350610a3150b9e78 | 23,765 | py | Python | game.py | XiangSshou/AI_plays_snake | 55df9308cac8f28f07a8f00b9420d77eb3c0eaab | [
"MIT"
] | null | null | null | game.py | XiangSshou/AI_plays_snake | 55df9308cac8f28f07a8f00b9420d77eb3c0eaab | [
"MIT"
] | null | null | null | game.py | XiangSshou/AI_plays_snake | 55df9308cac8f28f07a8f00b9420d77eb3c0eaab | [
"MIT"
] | null | null | null | import pygame
import colors as col
import pickle
from Arena import *
from snake import *
import time
import argparse
from input import *
if __name__ == "__main__":
# command line argument parser
ap = argparse.ArgumentParser()
ap.add_argument('-i', '--input', required=True, help='relative path of the saved pickle file')
ap.add_argument('-s', '--start', type=int, help='relative start of the saved snakes')
ap.add_argument('-v', '--vsAI', action="store_true", help='Play with AI')
ap.add_argument('-f', '--fool', action="store_true", help='Use baseline opponent')
ap.add_argument('-t', '--test', action="store_true", help='Test mode')
args = vars(ap.parse_args())
# loading the saved snakes
if args['test']:
file = open(args['input'], 'rb')
snakes = pickle.load(file)
generation = 0
start = 0;
if args['start'] is not None:
start = args['start']
generation += start
file.close()
# pygame initialization
pygame.init()
pygame.font.init()
myfont = pygame.font.SysFont('Bitstream Vera Serif', 20)
screen = pygame.display.set_mode((width, height))
# seed generated so that each snake sees same set of foods for performance comparison
arena = Arena(width, height, block_length)
win1 = 0
win2 = 0
for i in range(100):
text = 'Generation : '+('fool' if args['fool'] else str(generation))+' vs '+str(generation+1)
pygame.display.set_caption(text)
seed = random.random()
t_snake = snake(width, height, brainLayer, block_length,
random_weights=False, random_bases=False)
t_snake.Brain.weights = snakes[start].Brain.weights
t_snake.Brain.bases = snakes[start].Brain.bases
t_snake2 = snake(width, height, brainLayer, block_length, head_x=width-30, head_y=height-30,
random_weights=False, random_bases=False)
t_snake2.Brain.weights = snakes[start].Brain.weights
t_snake2.Brain.bases = snakes[start].Brain.bases
random.seed(seed)
nextFood = arena.newFood(t_snake.list)
t_snake.Brain.setNextFood(nextFood)
t_snake2.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
# checkloop = False
while t_snake.isAlive() and t_snake2.isAlive():
# checking for key presses and close button presses and pause-continue funcionality
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_p:
pressed = True
while pressed:
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_c:
pressed = False
if event.type == pygame.KEYDOWN and event.key == pygame.K_q:
t_snake.crash_wall = True
t_snake.crash_body = True
if event.type == pygame.QUIT:
pygame.quit()
quit()
# getting result from the neural network
if not args['fool']:
result = t_snake.Brain.decision_from_nn(t_snake.head_x, t_snake.head_y, t_snake.list, t_snake2.list, t_snake.direction)
else:
result = t_snake.Brain.decision_from_fool(t_snake.head_x, t_snake.head_y, t_snake.list, t_snake2.list, t_snake.direction)
# moving the snake
# print(result)
alive = t_snake.move(result, t_snake2)
if not alive:
t_snake.score *= 0.7
if t_snake.crash_wall and t_snake.crash_body:
print('killed. Score : Snake[1]', t_snake.score,':',t_snake2.score,'Snake[2]')
elif t_snake.crash_wall and not t_snake.crash_body:
print('Snake[1] crashed on wall,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
elif t_snake.crash_snake:
print('Snake[1] crashed on Snake[1],', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
else:
print('Snake[1] crashed on body,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
time.sleep(2)
break
if (t_snake.head_x, t_snake.head_y) == arena.food:
t_snake.steps_taken = 0
t_snake.toIncrease = True
t_snake.score += 10
nextFood = arena.newFood(t_snake.list + t_snake2.list)
t_snake.Brain.setNextFood(nextFood)
t_snake2.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
# getting result from the neural network
result = t_snake2.Brain.decision_from_nn(
t_snake2.head_x, t_snake2.head_y, t_snake2.list, t_snake.list, t_snake2.direction)
# moving the snake
# print(result)
alive = t_snake2.move(result, t_snake)
if not alive:
t_snake2.score *= 0.7
if t_snake2.crash_wall and t_snake2.crash_body:
print('killed. Score : Snake[1]', t_snake.score,':',t_snake2.score,'Snake[2]')
elif t_snake2.crash_wall and not t_snake2.crash_body:
print('Snake[2] crashed on wall,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
elif t_snake2.crash_snake:
print('Snake[2] crashed on Snake[1],', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
else:
print('Snake[2] crashed on body,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
time.sleep(2)
break
if (t_snake2.head_x, t_snake2.head_y) == arena.food:
t_snake2.steps_taken = 0
t_snake2.toIncrease = True
t_snake2.score += 10
nextFood = arena.newFood(t_snake.list + t_snake2.list)
t_snake.Brain.setNextFood(nextFood)
t_snake2.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
if t_snake.score<t_snake2.score:
win2+=1
else:
win1+=1
print("Baseline wins", win1, "AI wins", win2)
pygame.quit()
quit()
elif not args['vsAI']:
file = open(args['input'], 'rb')
snakes = pickle.load(file)
generation = 0
if args['start'] is not None:
start = args['start']
snakes = snakes[start:]
generation += start
file.close()
# pygame initialization
pygame.init()
pygame.font.init()
myfont = pygame.font.SysFont('Bitstream Vera Serif', 20)
screen = pygame.display.set_mode((width, height))
# seed generated so that each snake sees same set of foods for performance comparison
seed = random.random()
arena = Arena(width, height, block_length)
for i in range(len(snakes)-1):
text = 'Generation : '+('fool' if args['fool'] else str(generation))+' vs '+str(generation+1)
pygame.display.set_caption(text)
print('---- Snake[1]: Generation:', 'fool' if args['fool'] else generation,' vs Snake[2]: Generation:', generation+1,'----')
t_snake = snake(width, height, brainLayer, block_length,
random_weights=False, random_bases=False)
t_snake.Brain.weights = snakes[i].Brain.weights
t_snake.Brain.bases = snakes[i].Brain.bases
t_snake2 = snake(width, height, brainLayer, block_length, head_x=width-30, head_y=height-30,
random_weights=False, random_bases=False)
t_snake2.Brain.weights = snakes[i+1].Brain.weights
t_snake2.Brain.bases = snakes[i+1].Brain.bases
random.seed(seed)
nextFood = arena.newFood(t_snake.list)
t_snake.Brain.setNextFood(nextFood)
t_snake2.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
# checkloop = False
while t_snake.isAlive() and t_snake2.isAlive():
# checking for key presses and close button presses and pause-continue funcionality
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_p:
pressed = True
while pressed:
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_c:
pressed = False
if event.type == pygame.KEYDOWN and event.key == pygame.K_q:
t_snake.crash_wall = True
t_snake.crash_body = True
if event.type == pygame.QUIT:
pygame.quit()
quit()
# getting result from the neural network
if not args['fool']:
result = t_snake.Brain.decision_from_nn(t_snake.head_x, t_snake.head_y, t_snake.list, t_snake2.list, t_snake.direction)
else:
result = t_snake.Brain.decision_from_fool(t_snake.head_x, t_snake.head_y, t_snake.list, t_snake2.list, t_snake.direction)
# moving the snake
# print(result)
alive = t_snake.move(result, t_snake2)
# # checking for loops made by snake
# if t_snake.steps_taken > (len(t_snake.list)/5*100):
# if not checkloop:
# checkloop = True
# any_point = (t_snake.head_x, t_snake.head_y)
# times = 0
# if (t_snake.head_x, t_snake.head_y) == any_point:
# times += 1
# if times > 4:
# t_snake.crash_wall = True
# t_snake.crash_body = True
# alive = False
# else:
# checkloop = False
if not alive:
t_snake.score *= 0.7
if t_snake.crash_wall and t_snake.crash_body:
print('killed. Score : Snake[1]', t_snake.score,':',t_snake2.score,'Snake[2]')
elif t_snake.crash_wall and not t_snake.crash_body:
print('Snake[1] crashed on wall,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
elif t_snake.crash_snake:
print('Snake[1] crashed on Snake[1],', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
else:
print('Snake[1] crashed on body,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
time.sleep(2)
break
if (t_snake.head_x, t_snake.head_y) == arena.food:
t_snake.steps_taken = 0
t_snake.toIncrease = True
t_snake.score += 10
nextFood = arena.newFood(t_snake.list + t_snake2.list)
t_snake.Brain.setNextFood(nextFood)
t_snake2.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
# getting result from the neural network
result = t_snake2.Brain.decision_from_nn(
t_snake2.head_x, t_snake2.head_y, t_snake2.list, t_snake.list, t_snake2.direction)
# moving the snake
# print(result)
alive = t_snake2.move(result, t_snake)
# # checking for loops made by snake
# if t_snake2.steps_taken > (len(t_snake2.list)/5*100):
# if not checkloop:
# checkloop = True
# any_point = (t_snake2.head_x, t_snake2.head_y)
# times = 0
# if (t_snake2.head_x, t_snake2.head_y) == any_point:
# times += 1
# if times > 4:
# t_snake2.crash_wall = True
# t_snake2.crash_body = True
# alive = False
# else:
# checkloop = False
if not alive:
t_snake2.score *= 0.7
if t_snake2.crash_wall and t_snake2.crash_body:
print('killed. Score : Snake[1]', t_snake.score,':',t_snake2.score,'Snake[2]')
elif t_snake2.crash_wall and not t_snake2.crash_body:
print('Snake[2] crashed on wall,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
elif t_snake2.crash_snake:
print('Snake[2] crashed on Snake[1],', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
else:
print('Snake[2] crashed on body,', 'Score : Snake[1]', int(t_snake.score),':',int(t_snake2.score),'Snake[2]')
time.sleep(2)
break
if (t_snake2.head_x, t_snake2.head_y) == arena.food:
t_snake2.steps_taken = 0
t_snake2.toIncrease = True
t_snake2.score += 10
nextFood = arena.newFood(t_snake.list + t_snake2.list)
t_snake.Brain.setNextFood(nextFood)
t_snake2.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
time.sleep(0.03)
generation += 1
pygame.quit()
quit()
else:
file = open(args['input'], 'rb')
snakes = pickle.load(file)
generation = 0
if args['start'] is not None:
start = args['start']
generation += start
file.close()
# pygame initialization
pygame.init()
pygame.font.init()
myfont = pygame.font.SysFont('Bitstream Vera Serif', 20)
screen = pygame.display.set_mode((width, height))
life = 1
arena = Arena(width, height, block_length)
while True:
text = 'YOU vs Generation: '+str(generation+1)
pygame.display.set_caption(text)
seed = random.random()
t_snake = snake(width, height, brainLayer, block_length, head_x=width-30, head_y=height-30,
random_weights=False, random_bases=False)
t_snake.Brain.weights = snakes[generation].Brain.weights
t_snake.Brain.bases = snakes[generation].Brain.bases
t_snake2 = snake(width, height, brainLayer, block_length,
random_weights=False, random_bases=False)
random.seed(seed)
nextFood = arena.newFood(t_snake.list + t_snake2.list)
t_snake.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
checkloop = False
while t_snake.isAlive() and t_snake2.isAlive():
# getting result from the neural network
result = t_snake.Brain.decision_from_nn(
t_snake.head_x, t_snake.head_y, t_snake.list, t_snake2.list, t_snake.direction)
# moving the snake
# print(result)
alive = t_snake.move(result, t_snake2)
# checking for loops made by snake
if t_snake.steps_taken > (len(t_snake.list)/5*100):
if not checkloop:
checkloop = True
any_point = (t_snake.head_x, t_snake.head_y)
times = 0
if (t_snake.head_x, t_snake.head_y) == any_point:
times += 1
if times > 4:
t_snake.crash_wall = True
t_snake.crash_body = True
alive = False
else:
checkloop = False
if not alive:
t_snake.score *= 0.7
if t_snake.crash_wall and t_snake.crash_body:
print('killed. Score : YOU', t_snake2.score,':',t_snake.score,'AI')
elif t_snake.crash_wall and not t_snake.crash_body:
print('AI crashed on wall,', 'Score : YOU', int(t_snake2.score),':',int(t_snake.score),'AI')
elif t_snake.crash_snake:
print('AI crashed on YOU,', 'Score : YOU', int(t_snake2.score),':',int(t_snake.score),'AI')
else:
print('AI crashed on body,', 'Score : YOU', int(t_snake2.score),':',int(t_snake.score),'AI')
time.sleep(2)
break
if (t_snake.head_x, t_snake.head_y) == arena.food:
t_snake.score += 10
t_snake.steps_taken = 0
t_snake.toIncrease = True
nextFood = arena.newFood(t_snake.list + t_snake2.list)
t_snake.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
pygame.display.update()
# checking for key presses and close button presses and pause-continue funcionality
# checking for key presses and close button presses and pause-continue funcionality
result = 1
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_p:
pressed = True
while pressed:
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_c:
pressed = False
if event.type == pygame.KEYDOWN and event.key == pygame.K_q:
t_snake.crash_wall = True
t_snake.crash_body = True
if event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
result = t_snake2.dirToRes('north');
pressed = False
if event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN:
result = t_snake2.dirToRes('south');
pressed = False
if event.type == pygame.KEYDOWN and event.key == pygame.K_LEFT:
result = t_snake2.dirToRes('west');
pressed = False
if event.type == pygame.KEYDOWN and event.key == pygame.K_RIGHT:
result = t_snake2.dirToRes('east');
pressed = False
if event.type == pygame.QUIT:
pygame.quit()
quit()
alive = t_snake2.move(result, t_snake)
# checking for loops made by snake
if t_snake2.steps_taken > (len(t_snake2.list)/5*100):
if not checkloop:
checkloop = True
any_point = (t_snake2.head_x, t_snake2.head_y)
times = 0
if (t_snake2.head_x, t_snake2.head_y) == any_point:
times += 1
if times > 4:
t_snake2.crash_wall = True
t_snake2.crash_body = True
alive = False
else:
checkloop = False
if not alive:
t_snake2.score *= 0.7
if t_snake2.crash_wall and t_snake2.crash_body:
print('killed. Score : YOU', t_snake2.score,':',t_snake.score,'AI')
elif t_snake2.crash_wall and not t_snake2.crash_body:
print('YOU crashed on wall,', 'Score : YOU', int(t_snake2.score),':',int(t_snake.score),'AI')
elif t_snake2.crash_snake:
print('YOU crashed on AI,', 'Score : YOU', int(t_snake2.score),':',int(t_snake.score),'AI')
else:
print('YOU crashed on body,', 'Score : YOU', int(t_snake2.score),':',int(t_snake.score),'AI')
time.sleep(2)
break
if (t_snake2.head_x, t_snake2.head_y) == arena.food:
t_snake2.score += 10
t_snake2.steps_taken = 0
t_snake2.toIncrease = True
nextFood = arena.newFood(t_snake.list + t_snake2.list)
t_snake.Brain.setNextFood(nextFood)
screen = arena.setup(screen, col.bg, col.gray)
screen = arena.drawFood(screen, col.food)
screen = t_snake.draw(screen, col.snake1)
screen = t_snake2.draw(screen, col.snake2)
pygame.display.update()
pygame.display.update()
time.sleep(0.1)
life += 1
pygame.quit()
quit()
| 52.461369 | 141 | 0.513697 | 2,742 | 23,765 | 4.283005 | 0.071116 | 0.077657 | 0.029036 | 0.021458 | 0.916298 | 0.916298 | 0.898501 | 0.883004 | 0.875426 | 0.871339 | 0 | 0.021387 | 0.382201 | 23,765 | 452 | 142 | 52.577434 | 0.778504 | 0.079992 | 0 | 0.808901 | 0 | 0 | 0.068339 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.020942 | 0 | 0.020942 | 0.068063 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
31ac4040ce0205a48ef194b18305ff8a774f7e3f | 35 | py | Python | python/testData/inspections/RedundantParenthesesInTuples_after.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/inspections/RedundantParenthesesInTuples_after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/inspections/RedundantParenthesesInTuples_after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | print("%d%s%s" % ((1,) + ("", ""))) | 35 | 35 | 0.257143 | 5 | 35 | 1.8 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033333 | 0.142857 | 35 | 1 | 35 | 35 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
9ecf409354659e1ec41b274ab3f994a61a81e93a | 45,031 | py | Python | sdk/python/pulumi_azure/network/firewall.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 109 | 2018-06-18T00:19:44.000Z | 2022-02-20T05:32:57.000Z | sdk/python/pulumi_azure/network/firewall.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 663 | 2018-06-18T21:08:46.000Z | 2022-03-31T20:10:11.000Z | sdk/python/pulumi_azure/network/firewall.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 41 | 2018-07-19T22:37:38.000Z | 2022-03-14T10:56:26.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['FirewallArgs', 'Firewall']
@pulumi.input_type
class FirewallArgs:
    """Typed input bag for constructing a `Firewall` resource.

    NOTE(review): this class is emitted by the Pulumi Terraform Bridge
    (tfgen); edits here will be lost on regeneration. Only comments and
    docstrings are added in this change — code is untouched.

    Only `resource_group_name` is required; every other input is optional
    and is stored on the instance only when a non-None value is supplied,
    so `pulumi.get` distinguishes "unset" from an explicit value.
    """
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 firewall_policy_id: Optional[pulumi.Input[str]] = None,
                 ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 management_ip_configuration: Optional[pulumi.Input['FirewallManagementIpConfigurationArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 sku_name: Optional[pulumi.Input[str]] = None,
                 sku_tier: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 threat_intel_mode: Optional[pulumi.Input[str]] = None,
                 virtual_hub: Optional[pulumi.Input['FirewallVirtualHubArgs']] = None,
                 zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Firewall resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource. Changing this forces a new resource to be created.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of DNS servers that the Azure Firewall will direct DNS traffic to the for name resolution.
        :param pulumi.Input[str] firewall_policy_id: The ID of the Firewall Policy applied to this Firewall.
        :param pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]] ip_configurations: An `ip_configuration` block as documented below.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input['FirewallManagementIpConfigurationArgs'] management_ip_configuration: A `management_ip_configuration` block as documented below, which allows force-tunnelling of traffic to be performed by the firewall. Adding or removing this block or changing the `subnet_id` in an existing block forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Firewall. Changing this forces a new resource to be created.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] private_ip_ranges: A list of SNAT private CIDR IP ranges, or the special string `IANAPrivateRanges`, which indicates Azure Firewall does not SNAT when the destination IP address is a private range per IANA RFC 1918.
        :param pulumi.Input[str] sku_name: Sku name of the Firewall. Possible values are `AZFW_Hub` and `AZFW_VNet`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku_tier: Sku tier of the Firewall. Possible values are `Premium` and `Standard`. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] threat_intel_mode: The operation mode for threat intelligence-based filtering. Possible values are: `Off`, `Alert`,`Deny` and `""`(empty string). Defaults to `Alert`.
        :param pulumi.Input['FirewallVirtualHubArgs'] virtual_hub: A `virtual_hub` block as documented below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] zones: Specifies the availability zones in which the Azure Firewall should be created. Changing this forces a new resource to be created.
        """
        # Required input: always recorded.
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Optional inputs: recorded only when explicitly provided, so the
        # Pulumi engine can tell "unset" apart from an explicit value.
        if dns_servers is not None:
            pulumi.set(__self__, "dns_servers", dns_servers)
        if firewall_policy_id is not None:
            pulumi.set(__self__, "firewall_policy_id", firewall_policy_id)
        if ip_configurations is not None:
            pulumi.set(__self__, "ip_configurations", ip_configurations)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if management_ip_configuration is not None:
            pulumi.set(__self__, "management_ip_configuration", management_ip_configuration)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if private_ip_ranges is not None:
            pulumi.set(__self__, "private_ip_ranges", private_ip_ranges)
        if sku_name is not None:
            pulumi.set(__self__, "sku_name", sku_name)
        if sku_tier is not None:
            pulumi.set(__self__, "sku_tier", sku_tier)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if threat_intel_mode is not None:
            pulumi.set(__self__, "threat_intel_mode", threat_intel_mode)
        if virtual_hub is not None:
            pulumi.set(__self__, "virtual_hub", virtual_hub)
        if zones is not None:
            pulumi.set(__self__, "zones", zones)

    # Each property below maps the Python snake_case attribute to its
    # camelCase Pulumi schema name and delegates storage to pulumi.get/set.
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group in which to create the resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="dnsServers")
    def dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of DNS servers that the Azure Firewall will direct DNS traffic to the for name resolution.
        """
        return pulumi.get(self, "dns_servers")

    @dns_servers.setter
    def dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "dns_servers", value)

    @property
    @pulumi.getter(name="firewallPolicyId")
    def firewall_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Firewall Policy applied to this Firewall.
        """
        return pulumi.get(self, "firewall_policy_id")

    @firewall_policy_id.setter
    def firewall_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "firewall_policy_id", value)

    @property
    @pulumi.getter(name="ipConfigurations")
    def ip_configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]]]:
        """
        An `ip_configuration` block as documented below.
        """
        return pulumi.get(self, "ip_configurations")

    @ip_configurations.setter
    def ip_configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]]]):
        pulumi.set(self, "ip_configurations", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter(name="managementIpConfiguration")
    def management_ip_configuration(self) -> Optional[pulumi.Input['FirewallManagementIpConfigurationArgs']]:
        """
        A `management_ip_configuration` block as documented below, which allows force-tunnelling of traffic to be performed by the firewall. Adding or removing this block or changing the `subnet_id` in an existing block forces a new resource to be created.
        """
        return pulumi.get(self, "management_ip_configuration")

    @management_ip_configuration.setter
    def management_ip_configuration(self, value: Optional[pulumi.Input['FirewallManagementIpConfigurationArgs']]):
        pulumi.set(self, "management_ip_configuration", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Firewall. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="privateIpRanges")
    def private_ip_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of SNAT private CIDR IP ranges, or the special string `IANAPrivateRanges`, which indicates Azure Firewall does not SNAT when the destination IP address is a private range per IANA RFC 1918.
        """
        return pulumi.get(self, "private_ip_ranges")

    @private_ip_ranges.setter
    def private_ip_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "private_ip_ranges", value)

    @property
    @pulumi.getter(name="skuName")
    def sku_name(self) -> Optional[pulumi.Input[str]]:
        """
        Sku name of the Firewall. Possible values are `AZFW_Hub` and `AZFW_VNet`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "sku_name")

    @sku_name.setter
    def sku_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sku_name", value)

    @property
    @pulumi.getter(name="skuTier")
    def sku_tier(self) -> Optional[pulumi.Input[str]]:
        """
        Sku tier of the Firewall. Possible values are `Premium` and `Standard`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "sku_tier")

    @sku_tier.setter
    def sku_tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sku_tier", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="threatIntelMode")
    def threat_intel_mode(self) -> Optional[pulumi.Input[str]]:
        """
        The operation mode for threat intelligence-based filtering. Possible values are: `Off`, `Alert`,`Deny` and `""`(empty string). Defaults to `Alert`.
        """
        return pulumi.get(self, "threat_intel_mode")

    @threat_intel_mode.setter
    def threat_intel_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "threat_intel_mode", value)

    @property
    @pulumi.getter(name="virtualHub")
    def virtual_hub(self) -> Optional[pulumi.Input['FirewallVirtualHubArgs']]:
        """
        A `virtual_hub` block as documented below.
        """
        return pulumi.get(self, "virtual_hub")

    @virtual_hub.setter
    def virtual_hub(self, value: Optional[pulumi.Input['FirewallVirtualHubArgs']]):
        pulumi.set(self, "virtual_hub", value)

    @property
    @pulumi.getter
    def zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies the availability zones in which the Azure Firewall should be created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "zones")

    @zones.setter
    def zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "zones", value)
@pulumi.input_type
class _FirewallState:
    """Typed state bag used to look up and filter existing `Firewall` resources.

    NOTE(review): generated by the Pulumi Terraform Bridge (tfgen); edits
    will be lost on regeneration. Only comments and docstrings are added
    in this change — code is untouched.

    Unlike `FirewallArgs`, every field here (including
    `resource_group_name`) is optional, because state lookups may match on
    any subset of properties.
    """
    def __init__(__self__, *,
                 dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 firewall_policy_id: Optional[pulumi.Input[str]] = None,
                 ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 management_ip_configuration: Optional[pulumi.Input['FirewallManagementIpConfigurationArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 sku_name: Optional[pulumi.Input[str]] = None,
                 sku_tier: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 threat_intel_mode: Optional[pulumi.Input[str]] = None,
                 virtual_hub: Optional[pulumi.Input['FirewallVirtualHubArgs']] = None,
                 zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering Firewall resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of DNS servers that the Azure Firewall will direct DNS traffic to the for name resolution.
        :param pulumi.Input[str] firewall_policy_id: The ID of the Firewall Policy applied to this Firewall.
        :param pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]] ip_configurations: An `ip_configuration` block as documented below.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input['FirewallManagementIpConfigurationArgs'] management_ip_configuration: A `management_ip_configuration` block as documented below, which allows force-tunnelling of traffic to be performed by the firewall. Adding or removing this block or changing the `subnet_id` in an existing block forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Firewall. Changing this forces a new resource to be created.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] private_ip_ranges: A list of SNAT private CIDR IP ranges, or the special string `IANAPrivateRanges`, which indicates Azure Firewall does not SNAT when the destination IP address is a private range per IANA RFC 1918.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku_name: Sku name of the Firewall. Possible values are `AZFW_Hub` and `AZFW_VNet`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku_tier: Sku tier of the Firewall. Possible values are `Premium` and `Standard`. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] threat_intel_mode: The operation mode for threat intelligence-based filtering. Possible values are: `Off`, `Alert`,`Deny` and `""`(empty string). Defaults to `Alert`.
        :param pulumi.Input['FirewallVirtualHubArgs'] virtual_hub: A `virtual_hub` block as documented below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] zones: Specifies the availability zones in which the Azure Firewall should be created. Changing this forces a new resource to be created.
        """
        # Record only explicitly provided values so pulumi.get can
        # distinguish "unset" from a supplied value during lookups.
        if dns_servers is not None:
            pulumi.set(__self__, "dns_servers", dns_servers)
        if firewall_policy_id is not None:
            pulumi.set(__self__, "firewall_policy_id", firewall_policy_id)
        if ip_configurations is not None:
            pulumi.set(__self__, "ip_configurations", ip_configurations)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if management_ip_configuration is not None:
            pulumi.set(__self__, "management_ip_configuration", management_ip_configuration)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if private_ip_ranges is not None:
            pulumi.set(__self__, "private_ip_ranges", private_ip_ranges)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if sku_name is not None:
            pulumi.set(__self__, "sku_name", sku_name)
        if sku_tier is not None:
            pulumi.set(__self__, "sku_tier", sku_tier)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if threat_intel_mode is not None:
            pulumi.set(__self__, "threat_intel_mode", threat_intel_mode)
        if virtual_hub is not None:
            pulumi.set(__self__, "virtual_hub", virtual_hub)
        if zones is not None:
            pulumi.set(__self__, "zones", zones)

    # Each property maps the Python snake_case attribute to its camelCase
    # Pulumi schema name and delegates storage to pulumi.get/set.
    @property
    @pulumi.getter(name="dnsServers")
    def dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of DNS servers that the Azure Firewall will direct DNS traffic to the for name resolution.
        """
        return pulumi.get(self, "dns_servers")

    @dns_servers.setter
    def dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "dns_servers", value)

    @property
    @pulumi.getter(name="firewallPolicyId")
    def firewall_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Firewall Policy applied to this Firewall.
        """
        return pulumi.get(self, "firewall_policy_id")

    @firewall_policy_id.setter
    def firewall_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "firewall_policy_id", value)

    @property
    @pulumi.getter(name="ipConfigurations")
    def ip_configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]]]:
        """
        An `ip_configuration` block as documented below.
        """
        return pulumi.get(self, "ip_configurations")

    @ip_configurations.setter
    def ip_configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['FirewallIpConfigurationArgs']]]]):
        pulumi.set(self, "ip_configurations", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter(name="managementIpConfiguration")
    def management_ip_configuration(self) -> Optional[pulumi.Input['FirewallManagementIpConfigurationArgs']]:
        """
        A `management_ip_configuration` block as documented below, which allows force-tunnelling of traffic to be performed by the firewall. Adding or removing this block or changing the `subnet_id` in an existing block forces a new resource to be created.
        """
        return pulumi.get(self, "management_ip_configuration")

    @management_ip_configuration.setter
    def management_ip_configuration(self, value: Optional[pulumi.Input['FirewallManagementIpConfigurationArgs']]):
        pulumi.set(self, "management_ip_configuration", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Firewall. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="privateIpRanges")
    def private_ip_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of SNAT private CIDR IP ranges, or the special string `IANAPrivateRanges`, which indicates Azure Firewall does not SNAT when the destination IP address is a private range per IANA RFC 1918.
        """
        return pulumi.get(self, "private_ip_ranges")

    @private_ip_ranges.setter
    def private_ip_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "private_ip_ranges", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which to create the resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="skuName")
    def sku_name(self) -> Optional[pulumi.Input[str]]:
        """
        Sku name of the Firewall. Possible values are `AZFW_Hub` and `AZFW_VNet`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "sku_name")

    @sku_name.setter
    def sku_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sku_name", value)

    @property
    @pulumi.getter(name="skuTier")
    def sku_tier(self) -> Optional[pulumi.Input[str]]:
        """
        Sku tier of the Firewall. Possible values are `Premium` and `Standard`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "sku_tier")

    @sku_tier.setter
    def sku_tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sku_tier", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="threatIntelMode")
    def threat_intel_mode(self) -> Optional[pulumi.Input[str]]:
        """
        The operation mode for threat intelligence-based filtering. Possible values are: `Off`, `Alert`,`Deny` and `""`(empty string). Defaults to `Alert`.
        """
        return pulumi.get(self, "threat_intel_mode")

    @threat_intel_mode.setter
    def threat_intel_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "threat_intel_mode", value)

    @property
    @pulumi.getter(name="virtualHub")
    def virtual_hub(self) -> Optional[pulumi.Input['FirewallVirtualHubArgs']]:
        """
        A `virtual_hub` block as documented below.
        """
        return pulumi.get(self, "virtual_hub")

    @virtual_hub.setter
    def virtual_hub(self, value: Optional[pulumi.Input['FirewallVirtualHubArgs']]):
        pulumi.set(self, "virtual_hub", value)

    @property
    @pulumi.getter
    def zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies the availability zones in which the Azure Firewall should be created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "zones")

    @zones.setter
    def zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "zones", value)
class Firewall(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
firewall_policy_id: Optional[pulumi.Input[str]] = None,
ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FirewallIpConfigurationArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
management_ip_configuration: Optional[pulumi.Input[pulumi.InputType['FirewallManagementIpConfigurationArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
sku_tier: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
threat_intel_mode: Optional[pulumi.Input[str]] = None,
virtual_hub: Optional[pulumi.Input[pulumi.InputType['FirewallVirtualHubArgs']]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages an Azure Firewall.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_virtual_network = azure.network.VirtualNetwork("exampleVirtualNetwork",
address_spaces=["10.0.0.0/16"],
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
example_subnet = azure.network.Subnet("exampleSubnet",
resource_group_name=example_resource_group.name,
virtual_network_name=example_virtual_network.name,
address_prefixes=["10.0.1.0/24"])
example_public_ip = azure.network.PublicIp("examplePublicIp",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
allocation_method="Static",
sku="Standard")
example_firewall = azure.network.Firewall("exampleFirewall",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
ip_configurations=[azure.network.FirewallIpConfigurationArgs(
name="configuration",
subnet_id=example_subnet.id,
public_ip_address_id=example_public_ip.id,
)])
```
## Import
Azure Firewalls can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:network/firewall:Firewall example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/azureFirewalls/testfirewall
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of DNS servers that the Azure Firewall will direct DNS traffic to the for name resolution.
:param pulumi.Input[str] firewall_policy_id: The ID of the Firewall Policy applied to this Firewall.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FirewallIpConfigurationArgs']]]] ip_configurations: An `ip_configuration` block as documented below.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[pulumi.InputType['FirewallManagementIpConfigurationArgs']] management_ip_configuration: A `management_ip_configuration` block as documented below, which allows force-tunnelling of traffic to be performed by the firewall. Adding or removing this block or changing the `subnet_id` in an existing block forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the Firewall. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] private_ip_ranges: A list of SNAT private CIDR IP ranges, or the special string `IANAPrivateRanges`, which indicates Azure Firewall does not SNAT when the destination IP address is a private range per IANA RFC 1918.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku_name: Sku name of the Firewall. Possible values are `AZFW_Hub` and `AZFW_VNet`. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku_tier: Sku tier of the Firewall. Possible values are `Premium` and `Standard`. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] threat_intel_mode: The operation mode for threat intelligence-based filtering. Possible values are: `Off`, `Alert`,`Deny` and `""`(empty string). Defaults to `Alert`.
:param pulumi.Input[pulumi.InputType['FirewallVirtualHubArgs']] virtual_hub: A `virtual_hub` block as documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] zones: Specifies the availability zones in which the Azure Firewall should be created. Changing this forces a new resource to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: FirewallArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an Azure Firewall.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_virtual_network = azure.network.VirtualNetwork("exampleVirtualNetwork",
address_spaces=["10.0.0.0/16"],
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
example_subnet = azure.network.Subnet("exampleSubnet",
resource_group_name=example_resource_group.name,
virtual_network_name=example_virtual_network.name,
address_prefixes=["10.0.1.0/24"])
example_public_ip = azure.network.PublicIp("examplePublicIp",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
allocation_method="Static",
sku="Standard")
example_firewall = azure.network.Firewall("exampleFirewall",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
ip_configurations=[azure.network.FirewallIpConfigurationArgs(
name="configuration",
subnet_id=example_subnet.id,
public_ip_address_id=example_public_ip.id,
)])
```
## Import
Azure Firewalls can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:network/firewall:Firewall example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/azureFirewalls/testfirewall
```
:param str resource_name: The name of the resource.
:param FirewallArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(FirewallArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
firewall_policy_id: Optional[pulumi.Input[str]] = None,
ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FirewallIpConfigurationArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
management_ip_configuration: Optional[pulumi.Input[pulumi.InputType['FirewallManagementIpConfigurationArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
sku_tier: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
threat_intel_mode: Optional[pulumi.Input[str]] = None,
virtual_hub: Optional[pulumi.Input[pulumi.InputType['FirewallVirtualHubArgs']]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = FirewallArgs.__new__(FirewallArgs)
__props__.__dict__["dns_servers"] = dns_servers
__props__.__dict__["firewall_policy_id"] = firewall_policy_id
__props__.__dict__["ip_configurations"] = ip_configurations
__props__.__dict__["location"] = location
__props__.__dict__["management_ip_configuration"] = management_ip_configuration
__props__.__dict__["name"] = name
__props__.__dict__["private_ip_ranges"] = private_ip_ranges
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["sku_name"] = sku_name
__props__.__dict__["sku_tier"] = sku_tier
__props__.__dict__["tags"] = tags
__props__.__dict__["threat_intel_mode"] = threat_intel_mode
__props__.__dict__["virtual_hub"] = virtual_hub
__props__.__dict__["zones"] = zones
super(Firewall, __self__).__init__(
'azure:network/firewall:Firewall',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        firewall_policy_id: Optional[pulumi.Input[str]] = None,
        ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FirewallIpConfigurationArgs']]]]] = None,
        location: Optional[pulumi.Input[str]] = None,
        management_ip_configuration: Optional[pulumi.Input[pulumi.InputType['FirewallManagementIpConfigurationArgs']]] = None,
        name: Optional[pulumi.Input[str]] = None,
        private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        resource_group_name: Optional[pulumi.Input[str]] = None,
        sku_name: Optional[pulumi.Input[str]] = None,
        sku_tier: Optional[pulumi.Input[str]] = None,
        tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        threat_intel_mode: Optional[pulumi.Input[str]] = None,
        virtual_hub: Optional[pulumi.Input[pulumi.InputType['FirewallVirtualHubArgs']]] = None,
        zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Firewall':
    """
    Get an existing Firewall resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.
    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of DNS servers that the Azure Firewall will direct DNS traffic to the for name resolution.
    :param pulumi.Input[str] firewall_policy_id: The ID of the Firewall Policy applied to this Firewall.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FirewallIpConfigurationArgs']]]] ip_configurations: An `ip_configuration` block as documented below.
    :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    :param pulumi.Input[pulumi.InputType['FirewallManagementIpConfigurationArgs']] management_ip_configuration: A `management_ip_configuration` block as documented below, which allows force-tunnelling of traffic to be performed by the firewall. Adding or removing this block or changing the `subnet_id` in an existing block forces a new resource to be created.
    :param pulumi.Input[str] name: Specifies the name of the Firewall. Changing this forces a new resource to be created.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] private_ip_ranges: A list of SNAT private CIDR IP ranges, or the special string `IANAPrivateRanges`, which indicates Azure Firewall does not SNAT when the destination IP address is a private range per IANA RFC 1918.
    :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the resource. Changing this forces a new resource to be created.
    :param pulumi.Input[str] sku_name: Sku name of the Firewall. Possible values are `AZFW_Hub` and `AZFW_VNet`. Changing this forces a new resource to be created.
    :param pulumi.Input[str] sku_tier: Sku tier of the Firewall. Possible values are `Premium` and `Standard`. Changing this forces a new resource to be created.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
    :param pulumi.Input[str] threat_intel_mode: The operation mode for threat intelligence-based filtering. Possible values are: `Off`, `Alert`,`Deny` and `""`(empty string). Defaults to `Alert`.
    :param pulumi.Input[pulumi.InputType['FirewallVirtualHubArgs']] virtual_hub: A `virtual_hub` block as documented below.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] zones: Specifies the availability zones in which the Azure Firewall should be created. Changing this forces a new resource to be created.
    """
    # Bind the provider ID onto the options so the engine performs a lookup
    # instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _FirewallState.__new__(_FirewallState)
    # Copy every lookup argument onto the state object in a single pass.
    __props__.__dict__.update({
        "dns_servers": dns_servers,
        "firewall_policy_id": firewall_policy_id,
        "ip_configurations": ip_configurations,
        "location": location,
        "management_ip_configuration": management_ip_configuration,
        "name": name,
        "private_ip_ranges": private_ip_ranges,
        "resource_group_name": resource_group_name,
        "sku_name": sku_name,
        "sku_tier": sku_tier,
        "tags": tags,
        "threat_intel_mode": threat_intel_mode,
        "virtual_hub": virtual_hub,
        "zones": zones,
    })
    return Firewall(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="dnsServers")
def dns_servers(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """
    A list of DNS servers that the Azure Firewall will direct DNS traffic to for name resolution.
    """
    # Resolved output value stored under the snake_case key "dns_servers".
    return pulumi.get(self, "dns_servers")
@property
@pulumi.getter(name="firewallPolicyId")
def firewall_policy_id(self) -> pulumi.Output[Optional[str]]:
    """
    The ID of the Firewall Policy applied to this Firewall.
    """
    # Resolved output value stored under the snake_case key "firewall_policy_id".
    return pulumi.get(self, "firewall_policy_id")
@property
@pulumi.getter(name="ipConfigurations")
def ip_configurations(self) -> pulumi.Output[Optional[Sequence['outputs.FirewallIpConfiguration']]]:
    """
    An `ip_configuration` block as documented below.
    """
    # Resolved output value stored under the snake_case key "ip_configurations".
    return pulumi.get(self, "ip_configurations")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
    """
    Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    """
    # Resolved output value stored under the key "location".
    return pulumi.get(self, "location")
@property
@pulumi.getter(name="managementIpConfiguration")
def management_ip_configuration(self) -> pulumi.Output[Optional['outputs.FirewallManagementIpConfiguration']]:
    """
    A `management_ip_configuration` block as documented below, which allows force-tunnelling of traffic to be performed by the firewall. Adding or removing this block or changing the `subnet_id` in an existing block forces a new resource to be created.
    """
    # Resolved output value stored under the snake_case key "management_ip_configuration".
    return pulumi.get(self, "management_ip_configuration")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    Specifies the name of the Firewall. Changing this forces a new resource to be created.
    """
    # Resolved output value stored under the key "name".
    return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateIpRanges")
def private_ip_ranges(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """
    A list of SNAT private CIDR IP ranges, or the special string `IANAPrivateRanges`, which indicates Azure Firewall does not SNAT when the destination IP address is a private range per IANA RFC 1918.
    """
    # Resolved output value stored under the snake_case key "private_ip_ranges".
    return pulumi.get(self, "private_ip_ranges")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
    """
    The name of the resource group in which to create the resource. Changing this forces a new resource to be created.
    """
    # Resolved output value stored under the snake_case key "resource_group_name".
    return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="skuName")
def sku_name(self) -> pulumi.Output[str]:
    """
    Sku name of the Firewall. Possible values are `AZFW_Hub` and `AZFW_VNet`. Changing this forces a new resource to be created.
    """
    # Resolved output value stored under the snake_case key "sku_name".
    return pulumi.get(self, "sku_name")
@property
@pulumi.getter(name="skuTier")
def sku_tier(self) -> pulumi.Output[str]:
    """
    Sku tier of the Firewall. Possible values are `Premium` and `Standard`. Changing this forces a new resource to be created.
    """
    # Resolved output value stored under the snake_case key "sku_tier".
    return pulumi.get(self, "sku_tier")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """
    A mapping of tags to assign to the resource.
    """
    # Resolved output value stored under the key "tags".
    return pulumi.get(self, "tags")
@property
@pulumi.getter(name="threatIntelMode")
def threat_intel_mode(self) -> pulumi.Output[Optional[str]]:
    """
    The operation mode for threat intelligence-based filtering. Possible values are: `Off`, `Alert`, `Deny` and `""` (empty string). Defaults to `Alert`.
    """
    # Resolved output value stored under the snake_case key "threat_intel_mode".
    return pulumi.get(self, "threat_intel_mode")
@property
@pulumi.getter(name="virtualHub")
def virtual_hub(self) -> pulumi.Output[Optional['outputs.FirewallVirtualHub']]:
    """
    A `virtual_hub` block as documented below.
    """
    # Resolved output value stored under the snake_case key "virtual_hub".
    return pulumi.get(self, "virtual_hub")
@property
@pulumi.getter
def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """
    Specifies the availability zones in which the Azure Firewall should be created. Changing this forces a new resource to be created.
    """
    # Resolved output value stored under the key "zones".
    return pulumi.get(self, "zones")
| 53.736277 | 364 | 0.681086 | 5,494 | 45,031 | 5.384055 | 0.048234 | 0.09334 | 0.068627 | 0.044625 | 0.942224 | 0.932421 | 0.924679 | 0.921771 | 0.920047 | 0.909263 | 0 | 0.003498 | 0.219227 | 45,031 | 837 | 365 | 53.800478 | 0.837822 | 0.402167 | 0 | 0.862661 | 1 | 0 | 0.125585 | 0.050073 | 0 | 0 | 0 | 0 | 0 | 1 | 0.165236 | false | 0.002146 | 0.015021 | 0 | 0.27897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7342b164240fec71a956b8f48b220274a15924cb | 10,648 | py | Python | imperative/python/test/unit/functional/test_functional_distributed.py | jonrzhang/MegEngine | 94b72022156a068d3e87bceed7e1c7ae77dada16 | [
"Apache-2.0"
] | 1 | 2021-03-29T04:25:30.000Z | 2021-03-29T04:25:30.000Z | imperative/python/test/unit/functional/test_functional_distributed.py | jonrzhang/MegEngine | 94b72022156a068d3e87bceed7e1c7ae77dada16 | [
"Apache-2.0"
] | null | null | null | imperative/python/test/unit/functional/test_functional_distributed.py | jonrzhang/MegEngine | 94b72022156a068d3e87bceed7e1c7ae77dada16 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import platform
import numpy as np
import pytest
import megengine as mge
import megengine.distributed as dist
from megengine import Parameter, Tensor, tensor
from megengine.core._imperative_rt.core2 import sync
from megengine.device import get_default_device, set_default_device
from megengine.distributed.helper import get_device_count_by_fork
from megengine.functional.distributed import (
all_gather,
all_reduce_max,
all_reduce_min,
all_reduce_sum,
all_to_all,
broadcast,
gather,
reduce_scatter_sum,
reduce_sum,
remote_recv,
remote_send,
scatter,
)
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_reduce_sum():
    """reduce_sum: rank 0 receives the element-wise sum; rank 1's result compares equal to 0."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = reduce_sum(tensor(data[rank]))
        if rank == 0:
            assert np.allclose(result.numpy(), expect[rank])
        else:
            assert np.allclose(result.numpy(), 0)

    for shape in [(2, 3), (8, 10), (99, 77)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        worker((lhs, rhs), (lhs + rhs, None))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_broadcast():
    """broadcast: both ranks end up holding rank 0's tensor."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = broadcast(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    for shape in [(2, 3), (8, 10), (99, 77)]:
        root = np.random.rand(*shape).astype("float32")
        # rank 1 starts with a different tensor but must receive rank 0's.
        worker((root, root + 1), (root, root))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_all_gather():
    """all_gather: every rank receives the concatenation of both ranks' tensors."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = all_gather(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    for shape in [(2, 3), (8, 10), (99, 77)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        gathered = np.concatenate((lhs, rhs))
        worker((lhs, rhs), (gathered, gathered))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_reduce_scatter_sum():
    """reduce_scatter_sum: each rank gets its half (along axis 0) of the element-wise sum."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = reduce_scatter_sum(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    # first dimension must be divisible by the world size (2)
    for shape in [(2, 4), (8, 10), (88, 44)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        total = lhs + rhs
        half = shape[0] // 2
        worker((lhs, rhs), (total[:half], total[half:]))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_all_reduce_sum():
    """all_reduce_sum: every rank holds the element-wise sum of both inputs."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = all_reduce_sum(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    for shape in [(2, 3), (8, 10), (99, 77)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        total = lhs + rhs
        worker((lhs, rhs), (total, total))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_all_reduce_max():
    """all_reduce_max: every rank holds the element-wise maximum of both inputs."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = all_reduce_max(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    for shape in [(2, 3), (8, 10), (99, 77)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        peak = np.maximum(lhs, rhs)
        worker((lhs, rhs), (peak, peak))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_all_reduce_min():
    """all_reduce_min: every rank holds the element-wise minimum of both inputs."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = all_reduce_min(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    for shape in [(2, 3), (8, 10), (99, 77)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        low = np.minimum(lhs, rhs)
        worker((lhs, rhs), (low, low))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_gather():
    """gather: rank 0 receives the concatenation; rank 1's result compares equal to 0."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = gather(tensor(data[rank]))
        if rank == 0:
            assert np.allclose(result.numpy(), expect[rank])
        else:
            assert np.allclose(result.numpy(), 0)

    for shape in [(2, 3), (8, 10), (99, 77)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        worker((lhs, rhs), (np.concatenate((lhs, rhs)), None))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_scatter():
    """scatter: rank 0's tensor is split along axis 0 and one half is delivered to each rank."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = scatter(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    for shape in [(2, 3), (8, 10), (100, 77)]:
        root = np.random.rand(*shape).astype("float32")
        half = shape[0] // 2
        # rank 1's own input is ignored; both halves come from rank 0's tensor.
        worker((root, root + 1), (root[:half], root[half:]))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_all_to_all():
    """all_to_all: rank i receives the i-th slice (along axis 0) of every rank's tensor."""

    @dist.launcher(n_gpus=2)
    def worker(data, expect):
        rank = dist.get_rank()
        result = all_to_all(tensor(data[rank]))
        assert np.allclose(result.numpy(), expect[rank])

    for shape in [(2, 3), (8, 10), (100, 77)]:
        lhs = np.random.rand(*shape).astype("float32")
        rhs = np.random.rand(*shape).astype("float32")
        half = shape[0] // 2
        expect_rank0 = np.concatenate((lhs[:half], rhs[:half]))
        expect_rank1 = np.concatenate((lhs[half:], rhs[half:]))
        worker((lhs, rhs), (expect_rank0, expect_rank1))
@pytest.mark.skipif(
    platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
    platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_io_remote():
    """remote_send/remote_recv move a tensor from rank 0 (gpu0) to rank 1 (gpu1)."""
    payload = np.random.rand(4, 5).astype(np.float32)

    @dist.launcher(n_gpus=2)
    def worker():
        if dist.get_rank() == 0:  # sender side
            remote_send(Tensor(payload, device="gpu0"), 1)
            sync()
        else:  # receiver side
            received = remote_recv(0, payload.shape, payload.dtype)
            assert received.device == "gpu1"
            np.testing.assert_almost_equal(payload, received.numpy())

    worker()
| 30.863768 | 88 | 0.621619 | 1,488 | 10,648 | 4.331317 | 0.101478 | 0.06827 | 0.081924 | 0.081924 | 0.836152 | 0.833049 | 0.833049 | 0.82886 | 0.82886 | 0.82886 | 0 | 0.023434 | 0.226521 | 10,648 | 344 | 89 | 30.953488 | 0.759106 | 0.036063 | 0 | 0.710345 | 0 | 0 | 0.121428 | 0 | 0 | 0 | 0 | 0 | 0.048276 | 1 | 0.110345 | false | 0 | 0.034483 | 0 | 0.144828 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
734f5eec4076485a926fa4d47cfdded6b6b0d638 | 2,857 | py | Python | get_members.py | nacbotics5/Telethon_bot | 59fe8426be80cb1597266cf03770d5d3617be96e | [
"MIT"
] | 5 | 2021-04-22T12:12:44.000Z | 2021-11-12T19:51:43.000Z | get_members.py | nacbotics5/Telethon_bot | 59fe8426be80cb1597266cf03770d5d3617be96e | [
"MIT"
] | 1 | 2021-08-04T07:07:57.000Z | 2021-08-05T08:31:07.000Z | get_members.py | nacbotics5/Telethon_bot | 59fe8426be80cb1597266cf03770d5d3617be96e | [
"MIT"
] | 2 | 2021-07-10T20:34:40.000Z | 2021-11-08T18:13:33.000Z | import csv
from config import*
from telethon import TelegramClient, sync
client = TelegramClient(phone, api_id, api_hash)
# Map channel username -> entity for every channel dialog this account can see.
channels = {
    dialog.entity.username: dialog.entity
    for dialog in client.get_dialogs()
    if dialog.is_channel
}
def save_members(username, id, access_hash, name):
    """Append one member row to tbot.csv.

    Bug fix: the original called writeheader() on every invocation, so the
    header row was interleaved before every data row. The header is now
    written only when the file is still empty. ``newline=''`` is also passed
    to open(), as the csv module requires, to avoid blank lines on Windows.
    """
    with open('tbot.csv', mode='a+', newline='') as csv_file:
        fieldnames = ['username', 'id', 'access_hash', 'name']
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        csv_file.seek(0, 2)  # 'a+' read position is platform-dependent; jump to EOF explicitly
        if csv_file.tell() == 0:
            writer.writeheader()
        writer.writerow({'username': username, 'id': id, 'access_hash': access_hash, 'name': name})
# NOTE(review): script-level flow below. This file appears to contain two
# concatenated copies of the script — the last line of this section fuses
# "save_members(...)" with the next copy's "import csv" and does not parse.
for i,channel_name in enumerate(channels.keys()):
    # best-effort listing; any exception raised while printing is swallowed
    try:print(f"{i} ::: {channel_name}")
    except:pass
channel_id = int(input("Please select a number :: "))
channel_name = list(channels.keys())[channel_id]
channel = channels[channel_name]
print(f"\n\nYou have selected:: {channel_name}")
print("\n\nGetting the members of this channel\n")
# NOTE(review): this try has no except/finally clause here — presumably lost
# when the two copies were fused; confirm against the original repository.
try:
    # get all the users and print them
    for user in client.get_participants(channel):
        name = f"{user.first_name} {user.last_name}"
        save_members(user.username,user.id,user.access_hash,name)import csv
from config import*
from telethon import TelegramClient, sync
client = TelegramClient(phone, api_id, api_hash)
client.connect()
# Interactive first-run login: Telegram sends a code that must be typed in.
if not client.is_user_authorized():
    client.send_code_request(phone)
    client.sign_in(phone, input('Enter the code: '))
def get_channels():
    """Interactively pick one accessible channel and dump its member list to tbot.csv."""
    # get all the channels that I can access
    channels = {d.entity.username: d.entity
                for d in client.get_dialogs()
                if d.is_channel}
    for i,channel_name in enumerate(channels.keys()):
        # best-effort listing; any exception raised while printing is swallowed
        try:print(f"{i} ::: {channel_name}")
        except:pass
    channel_id = int(input("Please select a number :: "))
    channel_name = list(channels.keys())[channel_id]
    channel = channels[channel_name]
    print(f"\n\nYou have selected:: {channel_name}")
    print("\n\nGetting the members of this channel\n")
    try:
        # 'w+' truncates: each run rewrites tbot.csv from scratch
        with open('tbot.csv', mode='w+') as csv_file:
            fieldnames = ['username','id','access_hash','name']
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
            writer.writeheader()
            # get all the users and print them
            for user in client.get_participants(channel):
                name = f"{user.first_name} {user.last_name}"
                writer.writerow({'username': user.username,'id':user.id,'access_hash':user.access_hash,'name':name})
                print(user.username,user.id,user.access_hash,name)
    except Exception as e:
        print(e)
# NOTE(review): residue of the two concatenated script copies — get_channels()
# is invoked twice, and the trailing print/except lines are orphaned fragments
# that do not parse at module level. Confirm against the original repository.
get_channels()
try:
    get_channels()
except Exception as e:
    print(e)
    print(user.username,user.id,user.access_hash,name)
except Exception as e:
    print(e)
| 28.858586 | 116 | 0.655933 | 390 | 2,857 | 4.671795 | 0.212821 | 0.072448 | 0.061471 | 0.039517 | 0.834248 | 0.80022 | 0.787047 | 0.787047 | 0.767289 | 0.767289 | 0 | 0 | 0.218411 | 2,857 | 98 | 117 | 29.153061 | 0.815943 | 0.050053 | 0 | 0.809524 | 0 | 0 | 0.169191 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.031746 | 0.095238 | null | null | 0.174603 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7351505d3c39d049b1cecf0e4e2964d7140f847b | 7,069 | py | Python | 3GPP Meeting Helper/tests/test_tdocs_by_agenda_comments.py | telekom/3gpp-meeting-tools | 1276a62835fd595487aa817c9500c42c3f5e35f3 | [
"MIT"
] | null | null | null | 3GPP Meeting Helper/tests/test_tdocs_by_agenda_comments.py | telekom/3gpp-meeting-tools | 1276a62835fd595487aa817c9500c42c3f5e35f3 | [
"MIT"
] | 1 | 2020-09-04T06:26:41.000Z | 2020-09-04T06:26:41.000Z | 3GPP Meeting Helper/tests/test_tdocs_by_agenda_comments.py | telekom/3gpp-meeting-tools | 1276a62835fd595487aa817c9500c42c3f5e35f3 | [
"MIT"
] | 3 | 2020-06-12T02:09:48.000Z | 2021-08-30T10:36:37.000Z | import unittest
import parsing.html as html_parser
class Test_test_tdocs_by_agenda_comments(unittest.TestCase):
    """Exercises html_parser.parse_tdoc_comments() on comment strings from real TDoc lists."""

    def _assert_parsed(self, comments, merge_of='', merged_to='', revision_of='', revised_to='', **parser_kwargs):
        """Parse *comments* and compare all four relationship fields to the expected values."""
        parsed = html_parser.parse_tdoc_comments(comments, **parser_kwargs)
        self.assertEqual(parsed.merge_of, merge_of)
        self.assertEqual(parsed.merged_to, merged_to)
        self.assertEqual(parsed.revision_of, revision_of)
        self.assertEqual(parsed.revised_to, revised_to)

    def test_tdoc_comment_empty(self):
        self._assert_parsed('')

    def test_tdoc_comment_none(self):
        self._assert_parsed(None)

    def test_tdoc_comment_s21901105(self):
        self._assert_parsed(
            'Revision of S2-1900064, merging S2-1900142, S2-1900585, S2-1900281, S2-1900147 and part of S2-1900587. Revised in parallel session to S2-1901260.',
            merge_of='S2-1900142, S2-1900585, S2-1900281, S2-1900147, S2-1900587',
            revision_of='S2-1900064',
            revised_to='S2-1901260')

    def test_tdoc_comment_s219000064(self):
        self._assert_parsed(
            'Revised in parallel session, merging S2-1900142, S2-1900585, S2-1900281, S2-1900147 and part of S2-1900587, to S2-1901105.',
            merge_of='S2-1900142, S2-1900585, S2-1900281, S2-1900147, S2-1900587',
            revised_to='S2-1901105')

    def test_tdoc_comment_s21900142(self):
        self._assert_parsed('Merged into S2-1901105.', merged_to='S2-1901105')

    def test_tdoc_comment_s21900587(self):
        self._assert_parsed('Merged into S2-1901105 and S2-1901106.',
                            merged_to='S2-1901105, S2-1901106')

    def test_tdoc_comment_s21901262(self):
        self._assert_parsed('Revision of S2-1901108. This CR was agreed.',
                            revision_of='S2-1901108')

    def test_tdoc_comment_s21900272(self):
        # The fixture string deliberately starts with a space.
        self._assert_parsed(
            ' Revised in parallel session, merging parts of S2-1900611, to S2-1901220.',
            merge_of='S2-1900611',
            revised_to='S2-1901220')

    def test_tdoc_comment_s21901220(self):
        self._assert_parsed(
            'Revision of S2-1900272, merging parts of S2-1900611. This was postponed.',
            merge_of='S2-1900611',
            revision_of='S2-1900272')

    def test_tdoc_comment_s21900611(self):
        # The fixture string deliberately ends with a space.
        self._assert_parsed(
            'Partially merged into S2-1901220 and revised in S2-1901221. Revised in parallel session to S2-1901221. ',
            merged_to='S2-1901220',
            revised_to='S2-1901221')

    def test_tdoc_comment_s2190441(self):
        self._assert_parsed(
            'Revised in parallel session, merging S2-1900502 and S2-1900563 to S2-1901222.',
            merge_of='S2-1900502, S2-1900563',
            revised_to='S2-1901222')

    def test_tdoc_comment_s2190497(self):
        # Default parsing: the revision from a previous meeting is not reported.
        self._assert_parsed(
            'Revision of S2-1811853 from S2#129BIS. Revised in parallel session to S2-1901140. NSSAI IWK',
            revised_to='S2-1901140')

    def test_tdoc_comment_s2190497_2(self):
        self._assert_parsed(
            'Revision of S2-1811853 from S2#129BIS. Revised in parallel session to S2-1901140. NSSAI IWK',
            revision_of='S2-1811853',
            revised_to='S2-1901140',
            ignore_from_previous_meetings=False)

    def test_tdoc_comment_s2190497_3(self):
        self._assert_parsed(
            'Revision of S2-1811853 from S2#129BIS. Revised in parallel session to S2-1901140. NSSAI IWK',
            revised_to='S2-1901140',
            ignore_from_previous_meetings=True)

    def test_tdoc_comment_s21908318(self):
        self._assert_parsed(
            'Revision of S2-1907759. Revised in parallel session, merging related CRs, to S2-1908320.',
            revision_of='S2-1907759',
            revised_to='S2-1908320')
if __name__ == '__main__':
    # Allows running this test module directly, outside a test runner.
    unittest.main()
| 55.226563 | 166 | 0.729806 | 870 | 7,069 | 5.642529 | 0.104598 | 0.213893 | 0.256671 | 0.354451 | 0.878794 | 0.826849 | 0.804645 | 0.779589 | 0.763088 | 0.763088 | 0 | 0.111054 | 0.170746 | 7,069 | 127 | 167 | 55.661417 | 0.726373 | 0 | 0 | 0.563636 | 0 | 0.054545 | 0.198755 | 0 | 0 | 0 | 0 | 0 | 0.545455 | 1 | 0.136364 | false | 0 | 0.018182 | 0 | 0.163636 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
b40309618d81da81ee595892eb3da732947b2b57 | 396 | py | Python | flask_rebar/swagger_generation/__init__.py | barakalon/flask-rebar | 65fd8dba4ed90bea7cecf27e1ec7737ddd0cdaf1 | [
"MIT"
] | null | null | null | flask_rebar/swagger_generation/__init__.py | barakalon/flask-rebar | 65fd8dba4ed90bea7cecf27e1ec7737ddd0cdaf1 | [
"MIT"
] | 1 | 2019-04-23T22:37:02.000Z | 2019-04-23T23:26:33.000Z | flask_rebar/swagger_generation/__init__.py | barakalon/flask-rebar | 65fd8dba4ed90bea7cecf27e1ec7737ddd0cdaf1 | [
"MIT"
] | null | null | null | from flask_rebar.swagger_generation.swagger_generator import ExternalDocumentation
from flask_rebar.swagger_generation.swagger_generator import SwaggerV2Generator
from flask_rebar.swagger_generation.swagger_generator import Tag
from flask_rebar.swagger_generation.marshmallow_to_swagger import sets_swagger_attr
from flask_rebar.swagger_generation.marshmallow_to_swagger import ConverterRegistry
| 66 | 83 | 0.924242 | 49 | 396 | 7.081633 | 0.306122 | 0.129683 | 0.201729 | 0.302594 | 0.786744 | 0.786744 | 0.786744 | 0.786744 | 0.32853 | 0 | 0 | 0.00266 | 0.050505 | 396 | 5 | 84 | 79.2 | 0.920213 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
b467e861bfe7b35c56b6c85833fa6ba319695561 | 307 | py | Python | template_preprocess/tests.py | vegitron/django-template-preprocess | 22014260aafe3916a6105f98848dbe076609785a | [
"Apache-2.0"
] | null | null | null | template_preprocess/tests.py | vegitron/django-template-preprocess | 22014260aafe3916a6105f98848dbe076609785a | [
"Apache-2.0"
] | 5 | 2017-07-25T21:49:25.000Z | 2018-09-19T00:59:32.000Z | template_preprocess/tests.py | vegitron/django-template-preprocess | 22014260aafe3916a6105f98848dbe076609785a | [
"Apache-2.0"
] | 1 | 2020-07-28T19:42:58.000Z | 2020-07-28T19:42:58.000Z | from template_preprocess.test.include import TestIncludeBlock
from template_preprocess.test.extend_block import TestExtendBlock
from template_preprocess.test.minify import TestHTMLMinify
from template_preprocess.test.static import TestStaticTag
from template_preprocess.test.compress import TestCompressTag
| 51.166667 | 65 | 0.90228 | 36 | 307 | 7.527778 | 0.444444 | 0.221402 | 0.405904 | 0.479705 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.065147 | 307 | 5 | 66 | 61.4 | 0.944251 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b47f2a6934ed63ffdab8e00ef65391df72a06c8a | 100,242 | py | Python | s3tests_boto3/functional/test_sts.py | Rjerk/s3-tests | 4a89a9a5b26604cf3ef54ae796faa41e24da9f88 | [
"MIT"
] | 194 | 2015-01-04T03:53:19.000Z | 2022-03-31T12:49:17.000Z | s3tests_boto3/functional/test_sts.py | Rjerk/s3-tests | 4a89a9a5b26604cf3ef54ae796faa41e24da9f88 | [
"MIT"
] | 248 | 2015-01-04T19:31:16.000Z | 2022-03-28T20:03:44.000Z | s3tests_boto3/functional/test_sts.py | Rjerk/s3-tests | 4a89a9a5b26604cf3ef54ae796faa41e24da9f88 | [
"MIT"
] | 204 | 2015-01-12T06:11:45.000Z | 2022-03-30T18:08:35.000Z | import boto3
import botocore.session
from botocore.exceptions import ClientError
from botocore.exceptions import ParamValidationError
from nose.tools import eq_ as eq
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
import isodate
import email.utils
import datetime
import threading
import re
import pytz
from collections import OrderedDict
import requests
import json
import base64
import hmac
import hashlib
import xml.etree.ElementTree as ET
import time
import operator
import nose
import os
import string
import random
import socket
import ssl
import logging
from collections import namedtuple
from email.header import decode_header
from . import(
get_iam_client,
get_sts_client,
get_client,
get_alt_user_id,
get_config_endpoint,
get_new_bucket_name,
get_parameter_name,
get_main_aws_access_key,
get_main_aws_secret_key,
get_thumbprint,
get_aud,
get_token,
get_realm_name,
check_webidentity,
get_iam_access_key,
get_iam_secret_key,
get_sub,
get_azp,
get_user_token
)
log = logging.getLogger(__name__)
def create_role(iam_client,path,rolename,policy_document,description,sessionduration,permissionboundary,tag_list=None):
    """Create an IAM role; return (error_code, create_role_response, rolename).

    error_code is None on success.  description, sessionduration and
    permissionboundary are accepted for call-site compatibility but are not
    forwarded to the API call.  rolename defaults to a generated parameter
    name when None.
    """
    role_err = None
    # bug fix: must be initialized, otherwise the return below raises
    # NameError when create_role() fails with ClientError
    role_response = None
    if rolename is None:
        rolename = get_parameter_name()
    if tag_list is None:
        tag_list = []
    try:
        role_response = iam_client.create_role(Path=path,RoleName=rolename,AssumeRolePolicyDocument=policy_document,Tags=tag_list)
    except ClientError as e:
        # bug fix: botocore nests the code under 'Error'; e.response['Code']
        # raised KeyError.  Matches the extraction style used elsewhere here.
        role_err = e.response.get('Error', {}).get('Code')
    return (role_err,role_response,rolename)
def put_role_policy(iam_client,rolename,policyname,role_policy):
    """Attach an inline policy to a role; return (error_code, response).

    error_code is None on success.  policyname defaults to a generated
    parameter name when None.
    """
    role_err = None
    # bug fix: must be initialized, otherwise the return below raises
    # NameError when put_role_policy() fails with ClientError
    role_response = None
    if policyname is None:
        policyname = get_parameter_name()
    try:
        role_response = iam_client.put_role_policy(RoleName=rolename,PolicyName=policyname,PolicyDocument=role_policy)
    except ClientError as e:
        # bug fix: error code lives under 'Error' in the botocore response
        role_err = e.response.get('Error', {}).get('Code')
    return (role_err,role_response)
def put_user_policy(iam_client,username,policyname,policy_document):
    """Attach an inline policy to a user; return (error_code, response).

    error_code is None on success.  policyname defaults to a generated
    parameter name when None.
    """
    role_err = None
    # bug fix: must be initialized, otherwise the return below raises
    # NameError when put_user_policy() fails with ClientError
    role_response = None
    if policyname is None:
        policyname = get_parameter_name()
    try:
        role_response = iam_client.put_user_policy(UserName=username,PolicyName=policyname,PolicyDocument=policy_document)
    except ClientError as e:
        # bug fix: error code lives under 'Error' in the botocore response
        role_err = e.response.get('Error', {}).get('Code')
    return (role_err,role_response)
def get_s3_client_using_iam_creds():
    """Return an S3 client authenticated with the configured IAM user's keys."""
    return boto3.client(
        's3',
        aws_access_key_id=get_iam_access_key(),
        aws_secret_access_key=get_iam_secret_key(),
        endpoint_url=get_config_endpoint(),
        region_name='',
    )
def create_oidc_provider(iam_client, url, clientidlist, thumbprintlist):
    """Create an OIDC provider, then look it up; return (oidc_arn, oidc_error).

    Creation failure (e.g. provider already exists) is tolerated: the ARN is
    re-derived from the URL and confirmed with get_open_id_connect_provider.
    On total failure returns (None, error_code).
    """
    oidc_arn = None
    oidc_error = None
    if clientidlist is None:
        clientidlist = []
    try:
        oidc_response = iam_client.create_open_id_connect_provider(
            Url=url,
            ClientIDList=clientidlist,
            ThumbprintList=thumbprintlist,
        )
        oidc_arn = oidc_response['OpenIDConnectProviderArn']
        print (oidc_arn)
    except ClientError as e:
        # bug fix: the code is nested under 'Error' in the botocore response
        oidc_error = e.response.get('Error', {}).get('Code')
        print (oidc_error)
    try:
        oidc_error = None
        print (url)
        # the provider ARN embeds the bare host/path, so strip any scheme prefix
        if url.startswith('http://'):
            url = url[len('http://'):]
        elif url.startswith('https://'):
            url = url[len('https://'):]
        elif url.startswith('www.'):
            url = url[len('www.'):]
        oidc_arn = 'arn:aws:iam:::oidc-provider/{}'.format(url)
        print (url)
        print (oidc_arn)
        oidc_response = iam_client.get_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
    except ClientError as e:
        oidc_arn = None
        # bug fix: record the failure; previously this path returned
        # (None, None), which callers checking oidc_error read as success
        oidc_error = e.response.get('Error', {}).get('Code')
    return (oidc_arn, oidc_error)
def get_s3_resource_using_iam_creds():
    """Return an S3 service resource authenticated with the configured IAM user's keys."""
    return boto3.resource(
        's3',
        aws_access_key_id=get_iam_access_key(),
        aws_secret_access_key=get_iam_secret_key(),
        endpoint_url=get_config_endpoint(),
        region_name='',
    )
@attr(resource='get session token')
@attr(method='get')
@attr(operation='check')
@attr(assertion='s3 ops only accessible by temporary credentials')
@attr('test_of_sts')
def test_get_session_token():
    """S3 ops succeed when performed with temporary GetSessionToken credentials."""
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    sts_user_id = get_alt_user_id()
    default_endpoint = get_config_endpoint()
    # deny s3:* unless the request is STS-authenticated; always allow GetSessionToken
    user_policy = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"],\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}},{\"Effect\":\"Allow\",\"Action\":\"sts:GetSessionToken\",\"Resource\":\"*\",\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}}]}"
    policy_err, policy_resp = put_user_policy(iam_client, sts_user_id, None, user_policy)
    eq(policy_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    token_resp = sts_client.get_session_token()
    eq(token_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    temp_creds = token_resp['Credentials']
    # build an S3 client from the temporary credentials only
    s3_client = boto3.client(
        's3',
        aws_access_key_id=temp_creds['AccessKeyId'],
        aws_secret_access_key=temp_creds['SecretAccessKey'],
        aws_session_token=temp_creds['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    s3bucket = s3_client.create_bucket(Bucket=bucket_name)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    # cleanup
    s3_client.delete_bucket(Bucket=bucket_name)
@attr(resource='get session token')
@attr(method='get')
@attr(operation='check')
@attr(assertion='s3 ops denied by permanent credentials')
@attr('test_of_sts')
def test_get_session_token_permanent_creds_denied():
    """Permanent access keys paired with a temporary session token must be denied."""
    s3bucket_error=None
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    sts_user_id=get_alt_user_id()
    default_endpoint=get_config_endpoint()
    s3_main_access_key=get_main_aws_access_key()
    s3_main_secret_key=get_main_aws_secret_key()
    # deny s3:* unless the request is STS-authenticated; always allow GetSessionToken
    user_policy = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"],\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}},{\"Effect\":\"Allow\",\"Action\":\"sts:GetSessionToken\",\"Resource\":\"*\",\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}}]}"
    (resp_err,resp)=put_user_policy(iam_client,sts_user_id,None,user_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    response=sts_client.get_session_token()
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    # deliberately mix the *permanent* keys with the temporary session token
    s3_client=boto3.client('s3',
        aws_access_key_id = s3_main_access_key,
        aws_secret_access_key = s3_main_secret_key,
        aws_session_token = response['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_name = get_new_bucket_name()
    try:
        s3bucket = s3_client.create_bucket(Bucket=bucket_name)
    except ClientError as e:
        s3bucket_error = e.response.get("Error", {}).get("Code")
    eq(s3bucket_error,'AccessDenied')
@attr(resource='assume role')
@attr(method='get')
@attr(operation='check')
@attr(assertion='role policy allows all s3 ops')
@attr('test_of_sts')
def test_assume_role_allow():
    """AssumeRole with an allow-all-s3 role policy permits bucket create and delete."""
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    sts_user_id=get_alt_user_id()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    # trust policy: only the alt user may assume this role
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    # permission policy: allow every s3 action on every resource
    role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    # S3 client built from the temporary role credentials
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_name = get_new_bucket_name()
    s3bucket = s3_client.create_bucket(Bucket=bucket_name)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
    bkt = s3_client.delete_bucket(Bucket=bucket_name)
    eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
@attr(resource='assume role')
@attr(method='get')
@attr(operation='check')
@attr(assertion='role policy denies all s3 ops')
@attr('test_of_sts')
def test_assume_role_deny():
    """AssumeRole with a deny-all-s3 role policy rejects bucket creation."""
    s3bucket_error=None
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    sts_user_id=get_alt_user_id()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    # trust policy: only the alt user may assume this role
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    # permission policy: explicitly deny every s3 action
    role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_name = get_new_bucket_name()
    try:
        s3bucket = s3_client.create_bucket(Bucket=bucket_name)
    except ClientError as e:
        s3bucket_error = e.response.get("Error", {}).get("Code")
    eq(s3bucket_error,'AccessDenied')
@attr(resource='assume role')
@attr(method='get')
@attr(operation='check')
@attr(assertion='creds expire so all s3 ops fails')
@attr('test_of_sts')
def test_assume_role_creds_expiry():
    """S3 ops must fail once the assumed-role credentials have expired.

    Requests the minimum session duration (900 s), waits it out, then expects
    AccessDenied from a bucket create performed with the stale credentials.
    """
    # bug fix: initialize so a (wrongly) successful create_bucket fails the
    # final eq() with AssertionError instead of raising NameError.  Matches
    # the sibling tests (e.g. test_assume_role_deny).
    s3bucket_error=None
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    sts_user_id=get_alt_user_id()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    # trust policy: only the alt user may assume this role
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,DurationSeconds=900)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    # wait out the full (minimum) session duration so the creds lapse
    time.sleep(900)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_name = get_new_bucket_name()
    try:
        s3bucket = s3_client.create_bucket(Bucket=bucket_name)
    except ClientError as e:
        s3bucket_error = e.response.get("Error", {}).get("Code")
    eq(s3bucket_error,'AccessDenied')
@attr(resource='assume role')
@attr(method='head')
@attr(operation='check')
@attr(assertion='HEAD fails with 403 when role policy denies s3:ListBucket')
@attr('test_of_sts')
def test_assume_role_deny_head_nonexistent():
    """Without s3:ListBucket, HEAD on a missing key must return 403, not 404."""
    # create a bucket with the normal s3 client
    bucket_name = get_new_bucket_name()
    get_client().create_bucket(Bucket=bucket_name)
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    sts_user_id=get_alt_user_id()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    # trust policy: only the alt user may assume this role
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}'
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name)
    # allow GetObject but deny ListBucket
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:GetObject","Principal":"*","Resource":"arn:aws:s3:::*"}}'
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='')
    status=200
    try:
        s3_client.head_object(Bucket=bucket_name, Key='nonexistent')
    except ClientError as e:
        status = e.response['ResponseMetadata']['HTTPStatusCode']
    # key existence must not be revealed without ListBucket: expect 403
    eq(status,403)
@attr(resource='assume role')
@attr(method='head')
@attr(operation='check')
@attr(assertion='HEAD fails with 404 when role policy allows s3:ListBucket')
@attr('test_of_sts')
def test_assume_role_allow_head_nonexistent():
    """With s3:ListBucket granted, HEAD on a missing key must return 404."""
    # create a bucket with the normal s3 client
    bucket_name = get_new_bucket_name()
    get_client().create_bucket(Bucket=bucket_name)
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    sts_user_id=get_alt_user_id()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    # trust policy: only the alt user may assume this role
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}'
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name)
    # allow GetObject and ListBucket
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:GetObject","s3:ListBucket"],"Principal":"*","Resource":"arn:aws:s3:::*"}}'
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='')
    status=200
    try:
        s3_client.head_object(Bucket=bucket_name, Key='nonexistent')
    except ClientError as e:
        status = e.response['ResponseMetadata']['HTTPStatusCode']
    # ListBucket permission makes the true not-found status visible: expect 404
    eq(status,404)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role through web token')
@attr('webidentity_test')
@attr('token_claims_trust_policy_test')
def test_assume_role_with_web_identity():
    """AssumeRoleWithWebIdentity using a local Keycloak OIDC token grants s3 access."""
    check_webidentity()
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    thumbprint=get_thumbprint()
    aud=get_aud()
    token=get_token()
    realm=get_realm_name()
    # register the local Keycloak realm as an OIDC identity provider
    oidc_response = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(realm),
        ThumbprintList=[
            thumbprint,
        ],
    )
    # trust policy: federated principal restricted to the expected app_id (aud) claim
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_name = get_new_bucket_name()
    s3bucket = s3_client.create_bucket(Bucket=bucket_name)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
    bkt = s3_client.delete_bucket(Bucket=bucket_name)
    eq(bkt['ResponseMetadata']['HTTPStatusCode'],204)
    # clean up the OIDC provider registration
    oidc_remove=iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
    )
'''
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assume_role_with_web_token creds expire')
@attr('webidentity_test')
def test_assume_role_with_web_identity_invalid_webtoken():
resp_error=None
iam_client=get_iam_client()
sts_client=get_sts_client()
default_endpoint=get_config_endpoint()
role_session_name=get_parameter_name()
thumbprint=get_thumbprint()
aud=get_aud()
token=get_token()
realm=get_realm_name()
oidc_response = iam_client.create_open_id_connect_provider(
Url='http://localhost:8080/auth/realms/{}'.format(realm),
ThumbprintList=[
thumbprint,
],
)
policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
(role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
eq(response['ResponseMetadata']['HTTPStatusCode'],200)
resp=""
try:
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken='abcdef')
except InvalidIdentityTokenException as e:
log.debug('{}'.format(resp))
log.debug('{}'.format(e.response.get("Error", {}).get("Code")))
log.debug('{}'.format(e))
resp_error = e.response.get("Error", {}).get("Code")
eq(resp_error,'AccessDenied')
oidc_remove=iam_client.delete_open_id_connect_provider(
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
)
'''
#######################
# Session Policy Tests
#######################
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='checking session policy working for two different buckets')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_check_on_different_buckets():
    """Effective permissions are the intersection of role and session policy.

    The role policy covers only bucket 'test2'; the session policy covers only
    'test1'.  The intersection grants no CreateBucket anywhere, so creates on
    both buckets must be denied, and a PutObject to the never-created 'test1'
    must fail with NoSuchBucket.
    """
    check_webidentity()
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    thumbprint=get_thumbprint()
    aud=get_aud()
    token=get_token()
    realm=get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn,oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # trust policy: federated principal restricted to the expected app_id (aud) claim
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    # role policy: s3:* on bucket 'test2' only
    role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"arn:aws:s3:::test2\",\"arn:aws:s3:::test2/*\"]}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    # session policy: Get/Put on bucket 'test1' only
    session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
    resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_name_1 = 'test1'
    # bug fix: initialize so a (wrongly) successful create fails the eq()
    # with AssertionError instead of raising NameError
    s3bucket_error = None
    try:
        s3bucket = s3_client.create_bucket(Bucket=bucket_name_1)
    except ClientError as e:
        s3bucket_error = e.response.get("Error", {}).get("Code")
    eq(s3bucket_error, 'AccessDenied')
    bucket_name_2 = 'test2'
    # bug fix: reset before the second attempt so a stale 'AccessDenied' from
    # bucket 'test1' cannot mask a wrongly successful create of 'test2'
    s3bucket_error = None
    try:
        s3bucket = s3_client.create_bucket(Bucket=bucket_name_2)
    except ClientError as e:
        s3bucket_error = e.response.get("Error", {}).get("Code")
    eq(s3bucket_error, 'AccessDenied')
    bucket_body = 'please-write-something'
    #body.encode(encoding='utf_8')
    # bug fix: same initialization rationale as s3bucket_error above
    s3_put_obj_error = None
    try:
        s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3_put_obj_error = e.response.get("Error", {}).get("Code")
    eq(s3_put_obj_error,'NoSuchBucket')
    oidc_remove=iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_arn
    )
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking session policy working for same bucket')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_check_on_same_bucket():
    """PutObject succeeds when role policy and session policy both cover the bucket."""
    check_webidentity()
    iam_client=get_iam_client()
    sts_client=get_sts_client()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    thumbprint=get_thumbprint()
    aud=get_aud()
    token=get_token()
    realm=get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn,oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # trust policy: federated principal restricted to the expected app_id (aud) claim
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    # role policy: allow s3:* everywhere
    role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    # the target bucket is created up-front with the IAM user's own creds
    s3_client_iam_creds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
    # session policy: narrow the role permissions to Get/Put on 'test1'
    session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
    resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
    oidc_remove=iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_arn
    )
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='checking put_obj op denial')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_check_put_obj_denial():
    """PutObject is denied when the session policy grants only GetObject.

    The role policy allows s3:* everywhere, but the session policy narrows the
    session to GetObject on 'test1', so the PutObject must be AccessDenied.
    """
    check_webidentity()
    iam_client=get_iam_client()
    iam_access_key=get_iam_access_key()
    iam_secret_key=get_iam_secret_key()
    sts_client=get_sts_client()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    thumbprint=get_thumbprint()
    aud=get_aud()
    token=get_token()
    realm=get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn,oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # trust policy: federated principal restricted to the expected app_id (aud) claim
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    # role policy: allow s3:* everywhere
    role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client_iam_creds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
    # session policy: GetObject only on 'test1' — no PutObject
    session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
    resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    # bug fix: initialize so a (wrongly) successful put_object fails the eq()
    # with AssertionError instead of raising NameError
    s3_put_obj_error = None
    try:
        s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3_put_obj_error = e.response.get("Error", {}).get("Code")
    eq(s3_put_obj_error, 'AccessDenied')
    oidc_remove=iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_arn
    )
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='checking put_obj working by swapping policies')
@attr('webidentity_test')
@attr('session_policy')
def test_swapping_role_policy_and_session_policy():
    """PutObject succeeds with the narrow/broad policies swapped.

    Here the *role* policy is the narrow one (Get/Put on 'test1') and the
    *session* policy is the broad s3:* one — the intersection still allows the
    PutObject.
    """
    check_webidentity()
    iam_client=get_iam_client()
    iam_access_key=get_iam_access_key()
    iam_secret_key=get_iam_secret_key()
    sts_client=get_sts_client()
    default_endpoint=get_config_endpoint()
    role_session_name=get_parameter_name()
    thumbprint=get_thumbprint()
    aud=get_aud()
    token=get_token()
    realm=get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn,oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # trust policy: federated principal restricted to the expected app_id (aud) claim
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}"
    (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
    eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'')
    # role policy: narrow — Get/Put on 'test1' only
    role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}"
    (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new)
    eq(response['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client_iam_creds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200)
    # session policy: broad — s3:* everywhere
    session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}"
    resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'],200)
    s3_client = boto3.client('s3',
        aws_access_key_id = resp['Credentials']['AccessKeyId'],
        aws_secret_access_key = resp['Credentials']['SecretAccessKey'],
        aws_session_token = resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200)
    oidc_remove=iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_arn
    )
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking put_obj working by setting different permissions to role and session policy')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_check_different_op_permissions():
    """PutObject must be denied when the role policy allows only s3:PutObject
    but the session policy allows only s3:GetObject: the effective permission
    set of an assumed-role session is the intersection of the two policies.
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust policy: only web-identity tokens from this realm with the expected
    # app_id may assume the role.
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["' + oidc_arn + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/' + realm + ':app_id":"' + aud + '"}}}]}'
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    # Role policy grants only PutObject on the test bucket.
    role_policy_new = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy_new)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client_iam_creds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    # Session policy grants only GetObject, so the intersection with the role
    # policy contains no permission that allows the put below.
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:GetObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=token, Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    # Initialize so an unexpectedly successful put fails the eq() below with a
    # clear assertion instead of raising NameError.
    s3_put_obj_error = None
    try:
        s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3_put_obj_error = e.response.get("Error", {}).get("Code")
    eq(s3_put_obj_error, 'AccessDenied')
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking op behaviour with deny effect')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_check_with_deny_effect():
    """An explicit Deny in the role policy must win even though the session
    policy allows PutObject: Deny overrides Allow across policy types.
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust policy: only web-identity tokens from this realm with the expected
    # app_id may assume the role.
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["' + oidc_arn + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/' + realm + ':app_id":"' + aud + '"}}}]}'
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    # Role policy explicitly denies every s3 action on every resource.
    role_policy_new = '{"Version":"2012-10-17","Statement":{"Effect":"Deny","Action":"s3:*","Resource":["*"]}}'
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy_new)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client_iam_creds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    # Session policy allows the put, but it cannot override the role Deny.
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=token, Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    # Initialize so an unexpectedly successful put fails the eq() below with a
    # clear assertion instead of raising NameError.
    s3_put_obj_error = None
    try:
        s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3_put_obj_error = e.response.get("Error", {}).get("Code")
    eq(s3_put_obj_error, 'AccessDenied')
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking put_obj working with deny and allow on same op')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_check_with_deny_on_same_op():
    """When the role policy allows PutObject but the session policy explicitly
    denies the same action on the same resources, the put must be denied.
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust policy: only web-identity tokens from this realm with the expected
    # app_id may assume the role.
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["' + oidc_arn + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/' + realm + ':app_id":"' + aud + '"}}}]}'
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    # Role policy allows PutObject on the test bucket.
    role_policy_new = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy_new)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client_iam_creds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    # Session policy denies the very same action/resources the role allows;
    # the explicit Deny must win.
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Deny","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=token, Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    # Initialize so an unexpectedly successful put fails the eq() below with a
    # clear assertion instead of raising NameError.
    s3_put_obj_error = None
    try:
        s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3_put_obj_error = e.response.get("Error", {}).get("Code")
    eq(s3_put_obj_error, 'AccessDenied')
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking op when bucket policy has role arn')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_bucket_policy_role_arn():
    """With a bucket policy granting Get+Put to the role ARN and a session
    policy allowing only PutObject, the put succeeds but the get must be
    denied: the session policy still restricts the assumed-role session.
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust policy: only web-identity tokens from this realm with the expected
    # app_id may assume the role.
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["' + oidc_arn + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/' + realm + ':app_id":"' + aud + '"}}}]}'
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    # Role policy allows everything; the restrictions under test come from the
    # bucket policy and the session policy.
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*","Resource":["*"]}}'
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    s3client_iamcreds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    resource1 = "arn:aws:s3:::" + bucket_name_1
    resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*"
    rolearn = "arn:aws:iam:::role/" + general_role_name
    # Bucket policy names the role ARN (not the session ARN) as principal.
    bucket_policy = json.dumps(
        {
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Principal": {"AWS": rolearn},
                "Action": ["s3:GetObject", "s3:PutObject"],
                "Resource": [resource1, resource2]
            }]
        })
    s3client_iamcreds.put_bucket_policy(Bucket=bucket_name_1, Policy=bucket_policy)
    # Session policy allows only PutObject.
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=token, Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'], 200)
    # Initialize so an unexpectedly successful get fails the eq() below with a
    # clear assertion instead of raising NameError.
    s3object_error = None
    try:
        s3_client.get_object(Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3object_error = e.response.get("Error", {}).get("Code")
    eq(s3object_error, 'AccessDenied')
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='checking op when bucket policy has session arn')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_bucket_policy_session_arn():
    """Both put and get succeed when the bucket policy grants Get+Put to the
    assumed-role *session* ARN and the session policy allows PutObject."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = f'http://localhost:8080/auth/realms/{realm}'
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust the OIDC provider for tokens carrying the expected app_id claim.
    policy_document = ('{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["'
                       + oidc_arn
                       + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/'
                       + realm + ':app_id":"' + aud + '"}}}]}')
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    # Wide-open role policy; the bucket and session policies drive the test.
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*","Resource":["*"]}}'
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    s3client_iamcreds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    create_resp = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    bucket_arn = "arn:aws:s3:::" + bucket_name_1
    objects_arn = bucket_arn + "/*"
    # Principal is the assumed-role *session* ARN, not the bare role ARN.
    rolesessionarn = "arn:aws:iam:::assumed-role/" + general_role_name + "/" + role_session_name
    bucket_policy = json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": {"AWS": rolesessionarn},
            "Action": ["s3:GetObject", "s3:PutObject"],
            "Resource": [bucket_arn, objects_arn],
        }],
    })
    s3client_iamcreds.put_bucket_policy(Bucket=bucket_name_1, Policy=bucket_policy)
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    assumed = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=role_session_name,
        WebIdentityToken=token,
        Policy=session_policy,
    )
    eq(assumed['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = assumed['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    put_resp = s3_client.put_object(Body='this is a test file', Bucket=bucket_name_1, Key="test-1.txt")
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    get_resp = s3_client.get_object(Bucket=bucket_name_1, Key="test-1.txt")
    eq(get_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking copy object op with role, session and bucket policy')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_copy_object():
    """Copy an object within the bucket under combined role, session and
    bucket policies, then read back the copy.
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust policy: only web-identity tokens from this realm with the expected
    # app_id may assume the role.
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["' + oidc_arn + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/' + realm + ':app_id":"' + aud + '"}}}]}'
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*","Resource":["*"]}}'
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    s3client_iamcreds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    resource1 = "arn:aws:s3:::" + bucket_name_1
    resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*"
    # Bucket policy grants Get+Put to the assumed-role session ARN.
    # (Removed a leftover debug print of this ARN.)
    rolesessionarn = "arn:aws:iam:::assumed-role/" + general_role_name + "/" + role_session_name
    bucket_policy = json.dumps(
        {
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Principal": {"AWS": rolesessionarn},
                "Action": ["s3:GetObject", "s3:PutObject"],
                "Resource": [resource1, resource2]
            }]
        })
    s3client_iamcreds.put_bucket_policy(Bucket=bucket_name_1, Policy=bucket_policy)
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=token, Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'], 200)
    copy_source = {
        'Bucket': bucket_name_1,
        'Key': 'test-1.txt'
    }
    # Server-side copy within the same bucket, then verify the copy is readable.
    s3_client.copy(copy_source, bucket_name_1, "test-2.txt")
    s3_get_obj = s3_client.get_object(Bucket=bucket_name_1, Key="test-2.txt")
    eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'], 200)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking op is denied when no role policy')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_no_bucket_role_policy():
    """Without any role policy, a session policy alone grants nothing: the
    put must be denied even though the session policy allows it.
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust policy: only web-identity tokens from this realm with the expected
    # app_id may assume the role.  Note: deliberately NO role permission
    # policy is attached in this test.
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["' + oidc_arn + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/' + realm + ':app_id":"' + aud + '"}}}]}'
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    s3client_iamcreds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject","s3:GetObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=token, Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    # Initialize so an unexpectedly successful put fails the eq() below with a
    # clear assertion instead of raising NameError.
    s3putobj_error = None
    try:
        s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3putobj_error = e.response.get("Error", {}).get("Code")
    eq(s3putobj_error, 'AccessDenied')
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='checking op is denied when resource policy denies')
@attr('webidentity_test')
@attr('session_policy')
def test_session_policy_bucket_policy_deny():
    """A bucket-policy Deny for the assumed-role session ARN must override
    both the role policy (Allow s3:*) and the session policy (Allow Put).
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    aud = get_aud()
    token = get_token()
    realm = get_realm_name()
    url = 'http://localhost:8080/auth/realms/{}'.format(realm)
    thumbprintlist = [thumbprint]
    (oidc_arn, oidc_error) = create_oidc_provider(iam_client, url, None, thumbprintlist)
    if oidc_error is not None:
        raise RuntimeError('Unable to create/get openid connect provider {}'.format(oidc_error))
    # Trust policy: only web-identity tokens from this realm with the expected
    # app_id may assume the role.
    policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["' + oidc_arn + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/' + realm + ':app_id":"' + aud + '"}}}]}'
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*","Resource":["*"]}}'
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    s3client_iamcreds = get_s3_client_using_iam_creds()
    bucket_name_1 = 'test1'
    s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    resource1 = "arn:aws:s3:::" + bucket_name_1
    resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*"
    rolesessionarn = "arn:aws:iam:::assumed-role/" + general_role_name + "/" + role_session_name
    # Explicit Deny for the session ARN at the resource (bucket) level.
    bucket_policy = json.dumps(
        {
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Deny",
                "Principal": {"AWS": rolesessionarn},
                "Action": ["s3:GetObject", "s3:PutObject"],
                "Resource": [resource1, resource2]
            }]
        })
    s3client_iamcreds.put_bucket_policy(Bucket=bucket_name_1, Policy=bucket_policy)
    session_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:PutObject"],"Resource":["arn:aws:s3:::test1","arn:aws:s3:::test1/*"]}}'
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=token, Policy=session_policy)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
        )
    bucket_body = 'this is a test file'
    # Initialize so an unexpectedly successful put fails the eq() below with a
    # clear assertion instead of raising NameError.
    s3putobj_error = None
    try:
        s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt")
    except ClientError as e:
        s3putobj_error = e.response.get("Error", {}).get("Code")
    eq(s3putobj_error, 'AccessDenied')
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=oidc_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token using sub in trust policy')
@attr('webidentity_test')
@attr('token_claims_trust_policy_test')
def test_assume_role_with_web_identity_with_sub():
    """Assume a role whose trust policy conditions on the token's 'sub' claim,
    then create and delete a bucket with the temporary credentials."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    sub = get_sub()
    token = get_token()
    realm = get_realm_name()
    oidc_response = iam_client.create_open_id_connect_provider(
        Url=f'http://localhost:8080/auth/realms/{realm}',
        ThumbprintList=[thumbprint],
    )
    provider_arn = oidc_response["OpenIDConnectProviderArn"]
    # Trust policy keyed on the token's 'sub' claim rather than app_id.
    policy_document = ('{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["'
                       + provider_arn
                       + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/'
                       + realm + ':sub":"' + sub + '"}}}]}')
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*","Resource":"arn:aws:s3:::*"}}'
    (role_err, put_resp) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    assumed = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=role_session_name,
        WebIdentityToken=token,
    )
    eq(assumed['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = assumed['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    create_resp = s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    delete_resp = s3_client.delete_bucket(Bucket=bucket_name)
    eq(delete_resp['ResponseMetadata']['HTTPStatusCode'], 204)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token using azp in trust policy')
@attr('webidentity_test')
@attr('token_claims_trust_policy_test')
def test_assume_role_with_web_identity_with_azp():
    """Assume a role whose trust policy conditions on the token's 'azp' claim,
    then create and delete a bucket with the temporary credentials."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    azp = get_azp()
    token = get_token()
    realm = get_realm_name()
    oidc_response = iam_client.create_open_id_connect_provider(
        Url=f'http://localhost:8080/auth/realms/{realm}',
        ThumbprintList=[thumbprint],
    )
    provider_arn = oidc_response["OpenIDConnectProviderArn"]
    # Trust policy keyed on the token's 'azp' (authorized party) claim.
    policy_document = ('{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["'
                       + provider_arn
                       + '"]},"Action":["sts:AssumeRoleWithWebIdentity"],"Condition":{"StringEquals":{"localhost:8080/auth/realms/'
                       + realm + ':azp":"' + azp + '"}}}]}')
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*","Resource":"arn:aws:s3:::*"}}'
    (role_err, put_resp) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    assumed = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=role_session_name,
        WebIdentityToken=token,
    )
    eq(assumed['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = assumed['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    create_resp = s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    delete_resp = s3_client.delete_bucket(Bucket=bucket_name)
    eq(delete_resp['ResponseMetadata']['HTTPStatusCode'], 204)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token using aws:RequestTag in trust policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_request_tag_trust_policy_test')
def test_assume_role_with_web_identity_with_request_tag():
    """Assume a role whose trust policy requires the session tag
    Department=Engineering (aws:RequestTag), then create and delete a bucket
    with the temporary credentials."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    user_token = get_user_token()
    realm = get_realm_name()
    oidc_response = iam_client.create_open_id_connect_provider(
        Url=f'http://localhost:8080/auth/realms/{realm}',
        ThumbprintList=[thumbprint],
    )
    provider_arn = oidc_response["OpenIDConnectProviderArn"]
    # Trust policy requires sts:TagSession and the Department=Engineering
    # request tag supplied via the user token.
    policy_document = ('{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Federated":["'
                       + provider_arn
                       + '"]},"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],"Condition":{"StringEquals":{"aws:RequestTag/Department":"Engineering"}}}]}')
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*","Resource":"arn:aws:s3:::*"}}'
    (role_err, put_resp) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    assumed = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=role_session_name,
        WebIdentityToken=user_token,
    )
    eq(assumed['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = assumed['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    create_resp = s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    delete_resp = s3_client.delete_bucket(Bucket=bucket_name)
    eq(delete_resp['ResponseMetadata']['HTTPStatusCode'], 204)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token with aws:PrincipalTag in role policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_principal_tag_role_policy_test')
def test_assume_role_with_web_identity_with_principal_tag():
    """Assume a role via a web-identity token and verify that a permission
    policy conditioned on aws:PrincipalTag/Department grants S3 access."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    # Register the local Keycloak realm as an OIDC identity provider.
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    # Trust policy: the federated principal may assume the role only when the
    # session request carries the Department=Engineering tag.
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"aws:RequestTag/Department":"Engineering"}}}]}'
    )
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    # Permission policy keyed on the principal tag attached to the session.
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"StringEquals":'
        '{"aws:PrincipalTag/Department":"Engineering"}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    # The tagged session must be able to create and delete a bucket.
    bucket_name = get_new_bucket_name()
    create_resp = s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    delete_resp = s3_client.delete_bucket(Bucket=bucket_name)
    eq(delete_resp['ResponseMetadata']['HTTPStatusCode'], 204)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token with aws:PrincipalTag in role policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_principal_tag_role_policy_test')
def test_assume_role_with_web_identity_for_all_values():
    """Assume a role whose permission policy uses a ForAllValues:StringEquals
    condition listing every allowed Department tag value; S3 access must be
    granted."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"aws:RequestTag/Department":"Engineering"}}}]}'
    )
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    # ForAllValues passes when every Department value on the session matches
    # one of the values listed in the policy.
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"ForAllValues:StringEquals":'
        '{"aws:PrincipalTag/Department":["Engineering","Marketing"]}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    create_resp = s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    delete_resp = s3_client.delete_bucket(Bucket=bucket_name)
    eq(delete_resp['ResponseMetadata']['HTTPStatusCode'], 204)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token with aws:PrincipalTag in role policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_principal_tag_role_policy_test')
def test_assume_role_with_web_identity_for_all_values_deny():
    """Assume a role whose permission policy lists only a single Department
    value under ForAllValues:StringEquals; S3 access must be denied.

    NOTE(review): this presumes the web-identity token carries Department tag
    values beyond "Engineering", so the ForAllValues condition fails — confirm
    against the Keycloak realm configuration.
    """
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    user_token = get_user_token()
    realm = get_realm_name()
    oidc_response = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(realm),
        ThumbprintList=[
            thumbprint,
        ],
    )
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    # ForAllValues: the condition returns true only if every key value in the
    # request matches at least one value in the policy.
    role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"ForAllValues:StringEquals\":{\"aws:PrincipalTag/Department\":[\"Engineering\"]}}}}"
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=user_token)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    # Bug fix: initialise the error code so that a (wrongly) successful
    # create_bucket fails the eq() below instead of raising NameError on an
    # unbound variable.
    s3bucket_error = None
    try:
        s3bucket = s3_client.create_bucket(Bucket=bucket_name)
    except ClientError as e:
        s3bucket_error = e.response.get("Error", {}).get("Code")
    eq(s3bucket_error, 'AccessDenied')
    oidc_remove = iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
    )
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token with aws:TagKeys in trust policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_tag_keys_test')
def test_assume_role_with_web_identity_tag_keys_trust_policy():
    """Assume a role whose trust policy is conditioned on aws:TagKeys and whose
    permission policy uses ForAnyValue:StringEquals on the principal tag; S3
    access must be granted."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    # Trust policy: the session must carry a tag whose key is "Department".
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"aws:TagKeys":"Department"}}}]}'
    )
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    # ForAnyValue passes when at least one Department value on the session
    # matches a value listed in the policy.
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"ForAnyValue:StringEquals":'
        '{"aws:PrincipalTag/Department":["Engineering"]}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    create_resp = s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    delete_resp = s3_client.delete_bucket(Bucket=bucket_name)
    eq(delete_resp['ResponseMetadata']['HTTPStatusCode'], 204)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='get')
@attr(operation='check')
@attr(assertion='assuming role using web token with aws:TagKeys in role permission policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_tag_keys_test')
def test_assume_role_with_web_identity_tag_keys_role_policy():
    """Assume a role whose permission policy is conditioned on aws:TagKeys
    containing "Department"; S3 access must be granted."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"aws:RequestTag/Department":"Engineering"}}}]}'
    )
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    # Permission policy keyed on the set of tag keys attached to the session.
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"StringEquals":'
        '{"aws:TagKeys":["Department"]}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    bucket_name = get_new_bucket_name()
    create_resp = s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    delete_resp = s3_client.delete_bucket(Bucket=bucket_name)
    eq(delete_resp['ResponseMetadata']['HTTPStatusCode'], 204)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='assuming role using web token with s3:ResourceTag in role permission policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_resource_tags_test')
def test_assume_role_with_web_identity_resource_tag():
    """Assume a role whose permission policy is conditioned on
    s3:ResourceTag/Department; writing to a bucket tagged with a matching
    value must succeed."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    # Pre-create a tagged bucket using the IAM credentials so the
    # s3:ResourceTag condition below can match.
    s3_res_iam_creds = get_s3_resource_using_iam_creds()
    bucket_name = get_new_bucket_name()
    create_resp = s3_res_iam_creds.meta.client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_res_iam_creds.BucketTagging(bucket_name).put(
        Tagging={'TagSet': [{'Key': 'Department', 'Value': 'Engineering'},
                            {'Key': 'Department', 'Value': 'Marketing'}]})
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"aws:RequestTag/Department":"Engineering"}}}]}'
    )
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    # Permission policy keyed on the Department tag of the target bucket.
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"StringEquals":'
        '{"s3:ResourceTag/Department":["Engineering"]}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    put_obj_resp = s3_client.put_object(
        Body='this is a test file', Bucket=bucket_name, Key="test-1.txt")
    eq(put_obj_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='assuming role using web token with s3:ResourceTag with missing tags on bucket')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_resource_tags_test')
def test_assume_role_with_web_identity_resource_tag_deny():
    """Assume a role whose permission policy requires
    s3:ResourceTag/Department=Engineering; writing to an untagged bucket must
    be denied."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    user_token = get_user_token()
    realm = get_realm_name()
    # The bucket is deliberately created without any tags, so the
    # s3:ResourceTag condition cannot match.
    s3_res_iam_creds = get_s3_resource_using_iam_creds()
    s3_client_iam_creds = s3_res_iam_creds.meta.client
    bucket_name = get_new_bucket_name()
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    oidc_response = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(realm),
        ThumbprintList=[
            thumbprint,
        ],
    )
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}"
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=user_token)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    bucket_body = 'this is a test file'
    # Bug fix: initialise the error code so that a (wrongly) successful
    # put_object fails the eq() below instead of raising NameError on an
    # unbound variable.
    s3_put_obj_error = None
    try:
        s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt")
    except ClientError as e:
        s3_put_obj_error = e.response.get("Error", {}).get("Code")
    eq(s3_put_obj_error, 'AccessDenied')
    oidc_remove = iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
    )
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='assuming role using web token with s3:ResourceTag with wrong resource tag in policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_resource_tags_test')
def test_assume_role_with_web_identity_wrong_resource_tag_deny():
    """Assume a role whose permission policy requires
    s3:ResourceTag/Department=Engineering; writing to a bucket tagged with a
    different value must be denied."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    default_endpoint = get_config_endpoint()
    role_session_name = get_parameter_name()
    thumbprint = get_thumbprint()
    user_token = get_user_token()
    realm = get_realm_name()
    # Tag the bucket with a Department value that does NOT match the policy.
    s3_res_iam_creds = get_s3_resource_using_iam_creds()
    s3_client_iam_creds = s3_res_iam_creds.meta.client
    bucket_name = get_new_bucket_name()
    s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name)
    eq(s3bucket['ResponseMetadata']['HTTPStatusCode'], 200)
    bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name)
    Set_Tag = bucket_tagging.put(Tagging={'TagSet': [{'Key': 'Department', 'Value': 'WrongResourcetag'}]})
    oidc_response = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(realm),
        ThumbprintList=[
            thumbprint,
        ],
    )
    policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}"
    (role_error, role_response, general_role_name) = create_role(iam_client, '/', None, policy_document, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + general_role_name)
    role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}"
    (role_err, response) = put_role_policy(iam_client, general_role_name, None, role_policy)
    eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
    resp = sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'], RoleSessionName=role_session_name, WebIdentityToken=user_token)
    eq(resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_client = boto3.client('s3',
        aws_access_key_id=resp['Credentials']['AccessKeyId'],
        aws_secret_access_key=resp['Credentials']['SecretAccessKey'],
        aws_session_token=resp['Credentials']['SessionToken'],
        endpoint_url=default_endpoint,
        region_name='',
    )
    bucket_body = 'this is a test file'
    # Bug fix: initialise the error code so that a (wrongly) successful
    # put_object fails the eq() below instead of raising NameError on an
    # unbound variable.
    s3_put_obj_error = None
    try:
        s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt")
    except ClientError as e:
        s3_put_obj_error = e.response.get("Error", {}).get("Code")
    eq(s3_put_obj_error, 'AccessDenied')
    oidc_remove = iam_client.delete_open_id_connect_provider(
        OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"]
    )
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='assuming role using web token with s3:ResourceTag matching aws:PrincipalTag in role permission policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_resource_tags_test')
def test_assume_role_with_web_identity_resource_tag_princ_tag():
    """Assume a role whose permission policy requires the bucket's
    s3:ResourceTag/Department to equal the session's
    ${aws:PrincipalTag/Department}; put and get on a matching bucket must
    succeed."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    # Pre-create the bucket with IAM credentials and tag it Engineering so the
    # resource tag can match the principal tag.
    s3_res_iam_creds = get_s3_resource_using_iam_creds()
    bucket_name = get_new_bucket_name()
    create_resp = s3_res_iam_creds.meta.client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_res_iam_creds.BucketTagging(bucket_name).put(
        Tagging={'TagSet': [{'Key': 'Department', 'Value': 'Engineering'}]})
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"aws:RequestTag/Department":"Engineering"}}}]}'
    )
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    # The policy variable ${aws:PrincipalTag/Department} is substituted with
    # the session's own tag value at evaluation time.
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"StringEquals":'
        '{"s3:ResourceTag/Department":["${aws:PrincipalTag/Department}"]}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    key = "test-1.txt"
    put_obj_resp = s3_client.put_object(
        Body='this is a test file',
        Bucket=bucket_name,
        Key=key,
        Tagging='Department=Engineering&Department=Marketing',
    )
    eq(put_obj_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    get_obj_resp = s3_client.get_object(Bucket=bucket_name, Key=key)
    eq(get_obj_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='assuming role using web token with s3:ResourceTag used to test copy object')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_resource_tags_test')
def test_assume_role_with_web_identity_resource_tag_copy_obj():
    """Assume a role gated on s3:ResourceTag matching the principal tag and
    verify object copies succeed both within a bucket and across two buckets
    that carry the same Department tag."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    s3_res_iam_creds = get_s3_resource_using_iam_creds()
    iam_s3_client = s3_res_iam_creds.meta.client
    # Create two buckets and attach the same Department tag to both.
    bucket_name = get_new_bucket_name()
    create_resp = iam_s3_client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_res_iam_creds.BucketTagging(bucket_name).put(
        Tagging={'TagSet': [{'Key': 'Department', 'Value': 'Engineering'}]})
    copy_bucket_name = get_new_bucket_name()
    create_resp = iam_s3_client.create_bucket(Bucket=copy_bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_res_iam_creds.BucketTagging(copy_bucket_name).put(
        Tagging={'TagSet': [{'Key': 'Department', 'Value': 'Engineering'}]})
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"aws:RequestTag/Department":"Engineering"}}}]}'
    )
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    # Resource tag on the bucket must match the session's principal tag.
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"StringEquals":'
        '{"s3:ResourceTag/Department":["${aws:PrincipalTag/Department}"]}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    key = "test-1.txt"
    put_obj_resp = s3_client.put_object(
        Body='this is a test file',
        Bucket=bucket_name,
        Key=key,
        Tagging='Department=Engineering',
    )
    eq(put_obj_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    copy_source = {'Bucket': bucket_name, 'Key': 'test-1.txt'}
    # Copy within the same bucket.
    s3_client.copy(copy_source, bucket_name, "test-2.txt")
    get_resp = s3_client.get_object(Bucket=bucket_name, Key="test-2.txt")
    eq(get_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    # Copy across to the second (identically tagged) bucket.
    s3_client.copy(copy_source, copy_bucket_name, "test-1.txt")
    get_resp = s3_client.get_object(Bucket=copy_bucket_name, Key="test-1.txt")
    eq(get_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
@attr(resource='assume role with web identity')
@attr(method='put')
@attr(operation='check')
@attr(assertion='assuming role using web token with iam:ResourceTag in role trust policy')
@attr('webidentity_test')
@attr('abac_test')
@attr('token_role_tags_test')
def test_assume_role_with_web_identity_role_resource_tag():
    """Assume a role whose trust policy is conditioned on iam:ResourceTag
    (tags attached to the role itself) and verify S3 access through a
    resource-tag-gated permission policy."""
    check_webidentity()
    iam_client = get_iam_client()
    sts_client = get_sts_client()
    endpoint = get_config_endpoint()
    session_name = get_parameter_name()
    # Pre-create a tagged bucket for the s3:ResourceTag condition.
    s3_res_iam_creds = get_s3_resource_using_iam_creds()
    bucket_name = get_new_bucket_name()
    create_resp = s3_res_iam_creds.meta.client.create_bucket(Bucket=bucket_name)
    eq(create_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    s3_res_iam_creds.BucketTagging(bucket_name).put(
        Tagging={'TagSet': [{'Key': 'Department', 'Value': 'Engineering'},
                            {'Key': 'Department', 'Value': 'Marketing'}]})
    oidc = iam_client.create_open_id_connect_provider(
        Url='http://localhost:8080/auth/realms/{}'.format(get_realm_name()),
        ThumbprintList=[get_thumbprint()],
    )
    provider_arn = oidc['OpenIDConnectProviderArn']
    # iam:ResourceTag refers to the tags attached to the role, so the role may
    # only be assumed when it carries a tag matching this trust policy.
    trust_policy = (
        '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":'
        '{"Federated":["' + provider_arn + '"]},'
        '"Action":["sts:AssumeRoleWithWebIdentity","sts:TagSession"],'
        '"Condition":{"StringEquals":{"iam:ResourceTag/Department":"Engineering"}}}]}'
    )
    role_tags = [
        {'Key': 'Department', 'Value': 'Engineering'},
        {'Key': 'Department', 'Value': 'Marketing'},
    ]
    _, role_response, role_name = create_role(
        iam_client, '/', None, trust_policy, None, None, None, role_tags)
    eq(role_response['Role']['Arn'], 'arn:aws:iam:::role/' + role_name)
    perm_policy = (
        '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:*",'
        '"Resource":"arn:aws:s3:::*","Condition":{"StringEquals":'
        '{"s3:ResourceTag/Department":["Engineering"]}}}}'
    )
    _, put_resp = put_role_policy(iam_client, role_name, None, perm_policy)
    eq(put_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    sts_resp = sts_client.assume_role_with_web_identity(
        RoleArn=role_response['Role']['Arn'],
        RoleSessionName=session_name,
        WebIdentityToken=get_user_token(),
    )
    eq(sts_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    creds = sts_resp['Credentials']
    s3_client = boto3.client(
        's3',
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken'],
        endpoint_url=endpoint,
        region_name='',
    )
    put_obj_resp = s3_client.put_object(
        Body='this is a test file', Bucket=bucket_name, Key="test-1.txt")
    eq(put_obj_resp['ResponseMetadata']['HTTPStatusCode'], 200)
    iam_client.delete_open_id_connect_provider(OpenIDConnectProviderArn=provider_arn)
| 46.151934 | 340 | 0.699836 | 12,151 | 100,242 | 5.451732 | 0.026088 | 0.02758 | 0.054797 | 0.018794 | 0.950199 | 0.946652 | 0.942667 | 0.939919 | 0.935873 | 0.933066 | 0 | 0.021509 | 0.126215 | 100,242 | 2,171 | 341 | 46.173192 | 0.734787 | 0.005337 | 0 | 0.826869 | 0 | 0.002248 | 0.195566 | 0.014934 | 0 | 0 | 0 | 0 | 0.019112 | 1 | 0.022485 | false | 0 | 0.017988 | 0 | 0.043845 | 0.05059 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c34488976882eb9506744897bee3abfae2074a8f | 78 | py | Python | lunchmenu/lang/__init__.py | janeuzil/lunchmenu | 717404561198618e2da5f435f15f20088f811f6b | [
"MIT"
] | 1 | 2018-04-18T07:14:36.000Z | 2018-04-18T07:14:36.000Z | lunchmenu/lang/__init__.py | janeuzil/lunchmenu | 717404561198618e2da5f435f15f20088f811f6b | [
"MIT"
] | null | null | null | lunchmenu/lang/__init__.py | janeuzil/lunchmenu | 717404561198618e2da5f435f15f20088f811f6b | [
"MIT"
] | null | null | null | from lang import Answers
from lang import Commands
from lang import LangError
| 19.5 | 26 | 0.846154 | 12 | 78 | 5.5 | 0.5 | 0.363636 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153846 | 78 | 3 | 27 | 26 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c375829d40159a12de4694e8971aaf1bc83e8abc | 228 | py | Python | src/test/python/testDataSetRepo/provider/library/a.py | ninjapapa/SMV2 | 42cf9f176c3ec0bed61f66fbf859c18d97027dd6 | [
"Apache-2.0"
] | null | null | null | src/test/python/testDataSetRepo/provider/library/a.py | ninjapapa/SMV2 | 42cf9f176c3ec0bed61f66fbf859c18d97027dd6 | [
"Apache-2.0"
] | 34 | 2022-02-26T04:27:34.000Z | 2022-03-29T23:05:47.000Z | src/test/python/testDataSetRepo/provider/library/a.py | ninjapapa/SMV2 | 42cf9f176c3ec0bed61f66fbf859c18d97027dd6 | [
"Apache-2.0"
] | null | null | null | from smv.provider import SmvProvider
class MyBaseProvider(SmvProvider):
@staticmethod
def provider_type(): return "aaa"
class MyConcreteProvider(MyBaseProvider):
@staticmethod
def provider_type(): return "bbb"
| 22.8 | 41 | 0.758772 | 23 | 228 | 7.434783 | 0.608696 | 0.175439 | 0.269006 | 0.315789 | 0.385965 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.157895 | 228 | 9 | 42 | 25.333333 | 0.890625 | 0 | 0 | 0.285714 | 0 | 0 | 0.026316 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | true | 0 | 0.142857 | 0.285714 | 0.714286 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
6f065fbb9510dab129c7d02944beee705fa9ee23 | 155 | py | Python | algorithms/shoebox/__init__.py | TiankunZhou/dials | bd5c95b73c442cceb1c61b1690fd4562acf4e337 | [
"BSD-3-Clause"
] | 2 | 2021-03-17T11:25:46.000Z | 2021-11-18T04:20:54.000Z | algorithms/shoebox/__init__.py | TiankunZhou/dials | bd5c95b73c442cceb1c61b1690fd4562acf4e337 | [
"BSD-3-Clause"
] | null | null | null | algorithms/shoebox/__init__.py | TiankunZhou/dials | bd5c95b73c442cceb1c61b1690fd4562acf4e337 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import, division, print_function
from dials.algorithms.shoebox.masker import *
from dials_algorithms_shoebox_ext import *
| 31 | 64 | 0.858065 | 20 | 155 | 6.2 | 0.6 | 0.145161 | 0.306452 | 0.419355 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096774 | 155 | 4 | 65 | 38.75 | 0.885714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.333333 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6f1cea5248515844ae59cc392ec4c066b39849d8 | 2,966 | py | Python | tools/leapsec_table.py | mutoso/libtai | b9fcb0aedb9bb6a7635f626047d2ff886b41d521 | [
"MIT"
] | null | null | null | tools/leapsec_table.py | mutoso/libtai | b9fcb0aedb9bb6a7635f626047d2ff886b41d521 | [
"MIT"
] | null | null | null | tools/leapsec_table.py | mutoso/libtai | b9fcb0aedb9bb6a7635f626047d2ff886b41d521 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import requests
import datetime
r = requests.get('https://www.ietf.org/timezones/data/leap-seconds.list')
times = [line for line in r.content.split('\n')[:-1] if line[0] != '#']
times = [line.split('\t')[:2] for line in times]
# Convert from NTP timestamp to UNIX timestamp
def convert(time):
NTP2UNIX = (70 * 365 + 17) * 86400
time = int(time)
t = time - NTP2UNIX;
while t < 0:
t += 65536 * 65536
return t
# ============= leapsecsat_unix =============
print """unsigned int leapsecsat_unix(time_t date)
{
#define TABLE_LEN """ + str(len(times)) + """
time_t leapsec_dates[TABLE_LEN] = { 0 };
uint8_t leapsec_diff[TABLE_LEN] = { 0 };
#define new_leapsec(i, date, diff) leapsec_dates[i] = date; leapsec_diff[i] = diff;"""
for i, leapsec in enumerate(times):
print ' new_leapsec(%d, %d, %s);' % (i, convert(leapsec[0]), leapsec[1])
print """ #undef new_leapsec
// go backwards since it's more likely that people will be using modern dates
for (int i = TABLE_LEN - 1; i >= 0; i--)
{
if (leapsec_dates[i] - date <= 0)
{
return leapsec_diff[i];
}
}
#undef TABLE_LEN
// some date before the first leap seconds were inserted
return leapsec_diff[0];
}"""
# ============= leapsecsat_tai =============
print
print """unsigned int leapsecsat_tai(tai_t date)
{
#define TABLE_LEN """ + str(len(times)) + """
time_t leapsec_dates[TABLE_LEN] = { 0 };
uint8_t leapsec_diff[TABLE_LEN] = { 0 };
#define new_leapsec(i, date, diff) leapsec_dates[i] = date; leapsec_diff[i] = diff;"""
for i, leapsec in enumerate(times):
print ' new_leapsec(%d, %d, %s);' % (i, convert(leapsec[0]) + int(leapsec[1]), leapsec[1])
print """ #undef new_leapsec
// go backwards since it's more likely that people will be using modern dates
for (int i = TABLE_LEN - 1; i >= 0; i--)
{
if (leapsec_dates[i] - date <= 0)
{
return leapsec_diff[i];
}
}
#undef TABLE_LEN
// some date before the first leap seconds were inserted
return leapsec_diff[0];
}"""
# ============= isleapsec_tai =============
print
print """bool isleapsec_tai(tai_t date)
{
#define TABLE_LEN """ + str(len(times)) + """
time_t leapsec_dates[TABLE_LEN] = { 0 };
uint8_t leapsec_diff[TABLE_LEN] = { 0 };
#define new_leapsec(i, date, diff) leapsec_dates[i] = date; leapsec_diff[i] = diff;"""
for i, leapsec in enumerate(times):
print ' new_leapsec(%d, %d, %s);' % (i, convert(leapsec[0]) + int(leapsec[1]) - 1, leapsec[1])
print """ #undef new_leapsec
// go backwards since it's more likely that people will be using modern dates
for (int i = TABLE_LEN - 1; i >= 0; i--)
{
if (leapsec_dates[i] == date)
{
return true;
}
}
#undef TABLE_LEN
// not a leap second
return false;
}"""
| 28.796117 | 101 | 0.587323 | 417 | 2,966 | 4.038369 | 0.22542 | 0.071259 | 0.032067 | 0.06057 | 0.717933 | 0.717933 | 0.717933 | 0.717933 | 0.717933 | 0.717933 | 0 | 0.025755 | 0.240728 | 2,966 | 102 | 102 | 29.078431 | 0.722025 | 0.073163 | 0 | 0.519481 | 0 | 0.038961 | 0.699599 | 0.059424 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.025974 | null | null | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6f3c97cbd729affb98b239665e7f650e9afe5a12 | 8,510 | py | Python | app/main/application/controller/question_controller.py | meneel/TAP-API | 3f839a132044389bf4d27f978275d026071d6df1 | [
"MIT"
] | 1 | 2021-12-12T10:01:10.000Z | 2021-12-12T10:01:10.000Z | app/main/application/controller/question_controller.py | meneel/TAP-API | 3f839a132044389bf4d27f978275d026071d6df1 | [
"MIT"
] | null | null | null | app/main/application/controller/question_controller.py | meneel/TAP-API | 3f839a132044389bf4d27f978275d026071d6df1 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
from flask import request
from .._base import BaseResource
from ...repository import *
from ...repository.interface import *
class Level1Controller(BaseResource):
"""Level 1 Controller"""
def __init__(self):
super().__init__()
self.req_parser.add_argument("SubjectID", type = str, required = False)
self.req_parser.add_argument("SelectedAnswer", type = str, required = False)
self.req_parser.add_argument("L1QuestionID", type = str, required = False)
self.req_parser.add_argument("Marks", type = str, required = False)
self.req_parser.add_argument("Question", type = str, required = False)
self.req_parser.add_argument("WrongOptions", type = str, required = False)
self.req_parser.add_argument("CorrectOption", type = str, required = False)
self.req_parser.add_argument("Time", type = str, required = False)
self.level1_question_repository= Level1QuesionRepository()
def get(self):
if request.endpoint=="question":
try:
if self.args["SubjectID"]:
return self.formatter.get(self.level1_question_repository.get_by_SubjectID(self.args["SubjectID"]), None, 200, None)
elif self.args["L1QuestionID"]:
return self.formatter.get(self.level1_question_repository.get_by_L1QuestionID(self.args["L1QuestionID"]), None, 200, None)
else:
return self.formatter.get(self.level1_question_repository.get_all(), None, 200, None)
except AttributeError as err:
return self.formatter.get([], err, 204, "Value is Not Present")
def post(self):
if request.endpoint=="post_question":
try:
if self.args["SubjectID"] :
return self.formatter.post(self.level1_question_repository.create_question(self.args), err=None, code=200, msg="Successfully Posted Level 1 Question")
else:
raise AttributeError
except AttributeError as err:
return self.formatter.post([], err=repr(err),code=204, msg="Please pass correct values")
except ValueError as err:
return self.formatter.post([], err=repr(err),code=409, msg="Duplicate value found")
except Exception as err:
return self.formatter.post([], err=err, msg="Please pass correct values")
elif request.endpoint=="check_answer":
# try:
args = request.get_json()
output = {k : [] for k in [ "UUID", "SubjectID" , "L1QuestionID", "SelectedAnswer"]}
for i in args:
for key in output.keys():
output[key].append(i.get(key, None))
return self.formatter.post(self.level1_question_repository.Check_answer(output), err=None, code=200, msg=None)
# except AttributeError as err:
# return self.formatter.post([], err=repr(err),code=204, msg="Please pass correct values")
# except ValueError as err:
# return self.formatter.post([], err=repr(err),code=409, msg="Duplicate value found")
# except Exception as err:
# return self.formatter.post([], err, "Please pass correct values")
elif request.endpoint=="api4":
args = request.get_json()
output = {k : [] for k in ["SubjectID","UUID"]}
for i in args:
for key in output.keys():
output[key].append(i.get(key, None))
return self.formatter.get(self.level1_question_repository.get_question_sub(output), None, 200, None)
def delete(self):
try:
if self.args["L1QuestionID"]:
return self.formatter.post(self.level1_question_repository.delete_questions(self.args), err=None, code=200, msg="Successfully Deleted Question")
else:
raise AttributeError
except AttributeError as err:
return self.formatter.post([], err=repr(err),code=204, msg="Please pass correct values")
except ValueError as err:
return self.formatter.post([], err=repr(err),code=409, msg="Duplicate value found")
except Exception as err:
return self.formatter.post([], err=err, msg="Please pass correct values")
class Level2Controller(BaseResource):
"""Level 2 Controller"""
def __init__(self, **kwargs):
super().__init__()
self.req_parser.add_argument("SubjectID", type = str, required = False)
self.req_parser.add_argument("UUID", type = str, required = False)
self.req_parser.add_argument("SelectedAnswer", type = str, required = False)
self.req_parser.add_argument("L2QuestionID", type = str, required = False)
self.req_parser.add_argument("Marks", type = str, required = False)
self.req_parser.add_argument("Question", type = str, required = False)
self.req_parser.add_argument("Instruction", type = str, required = False)
self.req_parser.add_argument("FileUpload", type = str, required = False)
self.req_parser.add_argument("Image", type = str, required = False)
self.req_parser.add_argument("Time", type = str, required = False)
self.level2_question_repository= Level2QuesionRepository()
def get(self):
if request.endpoint=="question2":
try:
if self.args["SubjectID"]:
return self.formatter.get(self.level2_question_repository.get_by_SubjectID(self.args["SubjectID"]), None, 200, None)
elif self.args["L2QuestionID"]:
return self.formatter.get(self.level2_question_repository.get_by_L1QuestionID(self.args["L1QuestionID"]), None, 200, None)
else:
return self.formatter.get(self.level2_question_repository.get_all(), None, 200, None)
except AttributeError as err:
return self.formatter.get([], err=err, msg="Please pass correct values")
def post(self):
if request.endpoint=="post_question2":
try:
if self.args["SubjectID"] :
return self.formatter.post(self.level2_question_repository.create_question(self.args), err=None, code=200, msg="Successfully Posted Level 2 Question")
else:
raise AttributeError
except AttributeError as err:
return self.formatter.post([], err=repr(err),code=204, msg="Please pass correct values")
except ValueError as err:
return self.formatter.post([], err=repr(err),code=409, msg="Duplicate value found")
except Exception as err:
return self.formatter.post([], err=err, msg="Please pass correct values")
elif request.endpoint=="check_answer2":
try:
return self.formatter.post(self.level2_question_repository.check_answer(self.args), err=None, code=200, msg=None)
except AttributeError as err:
return self.formatter.post([], err=repr(err),code=204, msg="Please pass correct values")
except ValueError as err:
return self.formatter.post([], err=repr(err),code=409, msg="Duplicate value found")
except Exception as err:
return self.formatter.post([], err=err, msg="Please pass correct values")
elif request.endpoint=="api5":
return self.formatter.get(self.level2_question_repository.get_question_sub(self.args), None, 200, None)
def delete(self):
try:
if self.args["L2QuestionID"]:
return self.formatter.post(self.level2_question_repository.delete_questions(self.args), err=None, code=200, msg="Successfully Deleted Question")
else:
raise AttributeError
except AttributeError as err:
return self.formatter.post([], err=repr(err),code=204, msg="Please pass correct values")
except ValueError as err:
return self.formatter.post([], err=repr(err),code=409, msg="Duplicate value found")
except Exception as err:
return self.formatter.post([], err=err, msg="Please pass correct values")
| 43.641026 | 170 | 0.609988 | 966 | 8,510 | 5.258799 | 0.109731 | 0.066929 | 0.127165 | 0.108661 | 0.891535 | 0.887205 | 0.867126 | 0.838976 | 0.807283 | 0.742913 | 0 | 0.019837 | 0.277321 | 8,510 | 194 | 171 | 43.865979 | 0.806179 | 0.046886 | 0 | 0.622047 | 0 | 0 | 0.118018 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.062992 | false | 0.086614 | 0.031496 | 0 | 0.354331 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.