hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
e8bd63c6cb372cf0709f3f9d4e4c66e45c8f435a
122
py
Python
event_microservice/events/admin.py
RolesFGA/events
291839eca219214da87af432ce13b4f064bfaee3
[ "MIT" ]
1
2018-11-17T22:36:01.000Z
2018-11-17T22:36:01.000Z
event_microservice/events/admin.py
RolesFGA/events
291839eca219214da87af432ce13b4f064bfaee3
[ "MIT" ]
11
2018-11-12T16:54:06.000Z
2018-11-27T03:20:24.000Z
event_microservice/events/admin.py
RolesFGA/events
291839eca219214da87af432ce13b4f064bfaee3
[ "MIT" ]
2
2018-11-12T17:58:37.000Z
2018-12-11T00:53:14.000Z
from django.contrib import admin from django.contrib import admin from . models import Event admin.site.register(Event)
17.428571
32
0.811475
18
122
5.5
0.5
0.20202
0.343434
0.464646
0.606061
0.606061
0
0
0
0
0
0
0.131148
122
6
33
20.333333
0.933962
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
fa1e5e2e5232a60d07b8795bba104e455aa23baa
693
py
Python
Muchkinova_Ksenia_dz_2/task_2_1.py
Ksenia2075/homework
bc5b0cdef86431599d5e614ee7a73e9e23c256f1
[ "MIT" ]
null
null
null
Muchkinova_Ksenia_dz_2/task_2_1.py
Ksenia2075/homework
bc5b0cdef86431599d5e614ee7a73e9e23c256f1
[ "MIT" ]
null
null
null
Muchkinova_Ksenia_dz_2/task_2_1.py
Ksenia2075/homework
bc5b0cdef86431599d5e614ee7a73e9e23c256f1
[ "MIT" ]
null
null
null
#Выяснить тип результата выражений: 15 * 3 15 / 3 15 // 2 15 ** 2 print(type(15 * 3)) print(isinstance(15 * 3, int)) print(isinstance(15 * 3, str)) print(isinstance(15 * 3, bool)) print(isinstance(15 * 3, float)) print(type(15 / 3)) print(isinstance(15 / 3, int)) print(isinstance(15 / 3, str)) print(isinstance(15 / 3, bool)) print(isinstance(15 / 3, float)) print(type(15 // 2)) print(isinstance(15 // 2, int)) print(isinstance(15 // 2, str)) print(isinstance(15 // 2, bool)) print(isinstance(15 // 2, float)) print(type(15 ** 2)) print(isinstance(15 ** 2, int)) print(isinstance(15 ** 2, str)) print(isinstance(15 ** 2, bool)) print(isinstance(15 ** 2, float))
24.75
36
0.632035
108
693
4.055556
0.12963
0.547945
0.621005
0.328767
0.90411
0.90411
0.90411
0.90411
0.90411
0.90411
0
0.125874
0.174603
693
27
37
25.666667
0.63986
0.049062
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0.833333
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
11
d73d58f0b23413c84b9cf784ccedfc53d723801c
20,354
py
Python
model_training_and_evaluation/get_best_performing_hyperparams.py
skdreier/NIreland_NLP
87eaa99b89c41cc926caa69e543b09774b191a1d
[ "MIT" ]
1
2020-05-20T16:42:53.000Z
2020-05-20T16:42:53.000Z
model_training_and_evaluation/get_best_performing_hyperparams.py
skdreier/NIreland_NLP
87eaa99b89c41cc926caa69e543b09774b191a1d
[ "MIT" ]
10
2020-01-09T18:41:42.000Z
2020-07-10T18:27:05.000Z
model_training_and_evaluation/get_best_performing_hyperparams.py
skdreier/NIreland_NLP
87eaa99b89c41cc926caa69e543b09774b191a1d
[ "MIT" ]
1
2020-01-07T19:58:32.000Z
2020-01-07T19:58:32.000Z
from glob import glob import sys def will_accept_filename_as_source_for_hyperparams(filename, model_type, task, hyperparams): if model_type == 'RoBERTa': lowercase = hyperparams[0] num_sents_as_context = hyperparams[1] batch_size = hyperparams[2] learning_rate = hyperparams[3] if learning_rate == 3e-5 and batch_size == 136 and not lowercase: if num_sents_as_context == 0 or num_sents_as_context == 2 or num_sents_as_context == 4: if not filename.endswith('RobertaMakeupWillHopefullyComplete.txt'): return False elif model_type == 'lstm' or model_type == 'feedforward': if task == 'binary': if not filename.endswith('NewembedsWord2vecMakeup.txt'): return False else: lowercase = hyperparams[0] pretrained_word2vec_embeddings_only_used_positive_sentences = hyperparams[5] if pretrained_word2vec_embeddings_only_used_positive_sentences: if lowercase: # needs to be from the original runs if not filename.endswith('OldsmallembedsWord2vecMultiway.txt'): return False else: # needs to be from the most recent runs if not filename.endswith('NewsmallembedsWord2vecMultiway.txt'): return False else: if not filename.endswith('NewembedsWord2vecMakeup.txt'): return False return True def get_best_set_of_hyperparams_for_model(model_name, task, num_sents_as_context_param=None): assert model_name in ['RoBERTa', 'lstm', 'feedforward', 'logreg'] best_dev_f1_seen_so_far = 0.0 corresponding_hparams = None if num_sents_as_context_param is None: num_sents_as_context_list = [0, 1, 2, 3, 4] else: num_sents_as_context_list = [num_sents_as_context_param] for num_sents_as_context in num_sents_as_context_list: for fname in glob('experiment_results/' + str(num_sents_as_context) + '_sents_as_context/terminal_outputs/overall*'): all_text_lowercased = True in_multiway_set = None with open(fname, 'r') as f: for line in f: if 'Read in existing binary data split' in line: in_multiway_set = False elif 'Read in existing multi-way data split' in line: in_multiway_set = True if line.startswith('\tWith ') and 'result: 
accuracy is ' in line and not line.endswith(')\n') and \ ('logistic regression' in line if model_name == 'logreg' else model_name in line) and \ (not in_multiway_set if task == 'binary' else in_multiway_set): # this is a dev-set-performance-reporting line including its hyperparams, which we collect assert in_multiway_set is not None f1 = float(line[line.index('f1 is ') + len('f1 is '):]) if f1 > best_dev_f1_seen_so_far: line = line[len('\tWith '):] line = line[:line.rfind(',')] hparams = None if model_name == 'RoBERTa': # batch size 16 and learning rate 1e-05 batch_size = int(line[line.index('batch size ') + len('batch size '): line.index(' and')]) learning_rate = float(line[line.rfind(' ') + 1:]) hparams = (all_text_lowercased, num_sents_as_context, batch_size, learning_rate) elif model_name == 'lstm': # batch size 16, learning rate 5e-05, and NO doubled context features batch_size = int(line[line.index('batch size ') + len('batch_size '): line.index(', ')]) line = line[line.index(', ') + 1:] learning_rate = line[:line.index(', ')] learning_rate = float(learning_rate[learning_rate.rfind(' ') + 1:]) doubled_context_features = (' NO doubled context features' not in line) if (task != 'binary' and fname.endswith('OldsmallembedsWord2vecMultiway.txt')) or \ fname.endswith('NewsmallembedsWord2vecMultiway.txt'): assert task != 'binary' word2vecembeds_only_pretrained_on_positive_sents = True else: word2vecembeds_only_pretrained_on_positive_sents = False hparams = (all_text_lowercased, num_sents_as_context, batch_size, learning_rate, doubled_context_features, word2vecembeds_only_pretrained_on_positive_sents) elif model_name == 'feedforward': # batch size 32, learning rate 0.001, and NO doubled context features batch_size = int(line[line.index('batch size ') + len('batch_size '): line.index(', ')]) line = line[line.index(', ') + 1:] learning_rate = line[:line.index(', ')] learning_rate = float(learning_rate[learning_rate.rfind(' ') + 1:]) doubled_context_features = (' NO 
doubled context features' not in line) if (task != 'binary' and fname.endswith('OldsmallembedsWord2vecMultiway.txt')) or \ fname.endswith('NewsmallembedsWord2vecMultiway.txt'): assert task != 'binary' word2vecembeds_only_pretrained_on_positive_sents = True else: word2vecembeds_only_pretrained_on_positive_sents = False hparams = (all_text_lowercased, num_sents_as_context, batch_size, learning_rate, doubled_context_features, word2vecembeds_only_pretrained_on_positive_sents) elif model_name == 'logreg': # regularization weight 0.0001 and NO doubled context features reg_weight = float(line[line.index('regularization weight ') + len('regularization weight '): line.index(' and')]) doubled_context_features = (' NO doubled context features' not in line) hparams = (all_text_lowercased, num_sents_as_context, reg_weight, doubled_context_features) assert hparams is not None if will_accept_filename_as_source_for_hyperparams(fname, model_name, task, hparams): corresponding_hparams = hparams best_dev_f1_seen_so_far = f1 else: continue for num_sents_as_context in num_sents_as_context_list: for fname in glob('experiment_results/' + str(num_sents_as_context) + '_sents_as_context_CASED/terminal_outputs/overall*'): all_text_lowercased = False in_multiway_set = None with open(fname, 'r') as f: for line in f: if 'Read in existing binary data split' in line: in_multiway_set = False elif 'Read in existing multi-way data split' in line: in_multiway_set = True if line.startswith('\tWith ') and 'result: accuracy is ' in line and not line.endswith(')\n') and \ ('logistic regression' in line if model_name == 'logreg' else model_name in line) and \ (not in_multiway_set if task == 'binary' else in_multiway_set): # this is a dev-set-performance-reporting line including its hyperparams, which we collect assert in_multiway_set is not None f1 = float(line[line.index('f1 is ') + len('f1 is '):]) if f1 > best_dev_f1_seen_so_far: line = line[len('\tWith '):] line = line[:line.rfind(',')] hparams = None 
if model_name == 'RoBERTa': # batch size 16 and learning rate 1e-05 batch_size = int(line[line.index('batch size ') + len('batch size '): line.index(' and')]) learning_rate = float(line[line.rfind(' ') + 1:]) hparams = (all_text_lowercased, num_sents_as_context, batch_size, learning_rate) elif model_name == 'lstm': # batch size 16, learning rate 5e-05, and NO doubled context features batch_size = int(line[line.index('batch size ') + len('batch_size '): line.index(', ')]) line = line[line.index(', ') + 1:] learning_rate = line[:line.index(', ')] learning_rate = float(learning_rate[learning_rate.rfind(' ') + 1:]) doubled_context_features = (' NO doubled context features' not in line) if (task != 'binary' and fname.endswith('OldsmallembedsWord2vecMultiway.txt')) or \ fname.endswith('NewsmallembedsWord2vecMultiway.txt'): assert task != 'binary' word2vecembeds_only_pretrained_on_positive_sents = True else: word2vecembeds_only_pretrained_on_positive_sents = False hparams = (all_text_lowercased, num_sents_as_context, batch_size, learning_rate, doubled_context_features, word2vecembeds_only_pretrained_on_positive_sents) elif model_name == 'feedforward': # batch size 32, learning rate 0.001, and NO doubled context features batch_size = int(line[line.index('batch size ') + len('batch_size '): line.index(', ')]) line = line[line.index(', ') + 1:] learning_rate = line[:line.index(', ')] learning_rate = float(learning_rate[learning_rate.rfind(' ') + 1:]) doubled_context_features = (' NO doubled context features' not in line) if (task != 'binary' and fname.endswith('OldsmallembedsWord2vecMultiway.txt')) or \ fname.endswith('NewsmallembedsWord2vecMultiway.txt'): assert task != 'binary' word2vecembeds_only_pretrained_on_positive_sents = True else: word2vecembeds_only_pretrained_on_positive_sents = False hparams = (all_text_lowercased, num_sents_as_context, batch_size, learning_rate, doubled_context_features, word2vecembeds_only_pretrained_on_positive_sents) elif model_name == 
'logreg': # regularization weight 0.0001 and NO doubled context features reg_weight = float(line[line.index('regularization weight ') + len('regularization weight '): line.index(' and')]) doubled_context_features = (' NO doubled context features' not in line) hparams = (all_text_lowercased, num_sents_as_context, reg_weight, doubled_context_features) assert hparams is not None if will_accept_filename_as_source_for_hyperparams(fname, model_name, task, hparams): corresponding_hparams = hparams best_dev_f1_seen_so_far = f1 else: continue assert corresponding_hparams is not None return corresponding_hparams, best_dev_f1_seen_so_far def hparams_match(desired_set_of_hparams, part_of_line, model_type): if model_type == 'logreg': # regularization weight, doubled context features assert part_of_line.startswith('regularization weight ') part_of_line = part_of_line[len('regularization weight '):] reg_weight = float(part_of_line[:part_of_line.index(' ')]) if reg_weight != desired_set_of_hparams[2]: return False doubled_feats = 'NO doubled features' not in part_of_line if (doubled_feats and not desired_set_of_hparams[3]) or (desired_set_of_hparams[3] and not doubled_feats): return False return True elif model_type == 'RoBERTa': # batch size, learning rate assert part_of_line.startswith('lr ') part_of_line = part_of_line[len('lr '):] learning_rate = float(part_of_line[:part_of_line.index(' ')]) if learning_rate != desired_set_of_hparams[3]: return False batch_size = int(part_of_line[part_of_line.index('batch size ') + len('batch size '):]) if batch_size != desired_set_of_hparams[2]: return False return True elif model_type == 'lstm' or model_type == 'feedforward': # batch size, learning rate, doubled context features assert part_of_line.startswith('lr ') learning_rate = float(part_of_line[part_of_line.index('lr ') + len('lr '): part_of_line.index(', ')]) if learning_rate != desired_set_of_hparams[3]: return False batch_size = int(part_of_line[part_of_line.index('batch size ') + 
len('batch size '): part_of_line.index(', and')]) if batch_size != desired_set_of_hparams[2]: return False doubled_feats = 'NO doubled features' not in part_of_line if (doubled_feats and not desired_set_of_hparams[4]) or (desired_set_of_hparams[4] and not doubled_feats): return False return True def print_test_performance_for_best_model(model_name, task, num_sents_as_context_param=None): assert model_name in ['RoBERTa', 'lstm', 'feedforward', 'logreg'] if task != 'binary': task = 'multiway' best_dev_params, best_dev_f1 = \ get_best_set_of_hyperparams_for_model(model_name, task, num_sents_as_context_param=num_sents_as_context_param) if best_dev_params[0]: fname_end = '_sents_as_context/terminal_outputs/overall*' else: fname_end = '_sents_as_context_CASED/terminal_outputs/overall*' num_sents_as_context_list = [best_dev_params[1]] print('Best dev F1 of any ' + model_name + ' model: ' + str(best_dev_f1)) print('Corresponding best hparams: ' + str(best_dev_params)) if model_name == 'lstm' or model_name == 'feedforward': test_line_start = 'For ' + task + ' case, best ' + model_name + ' word2vec baseline' + ' model had ' elif model_name == 'logreg': test_line_start = 'For ' + task + ' case, best ' + 'baseline ' + model_name + ' model had ' else: test_line_start = 'For ' + task + ' case, best ' + model_name + ' model had ' for num_sents_as_context in num_sents_as_context_list: for fname in glob('experiment_results/' + str(num_sents_as_context) + fname_end): passed_line_indicating_test_perf_for_our_model_coming_up = False with open(fname, 'r') as f: in_multiway_set = None for line in f: if 'Read in existing binary data split' in line: in_multiway_set = False passed_line_indicating_test_perf_for_our_model_coming_up = False elif 'Read in existing multi-way data split' in line: in_multiway_set = True passed_line_indicating_test_perf_for_our_model_coming_up = False if line.startswith(test_line_start): # check whether the hyperparams in the line match the ones we're looking for 
if model_name == 'logreg': line = line[len(test_line_start): line.index(', and achieved the following performance')] passed_line_indicating_test_perf_for_our_model_coming_up = hparams_match(best_dev_params, line, model_name) else: line = line[len(test_line_start): line.index('. Performance:')] passed_line_indicating_test_perf_for_our_model_coming_up = hparams_match(best_dev_params, line, model_name) if passed_line_indicating_test_perf_for_our_model_coming_up: if model_name == 'logreg': if line.startswith('(Test set) With regularization weight ') and \ (not in_multiway_set if task == 'binary' else in_multiway_set): assert in_multiway_set is not None if will_accept_filename_as_source_for_hyperparams(fname, model_name, task, best_dev_params): # this is the line with our test set performance print(line + '\t(From ' + fname + ')') passed_line_indicating_test_perf_for_our_model_coming_up = False else: if line.startswith('Test ' + model_name) and ' result: accuracy is ' in line and \ line.endswith(')\n') and \ (not in_multiway_set if task == 'binary' else in_multiway_set): assert in_multiway_set is not None if will_accept_filename_as_source_for_hyperparams(fname, model_name, task, best_dev_params): # this is the line with our test set performance print(line + '\t(From ' + fname + ')') passed_line_indicating_test_perf_for_our_model_coming_up = False if __name__ == '__main__': model_name = sys.argv[1].strip() task = sys.argv[2].strip() if len(list(sys.argv)) < 4: num_sents_as_context = None else: num_sents_as_context = sys.argv[3].strip() if num_sents_as_context.isdigit(): num_sents_as_context = int(num_sents_as_context) else: num_sents_as_context = None print_test_performance_for_best_model(model_name, task, num_sents_as_context_param=num_sents_as_context)
65.658065
120
0.53051
2,101
20,354
4.809614
0.08377
0.029095
0.058189
0.063929
0.857298
0.815042
0.799703
0.761603
0.718654
0.708164
0
0.010899
0.395942
20,354
309
121
65.87055
0.81098
0.049867
0
0.731884
0
0
0.114907
0.031884
0.007246
0
0
0
0.057971
1
0.014493
false
0.028986
0.007246
0
0.083333
0.021739
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d743cd94be63f9c3dd90d64e1f8401ac34d2a60c
3,963
py
Python
Project 10 - Online Dictionary/OnlineDictionary_test.py
jimmyqtran/IntermediateSoftwareDesignPython
d237a1bb18deb02f2a24a17b517c7261436a5110
[ "MIT" ]
null
null
null
Project 10 - Online Dictionary/OnlineDictionary_test.py
jimmyqtran/IntermediateSoftwareDesignPython
d237a1bb18deb02f2a24a17b517c7261436a5110
[ "MIT" ]
null
null
null
Project 10 - Online Dictionary/OnlineDictionary_test.py
jimmyqtran/IntermediateSoftwareDesignPython
d237a1bb18deb02f2a24a17b517c7261436a5110
[ "MIT" ]
null
null
null
""" CS3B, Assignment #10, Online Dictionary Jimmy Tran Testing """ import unittest from OnlineDictionary import * class TimeFuncTest(unittest.TestCase): def testTimeFunc1(self): result, duration = time_func(pow, 2, 128) self.assertEqual(result, 340282366920938463463374607431768211456) self.assertTrue(isinstance(duration, float)) def testTimeFunc2(self): result, duration = time_func((lambda a, b: a - b), a=2, b=5) self.assertEqual(result, -3) self.assertTrue(isinstance(duration, float)) result, duration = time_func((lambda a, b: a - b), b=2, a=5) self.assertEqual(result, 3) self.assertTrue(isinstance(duration, float)) result, duration = time_func((lambda a, b: a - b), 2, 5) self.assertEqual(result, -3) self.assertTrue(isinstance(duration, float)) result, duration = time_func((lambda a, b: a - b), 2, b=5) self.assertEqual(result, -3) self.assertTrue(isinstance(duration, float)) def setUp(self): self.dictionary = Dictionary(source=DictionarySource.OXFORD_ONLINE) def testTimeFuncDictionary(self): word = "ace" # ONLINE entry, source, duration = self.dictionary.search(word) self.assertTrue(isinstance(entry, DictionaryEntry)) self.assertEqual(word, entry.word) self.assertEqual("noun", entry.part_of_speech) self.assertEqual("a playing card with a single spot on it, " "ranked as the highest card in its suit in most card games", entry.definition) self.assertEqual("the ace of diamonds", entry.example) self.assertEqual(DictionarySource.OXFORD_ONLINE, source) self.assertTrue(isinstance(duration, float)) # CACHE entry2, source2, duration2 = self.dictionary.search(word) self.assertTrue(isinstance(entry2, DictionaryEntry)) self.assertEqual(word, entry2.word) self.assertEqual("noun", entry2.part_of_speech) self.assertEqual("a playing card with a single spot on it, " "ranked as the highest card in its suit in most card games", entry2.definition) self.assertEqual("the ace of diamonds", entry2.example) self.assertEqual(DictionarySource.CACHE, source2) 
self.assertTrue(isinstance(duration2, float)) # Check that the second duration is faster than the first duration self.assertLess(duration2, duration) word = "python" # ONLINE AGAIN entry, source, duration = self.dictionary.search(word) self.assertTrue(isinstance(entry, DictionaryEntry)) self.assertEqual(word, entry.word) self.assertEqual("noun", entry.part_of_speech) self.assertEqual("a large heavy-bodied nonvenomous snake occurring throughout the Old World tropics, " "killing prey by constriction and asphyxiation.", entry.definition) self.assertEqual(None, entry.example) self.assertEqual(DictionarySource.OXFORD_ONLINE, source) self.assertTrue(isinstance(duration, float)) # CACHE AGAIN entry2, source2, duration2 = self.dictionary.search(word) self.assertTrue(isinstance(entry2, DictionaryEntry)) self.assertEqual(word, entry2.word) self.assertEqual("noun", entry2.part_of_speech) self.assertEqual("a large heavy-bodied nonvenomous snake occurring throughout the Old World tropics, " "killing prey by constriction and asphyxiation.", entry2.definition) self.assertEqual(None, entry2.example) self.assertEqual(DictionarySource.CACHE, source2) self.assertTrue(isinstance(duration2, float)) # Check that the second duration is faster than the first duration self.assertLess(duration2, duration)
41.715789
110
0.654555
435
3,963
5.926437
0.229885
0.145462
0.121024
0.086889
0.830877
0.81924
0.80256
0.770753
0.770753
0.75834
0
0.027647
0.251577
3,963
94
111
42.159574
0.841537
0.05728
0
0.686567
0
0
0.138904
0
0
0
0
0
0.597015
1
0.059701
false
0
0.029851
0
0.104478
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
d781cc6f14de93eb5b6e891b9ec12a0b558a7014
151
py
Python
src/maggma/builders/__init__.py
wuxiaohua1011/maggma
b7a059b2d12d9b96aa2092c40eb41f121c0a598b
[ "BSD-3-Clause-LBNL" ]
null
null
null
src/maggma/builders/__init__.py
wuxiaohua1011/maggma
b7a059b2d12d9b96aa2092c40eb41f121c0a598b
[ "BSD-3-Clause-LBNL" ]
null
null
null
src/maggma/builders/__init__.py
wuxiaohua1011/maggma
b7a059b2d12d9b96aa2092c40eb41f121c0a598b
[ "BSD-3-Clause-LBNL" ]
null
null
null
from maggma.core import Builder from maggma.builders.map_builder import MapBuilder, CopyBuilder from maggma.builders.group_builder import GroupBuilder
37.75
63
0.874172
20
151
6.5
0.55
0.230769
0.276923
0
0
0
0
0
0
0
0
0
0.086093
151
3
64
50.333333
0.942029
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
d789f7afb8b7d53720c449341f492c7f511defb7
102,733
py
Python
ThePi.py
isix/The1234Pi
431beb0e8e9bc18765412c60773d924a43c7100d
[ "MIT" ]
null
null
null
ThePi.py
isix/The1234Pi
431beb0e8e9bc18765412c60773d924a43c7100d
[ "MIT" ]
null
null
null
ThePi.py
isix/The1234Pi
431beb0e8e9bc18765412c60773d924a43c7100d
[ "MIT" ]
null
null
null
# ========================================= # The 12:34 in Pi for my friend Bill Hebel # ========================================= # Author: Isaias V. Prestes <isaias.prestes@gmail.com> # Creation date: 07MAR2018 12:34 # Last modification: 07MAR2018 14:13 pinumber = ''' 3.14159265358979323846264338327950288419716939937510582097494459230781640628620899862803482534211706798214808651328230664709384460955058223172535940812848111745028410270193852110555964462294895493038196442881097566593344612847564823378678316527120190914564856692346034861045432664821339360726024914127372458700660631558817488152092096282925409171536436789259036001133053054882046652138414695194151160943305727036575959195309218611738193261179310511854807446237996274956735188575272489122793818301194912983367336244065664308602139494639522473719070217986094370277053921717629317675238467481846766940513200056812714526356082778577134275778960917363717872146844090122495343014654958537105079227968925892354201995611212902196086403441815981362977477130996051870721134999999837297804995105973173281609631859502445945534690830264252230825334468503526193118817101000313783875288658753320838142061717766914730359825349042875546873115956286388235378759375195778185778053217122680661300192787661119590921642019893809525720106548586327886593615338182796823030195203530185296899577362259941389124972177528347913151557485724245415069595082953311686172785588907509838175463746493931925506040092770167113900984882401285836160356370766010471018194295559619894676783744944825537977472684710404753464620804668425906949129331367702898915210475216205696602405803815019351125338243003558764024749647326391419927260426992279678235478163600934172164121992458631503028618297455570674983850549458858692699569092721079750930295532116534498720275596023648066549911988183479775356636980742654252786255181841757467289097777279380008164706001614524919217321721477235014144197356854816136115735255213347574184946843852332390739414333454776241686251898356948556
20992192221842725502542568876717904946016534668049886272327917860857843838279679766814541009538837863609506800642251252051173929848960841284886269456042419652850222106611863067442786220391949450471237137869609563643719172874677646575739624138908658326459958133904780275900994657640789512694683983525957098258226205224894077267194782684826014769909026401363944374553050682034962524517493996514314298091906592509372216964615157098583874105978859597729754989301617539284681382686838689427741559918559252459539594310499725246808459872736446958486538367362226260991246080512438843904512441365497627807977156914359977001296160894416948685558484063534220722258284886481584560285060168427394522674676788952521385225499546667278239864565961163548862305774564980355936345681743241125150760694794510965960940252288797108931456691368672287489405601015033086179286809208747609178249385890097149096759852613655497818931297848216829989487226588048575640142704775551323796414515237462343645428584447952658678210511413547357395231134271661021359695362314429524849371871101457654035902799344037420073105785390621983874478084784896833214457138687519435064302184531910484810053706146806749192781911979399520614196634287544406437451237181921799983910159195618146751426912397489409071864942319615679452080951465502252316038819301420937621378559566389377870830390697920773467221825625996615014215030680384477345492026054146659252014974428507325186660021324340881907104863317346496514539057962685610055081066587969981635747363840525714591028970641401109712062804390397595156771577004203378699360072305587631763594218731251471205329281918261861258673215791984148488291644706095752706957220917567116722910981690915280173506712748583222871835209353965725121083579151369882091444210067510334671103141267111369908658516398315019701651511685171437657618351556508849099898599823873455283316355076479185358932261854896321329330898570642046752590709154814165498594616371802709819943099244889575712828905923233260972997120844335732654
89382391193259746366730583604142813883032038249037589852437441702913276561809377344403070746921120191302033038019762110110044929321516084244485963766983895228684783123552658213144957685726243344189303968642624341077322697802807318915441101044682325271620105265227211166039666557309254711055785376346682065310989652691862056476931257058635662018558100729360659876486117910453348850346113657686753249441668039626579787718556084552965412665408530614344431858676975145661406800700237877659134401712749470420562230538994561314071127000407854733269939081454664645880797270826683063432858785698305235808933065757406795457163775254202114955761581400250126228594130216471550979259230990796547376125517656751357517829666454779174501129961489030463994713296210734043751895735961458901938971311179042978285647503203198691514028708085990480109412147221317947647772622414254854540332157185306142288137585043063321751829798662237172159160771669254748738986654949450114654062843366393790039769265672146385306736096571209180763832716641627488880078692560290228472104031721186082041900042296617119637792133757511495950156604963186294726547364252308177036751590673502350728354056704038674351362222477158915049530984448933309634087807693259939780541934144737744184263129860809988868741326047215695162396586457302163159819319516735381297416772947867242292465436680098067692823828068996400482435403701416314965897940924323789690706977942236250822168895738379862300159377647165122893578601588161755782973523344604281512627203734314653197777416031990665541876397929334419521541341899485444734567383162499341913181480927777103863877343177207545654532207770921201905166096280490926360197598828161332316663652861932668633606273567630354477628035045077723554710585954870279081435624014517180624643626794561275318134078330336254232783944975382437205835311477119926063813346776879695970309833913077109870408591337464144282277263465947047458784778720192771528073176790770715721344473060570073349243693113835049316312840425121925651
79806941135280131470130478164378851852909285452011658393419656213491434159562586586557055269049652098580338507224264829397285847831630577775606888764462482468579260395352773480304802900587607582510474709164396136267604492562742042083208566119062545433721315359584506877246029016187667952406163425225771954291629919306455377991403734043287526288896399587947572917464263574552540790914513571113694109119393251910760208252026187985318877058429725916778131496990090192116971737278476847268608490033770242429165130050051683233643503895170298939223345172201381280696501178440874519601212285993716231301711444846409038906449544400619869075485160263275052983491874078668088183385102283345085048608250393021332197155184306354550076682829493041377655279397517546139539846833936383047461199665385815384205685338621867252334028308711232827892125077126294632295639898989358211674562701021835646220134967151881909730381198004973407239610368540664319395097901906996395524530054505806855019567302292191393391856803449039820595510022635353619204199474553859381023439554495977837790237421617271117236434354394782218185286240851400666044332588856986705431547069657474585503323233421073015459405165537906866273337995851156257843229882737231989875714159578111963583300594087306812160287649628674460477464915995054973742562690104903778198683593814657412680492564879855614537234786733039046883834363465537949864192705638729317487233208376011230299113679386270894387993620162951541337142489283072201269014754668476535761647737946752004907571555278196536213239264061601363581559074220202031872776052772190055614842555187925303435139844253223415762336106425063904975008656271095359194658975141310348227693062474353632569160781547818115284366795706110861533150445212747392454494542368288606134084148637767009612071512491404302725386076482363414334623518975766452164137679690314950191085759844239198629164219399490723623464684411739403265918404437805133389452574239950829659122850855582157250310712570126683024029295252201187267
67562204154205161841634847565169998116141010029960783869092916030288400269104140792886215078424516709087000699282120660418371806535567252532567532861291042487761825829765157959847035622262934860034158722980534989650226291748788202734209222245339856264766914905562842503912757710284027998066365825488926488025456610172967026640765590429099456815065265305371829412703369313785178609040708667114965583434347693385781711386455873678123014587687126603489139095620099393610310291616152881384379099042317473363948045759314931405297634757481193567091101377517210080315590248530906692037671922033229094334676851422144773793937517034436619910403375111735471918550464490263655128162288244625759163330391072253837421821408835086573917715096828874782656995995744906617583441375223970968340800535598491754173818839994469748676265516582765848358845314277568790029095170283529716344562129640435231176006651012412006597558512761785838292041974844236080071930457618932349229279650198751872127267507981255470958904556357921221033346697499235630254947802490114195212382815309114079073860251522742995818072471625916685451333123948049470791191532673430282441860414263639548000448002670496248201792896476697583183271314251702969234889627668440323260927524960357996469256504936818360900323809293459588970695365349406034021665443755890045632882250545255640564482465151875471196218443965825337543885690941130315095261793780029741207665147939425902989695946995565761218656196733786236256125216320862869222103274889218654364802296780705765615144632046927906821207388377814233562823608963208068222468012248261177185896381409183903673672220888321513755600372798394004152970028783076670944474560134556417254370906979396122571429894671543578468788614445812314593571984922528471605049221242470141214780573455105008019086996033027634787081081754501193071412233908663938339529425786905076431006383519834389341596131854347546495569781038293097164651438407007073604112373599843452251610507027056235266012764848308407611830130527932054274
62865403603674532865105706587488225698157936789766974220575059683440869735020141020672358502007245225632651341055924019027421624843914035998953539459094407046912091409387001264560016237428802109276457931065792295524988727584610126483699989225695968815920560010165525637567856672279661988578279484885583439751874454551296563443480396642055798293680435220277098429423253302257634180703947699415979159453006975214829336655566156787364005366656416547321704390352132954352916941459904160875320186837937023488868947915107163785290234529244077365949563051007421087142613497459561513849871375704710178795731042296906667021449863746459528082436944578977233004876476524133907592043401963403911473202338071509522201068256342747164602433544005152126693249341967397704159568375355516673027390074972973635496453328886984406119649616277344951827369558822075735517665158985519098666539354948106887320685990754079234240230092590070173196036225475647894064754834664776041146323390565134330684495397907090302346046147096169688688501408347040546074295869913829668246818571031887906528703665083243197440477185567893482308943106828702722809736248093996270607472645539925399442808113736943388729406307926159599546262462970706259484556903471197299640908941805953439325123623550813494900436427852713831591256898929519642728757394691427253436694153236100453730488198551706594121735246258954873016760029886592578662856124966552353382942878542534048308330701653722856355915253478445981831341129001999205981352205117336585640782648494276441137639386692480311836445369858917544264739988228462184490087776977631279572267265556259628254276531830013407092233436577916012809317940171859859993384923549564005709955856113498025249906698423301735035804408116855265311709957089942732870925848789443646005041089226691783525870785951298344172953519537885534573742608590290817651557803905946408735061232261120093731080485485263572282576820341605048466277504500312620080079980492548534694146977516493270950493463938243222718851597405470214828
97111777923761225788734771881968254629812686858170507402725502633290449762778944236216741191862694396506715157795867564823993917604260176338704549901761436412046921823707648878341968968611815581587360629386038101712158552726683008238340465647588040513808016336388742163714064354955618689641122821407533026551004241048967835285882902436709048871181909094945331442182876618103100735477054981596807720094746961343609286148494178501718077930681085469000944589952794243981392135055864221964834915126390128038320010977386806628779239718014613432445726400973742570073592100315415089367930081699805365202760072774967458400283624053460372634165542590276018348403068113818551059797056640075094260878857357960373245141467867036880988060971642584975951380693094494015154222219432913021739125383559150310033303251117491569691745027149433151558854039221640972291011290355218157628232831823425483261119128009282525619020526301639114772473314857391077758744253876117465786711694147764214411112635835538713610110232679877564102468240322648346417663698066378576813492045302240819727856471983963087815432211669122464159117767322532643356861461865452226812688726844596844241610785401676814208088502800541436131462308210259417375623899420757136275167457318918945628352570441335437585753426986994725470316566139919996826282472706413362221789239031760854289437339356188916512504244040089527198378738648058472689546243882343751788520143956005710481194988423906061369573423155907967034614914344788636041031823507365027785908975782727313050488939890099239135033732508559826558670892426124294736701939077271307068691709264625484232407485503660801360466895118400936686095463250021458529309500009071510582362672932645373821049387249966993394246855164832611341461106802674466373343753407642940266829738652209357016263846485285149036293201991996882851718395366913452224447080459239660281715655156566611135982311225062890585491450971575539002439315351909021071194573002438801766150352708626025378817975194780610137150044899172100222
01335013106016391541589578037117792775225978742891917915522417189585361680594741234193398420218745649256443462392531953135103311476394911995072858430658361935369329699289837914941939406085724863968836903265564364216644257607914710869984315733749648835292769328220762947282381537409961545598798259891093717126218283025848112389011968221429457667580718653806506487026133892822994972574530332838963818439447707794022843598834100358385423897354243956475556840952248445541392394100016207693636846776413017819659379971557468541946334893748439129742391433659360410035234377706588867781139498616478747140793263858738624732889645643598774667638479466504074111825658378878454858148962961273998413442726086061872455452360643153710112746809778704464094758280348769758948328241239292960582948619196670918958089833201210318430340128495116203534280144127617285830243559830032042024512072872535581195840149180969253395075778400067465526031446167050827682772223534191102634163157147406123850425845988419907611287258059113935689601431668283176323567325417073420817332230462987992804908514094790368878687894930546955703072619009502076433493359106024545086453628935456862958531315337183868265617862273637169757741830239860065914816164049449650117321313895747062088474802365371031150898427992754426853277974311395143574172219759799359685252285745263796289612691572357986620573408375766873884266405990993505000813375432454635967504844235284874701443545419576258473564216198134073468541117668831186544893776979566517279662326714810338643913751865946730024434500544995399742372328712494834706044063471606325830649829795510109541836235030309453097335834462839476304775645015008507578949548931393944899216125525597701436858943585877526379625597081677643800125436502371412783467926101995585224717220177723700417808419423948725406801556035998390548985723546745642390585850216719031395262944554391316631345308939062046784387785054239390524731362012947691874975191011472315289326772533918146607300089027768963114810902209724520759
16729700785058071718638105496797310016787085069420709223290807038326345345203802786099055690013413718236837099194951648960075504934126787643674638490206396401976668559233565463913836318574569814719621084108096188460545603903845534372914144651347494078488442377217515433426030669883176833100113310869042193903108014378433415137092435301367763108491351615642269847507430329716746964066653152703532546711266752246055119958183196376370761799191920357958200759560530234626775794393630746305690108011494271410093913691381072581378135789400559950018354251184172136055727522103526803735726527922417373605751127887218190844900617801388971077082293100279766593583875890939568814856026322439372656247277603789081445883785501970284377936240782505270487581647032458129087839523245323789602984166922548964971560698119218658492677040395648127810217991321741630581055459880130048456299765112124153637451500563507012781592671424134210330156616535602473380784302865525722275304999883701534879300806260180962381516136690334111138653851091936739383522934588832255088706450753947395204396807906708680644509698654880168287434378612645381583428075306184548590379821799459968115441974253634439960290251001588827216474500682070419376158454712318346007262933955054823955713725684023226821301247679452264482091023564775272308208106351889915269288910845557112660396503439789627825001611015323516051965590421184494990778999200732947690586857787872098290135295661397888486050978608595701773129815531495168146717695976099421003618355913877781769845875810446628399880600616229848616935337386578773598336161338413385368421197893890018529569196780455448285848370117096721253533875862158231013310387766827211572694951817958975469399264219791552338576623167627547570354699414892904130186386119439196283887054367774322427680913236544948536676800000106526248547305586159899914017076983854831887501429389089950685453076511680333732226517566220752695179144225280816517166776672793035485154204023817460892328391703275425750867655117859395002
79338959205766827896776445318404041855401043513483895312013263783692835808271937831265496174599705674507183320650345566440344904536275600112501843356073612227659492783937064784264567633881880756561216896050416113903906396016202215368494109260538768871483798955999911209916464644119185682770045742434340216722764455893301277815868695250694993646101756850601671453543158148010545886056455013320375864548584032402987170934809105562116715468484778039447569798042631809917564228098739987669732376957370158080682290459921236616890259627304306793165311494017647376938735140933618332161428021497633991898354848756252987524238730775595559554651963944018218409984124898262367377146722606163364329640633572810707887581640438148501884114318859882769449011932129682715888413386943468285900666408063140777577257056307294004929403024204984165654797367054855804458657202276378404668233798528271057843197535417950113472736257740802134768260450228515797957976474670228409995616015691089038458245026792659420555039587922981852648007068376504183656209455543461351341525700659748819163413595567196496540321872716026485930490397874895890661272507948282769389535217536218507962977851461884327192232238101587444505286652380225328438913752738458923844225354726530981715784478342158223270206902872323300538621634798850946954720047952311201504329322662827276321779088400878614802214753765781058197022263097174950721272484794781695729614236585957820908307332335603484653187302930266596450137183754288975579714499246540386817992138934692447419850973346267933210726868707680626399193619650440995421676278409146698569257150743157407938053239252394775574415918458215625181921552337096074833292349210345146264374498055961033079941453477845746999921285999993996122816152193148887693880222810830019860165494165426169685867883726095877456761825072759929508931805218729246108676399589161458550583972742098090978172932393010676638682404011130402470073508578287246271349463685318154696904669686939254725194139929146524238577625500474852954
76814795467007050347999588867695016124972282040303995463278830695976249361510102436555352230690612949388599015734661023712235478911292547696176005047974928060721268039226911027772261025441492215765045081206771735712027180242968106203776578837166909109418074487814049075517820385653909910477594141321543284406250301802757169650820964273484146957263978842560084531214065935809041271135920041975985136254796160632288736181367373244506079244117639975974619383584574915988097667447093006546342423460634237474666080431701260052055928493695941434081468529815053947178900451835755154125223590590687264878635752541911288877371766374860276606349603536794702692322971868327717393236192007774522126247518698334951510198642698878471719396649769070825217423365662725928440620430214113719922785269984698847702323823840055655517889087661360130477098438611687052310553149162517283732728676007248172987637569816335415074608838663640693470437206688651275688266149730788657015685016918647488541679154596507234287730699853713904300266530783987763850323818215535597323530686043010675760838908627049841888595138091030423595782495143988590113185835840667472370297149785084145853085781339156270760356390763947311455495832266945702494139831634332378975955680856836297253867913275055542524491943589128405045226953812179131914513500993846311774017971512283785460116035955402864405902496466930707769055481028850208085800878115773817191741776017330738554758006056014337743299012728677253043182519757916792969965041460706645712588834697979642931622965520168797300035646304579308840327480771811555330909887025505207680463034608658165394876951960044084820659673794731680864156456505300498816164905788311543454850526600698230931577765003780704661264706021457505793270962047825615247145918965223608396645624105195510522357239739512881816405978591427914816542632892004281609136937773722299983327082082969955737727375667615527113922588055201898876201141680054687365580633471603734291703907986396522961312801782679717289822936070288069087
76866059325274637840539769184808204102194471971386925608416245112398062011318454124478205011079876071715568315407886543904121087303240201068534194723047666672174986986854707678120512473679247919315085644477537985379973223445612278584329684664751333657369238720146472367942787004250325558992688434959287612400755875694641370562514001179713316620715371543600687647731867558714878398908107429530941060596944315847753970094398839491443235366853920994687964506653398573888786614762944341401049888993160051207678103588611660202961193639682134960750111649832785635316145168457695687109002999769841263266502347716728657378579085746646077228341540311441529418804782543876177079043000156698677679576090996693607559496515273634981189641304331166277471233881740603731743970540670310967676574869535878967003192586625941051053358438465602339179674926784476370847497833365557900738419147319886271352595462518160434225372996286326749682405806029642114638643686422472488728343417044157348248183330164056695966886676956349141632842641497453334999948000266998758881593507357815195889900539512085351035726137364034367534714104836017546488300407846416745216737190483109676711344349481926268111073994825060739495073503169019731852119552635632584339099822498624067031076831844660729124874754031617969941139738776589986855417031884778867592902607004321266617919223520938227878880988633599116081923535557046463491132085918979613279131975649097600013996234445535014346426860464495862476909434704829329414041114654092398834443515913320107739441118407410768498106634724104823935827401944935665161088463125678529776973468430306146241803585293315973458303845541033701091676776374276210213701354854450926307190114731848574923318167207213727935567952844392548156091372812840633303937356242001604566455741458816605216660873874804724339121295587776390696903707882852775389405246075849623157436917113176134783882719416860662572103685132156647800147675231039357860689611125996028183930954870905907386135191459181951029732787557104972901
14871718971800469616977700179139196137914171627070189584692143436967629274591099400600849835684252019155937037010110497473394938778859894174330317853487076032219829705797511914405109942358830345463534923498268836240433272674155403016195056806541809394099820206099941402168909007082133072308966211977553066591881411915778362729274615618571037217247100952142369648308641025928874579993223749551912219519034244523075351338068568073544649951272031744871954039761073080602699062580760202927314552520780799141842906388443734996814582733720726639176702011830046481900024130835088465841521489912761065137415394356572113903285749187690944137020905170314877734616528798482353382972601361109845148418238081205409961252745808810994869722161285248974255555160763716750548961730168096138038119143611439921063800508321409876045993093248510251682944672606661381517457125597549535802399831469822036133808284993567055755247129027453977621404931820146580080215665360677655087838043041343105918046068008345911366408348874080057412725867047922583191274157390809143831384564241509408491339180968402511639919368532255573389669537490266209232613188558915808324555719484538756287861288590041060060737465014026278240273469625282171749415823317492396835301361786536737606421667781377399510065895288774276626368418306801908046098498094697636673356622829151323527888061577682781595886691802389403330764419124034120223163685778603572769415417788264352381319050280870185750470463129333537572853866058889045831114507739429352019943219711716422350056440429798920815943071670198574692738486538334361457946341759225738985880016980147574205429958012429581054565108310462972829375841611625325625165724980784920998979906200359365099347215829651741357984910471116607915874369865412223483418877229294463351786538567319625598520260729476740726167671455736498121056777168934849176607717052771876011999081441130586455779105256843048114402619384023224709392498029335507318458903553971330884461741079591625117148648744686112476054286734367090466
78468670274091881014249711149657817724279347070216688295610877794405048437528443375108828264771978540006509704033021862556147332117771174413350281608840351781452541964320309576018694649088681545285621346988355444560249556668436602922195124830910605377201980218310103270417838665447181260397190688462370857518080035327047185659499476124248110999288679158969049563947624608424065930948621507690314987020673533848349550836366017848771060809804269247132410009464014373603265645184566792456669551001502298330798496079949882497061723674493612262229617908143114146609412341593593095854079139087208322733549572080757165171876599449856937956238755516175754380917805280294642004472153962807463602113294255916002570735628126387331060058910652457080244749375431841494014821199962764531068006631183823761639663180931444671298615527598201451410275600689297502463040173514891945763607893528555053173314164570504996443890936308438744847839616840518452732884032345202470568516465716477139323775517294795126132398229602394548579754586517458787713318138752959809412174227300352296508089177705068259248822322154938048371454781647213976820963320508305647920482085920475499857320388876391601995240918938945576768749730856955958010659526503036266159750662225084067428898265907510637563569968211510949669744580547288693631020367823250182323708459790111548472087618212477813266330412076216587312970811230758159821248639807212407868878114501655825136178903070860870198975889807456643955157415363193191981070575336633738038272152798849350397480015890519420879711308051233933221903466249917169150948541401871060354603794643379005890957721180804465743962806186717861017156740967662080295766577051291209907944304632892947306159510430902221439371849560634056189342513057268291465783293340524635028929175470872564842600349629611654138230077313327298305001602567240141851520418907011542885799208121984493156999059182011819733500126187728036812481995877070207532406361259313438595542547781961142935163561223496661522614735399674051584
99860355295332924575238881013620234762466905581643896786309762736550472434864307121849437348530060638764456627218666170123812771562137974614986132874411771455244470899714452288566294244023018479120547849857452163469644897389206240194351831008828348024924908540307786387516591130287395878709810077271827187452901397283661484214287170553179654307650453432460053636147261818096997693348626407743519992868632383508875668359509726557481543194019557685043724800102041374983187225967738715495839971844490727914196584593008394263702087563539821696205532480321226749891140267852859967340524203109179789990571882194939132075343170798002373659098537552023891164346718558290685371189795262623449248339249634244971465684659124891855662958932990903523923333364743520370770101084388003290759834217018554228386161721041760301164591878053936744747205998502358289183369292233732399948043710841965947316265482574809948250999183300697656936715968936449334886474421350084070066088359723503953234017958255703601693699098867113210979889707051728075585519126993067309925070407024556850778679069476612629808225163313639952117098452809263037592242674257559989289278370474445218936320348941552104459726188380030067761793138139916205806270165102445886924764924689192461212531027573139084047000714356136231699237169484813255420091453041037135453296620639210547982439212517254013231490274058589206321758949434548906846399313757091034633271415316223280552297297953801880162859073572955416278867649827418616421878988574107164906919185116281528548679417363890665388576422915834250067361245384916067413734017357277995634104332688356950781493137800736235418007061918026732855119194267609122103598746924117283749312616339500123959924050845437569850795704622266461900010350049018303415354584283376437811198855631877779253720116671853954183598443830520376281944076159410682071697030228515225057312609304689842343315273213136121658280807521263154773060442377475350595228717440266638914881717308643611138906942027908814311944879941715404210
34121908470940802540239329429454938786402305129271190975135360009219711054120966831115163287054230284700731206580326264171161659576132723515666625366727189985341998952368848309993027574199164638414270779887088742292770538912271724863220288984251252872178260305009945108247835729056919885554678860794628053712270424665431921452817607414824038278358297193010178883456741678113989547504483393146896307633966572267270433932167454218245570625247972199786685427989779923395790575818906225254735822052364248507834071101449804787266919901864388229323053823185597328697809222535295910173414073348847610055640182423921926950620831838145469839236646136398910121021770959767049083050818547041946643713122996923588953849301363565761861060622287055994233716310212784574464639897381885667462608794820186474876727272220626764653380998019668836809941590757768526398651462533363124505364026105696055131838131742611844201890888531963569869627950367384243130113317533053298020166888174813429886815855778103432317530647849832106297184251843855344276201282345707169885305183261796411785796088881503296022907056144762209150947390359466469162353968092013945781758910889319921122600739281491694816152738427362642980982340632002440244958944561291670495082358124873917996486411334803247577752197089327722623494860150466526814398770516153170266969297049283162855042128981467061953319702695072143782304768752802873541261663917082459251700107141808548006369232594620190022780874098597719218051585321473926532515590354102092846659252999143537918253145452905984158176370589279069098969111643811878094353715213322614436253144901274547726957393934815469163116249288735747188240715039950094467319543161938554852076657388251396391635767231510055560372633948672082078086537349424401157996675073607111593513319591971209489647175530245313647709420946356969822266737752099451684506436238242118535348879893956731878066061078854400055082765703055874485418057788917192078814233511386629296671796434687600770479995378833878703487180218424373421
12273940255717690819603092018240188427057046092622564178375265263358324240661253311529423457965569502506810018310900411245379015332966156970522379210325706937051090830789479999004999395322153622748476603613677697978567386584670936679588583788795625946464891376652199588286933801836011932368578558558195556042156250883650203322024513762158204618106705195330653060606501054887167245377942831338871631395596905832083416898476065607118347136218123246227258841990286142087284956879639325464285343075301105285713829643709990356948885285190402956047346131138263878897551788560424998748316382804046848618938189590542039889872650697620201995548412650005394428203930127481638158530396439925470201672759328574366661644110962566337305409219519675148328734808957477775278344221091073111351828046036347198185655572957144747682552857863349342858423118749440003229690697758315903858039353521358860079600342097547392296733310649395601812237812854584317605561733861126734780745850676063048229409653041118306671081893031108871728167519579675347188537229309616143204006381322465841111157758358581135018569047815368938137718472814751998350504781297718599084707621974605887423256995828892535041937958260616211842368768511418316068315867994601652057740529423053601780313357263267054790338401257305912339601880137825421927094767337191987287385248057421248921183470876629667207272325650565129333126059505777727542471241648312832982072361750574673870128209575544305968395555686861188397135522084452852640081252027665557677495969626612604565245684086139238265768583384698499778726706555191854468698469478495734622606294219624557085371272776523098955450193037732166649182578154677292005212667143463209637891852323215018976126034373684067194193037746880999296877582441047878123266253181845960453853543839114496775312864260925211537673258866722604042523491087026958099647595805794663973419064010036361904042033113579336542426303561457009011244800890020801478056603710154122328891465722393145076071670643556827437743965789067972687
43847307634645167756210309860409271709095128086309029738504452718289274968921210667008164858339553773591913695015316201890888748421079870689911480466927065094076204650277252865072890532854856143316081269300569378541786109696920253886503457718317668688592368148847527649846882194973972970773718718840041432312763650481453112285099002074240925585925292610302106736815434701525234878635164397623586041919412969769040526483234700991115424260127343802208933109668636789869497799400126016422760926082349304118064382913834735467972539926233879158299848645927173405922562074910530853153718291168163721939518870095778818158685046450769934394098743351443162633031724774748689791820923948083314397084067308407958935810896656477585990556376952523265361442478023082681183103773588708924061303133647737101162821461466167940409051861526036009252194721889091810733587196414214447865489952858234394705007983038853886083103571930600277119455802191194289992272235345870756624692617766317885514435021828702668561066500353105021631820601760921798468493686316129372795187307897263735371715025637873357977180818487845886650433582437700414771041493492743845758710715973155943942641257027096512510811554824793940359768118811728247215825010949609662539339538092219559191818855267806214992317276316321833989693807561685591175299845013206712939240414459386239880938124045219148483164621014738918251010909677386906640415897361047643650006807710565671848628149637111883219244566394581449148616550049567698269030891118568798692947051352481609174324301538368470729289898284602223730145265567989862776796809146979837826876431159883210904371561129976652153963546442086919756737000573876497843768628768179249746943842746525631632300555130417422734164645512781278457777245752038654375428282567141288583454443513256205446424101103795546419058116862305964476958705407214198521210673433241075676757581845699069304604752277016700568454396923404171108988899341635058515788735343081552081177207188037910404698306957868547393765643363197978680
36718730796939242363214484503547763156702553900654231179201534649779290662415083288583952905426376876689688050333172278001858850697362324038947004718976193473443084374437599250341788079722358591342458131440498477017323616947197657153531977549971627856631190469126091825912498903676541769799036237552865263757337635269693443544004730671988689019681474287677908669796885225016369498567302175231325292653758964151714795595387842784998664563028788319620998304945198743963690706827626574858104391122326187940599415540632701319898957037611053236062986748037791537675115830432084987209202809297526498125691634250005229088726469252846661046653921714820801305022980526378364269597337070539227891535105688839381132497570713310295044303467159894487868471164383280506925077662745001220035262037094660234146489983902525888301486781621967751945831677187627572005054397944124599007711520515461993050983869825428464072555409274031325716326407929341833421470904125425335232480219322770753555467958716383587501815933871742360615511710131235256334858203651461418700492057043720182617331947157008675785393360786227395581857975872587441025420771054753612940474601000940954449596628814869159038990718659805636171376922272907641977551777201042764969496110562205925024202177042696221549587264539892276976603105249808557594716310758701332088614632664125911486338812202844406941694882615295776253250198703598706743804698219420563812558334364219492322759372212890564209430823525440841108645453694049692714940033197828613181861888111184082578659287574263844500599442295685864604810330153889114994869354360302218109434667640000223625505736312946262960961987605642599639461386923308371962659547392346241345977957485246478379807956931986508159776753505539189911513352522987361127791827485420086895396583594219633315028695611920122988898870060799927954111882690230789131076036176347794894320321027733594169086500719328040171638406449878717537567811853213284082165711075495282949749362146082155832056872321855740651610962748743750980
92230211609982633033915469494644491004515280925089745074896760324090768983652940657920198315265410658136823791984090645712468948470209357761193139980246813405200394781949866202624008902150166163813538381515037735022966074627952910384068685569070157516624192987244482719429331004854824454580718897633003232525821581280327467962002814762431828622171054352898348208273451680186131719593324711074662228508710666117703465352839577625997744672185715816126411143271794347885990892808486694914139097716736900277758502686646540565950394867841110790116104008572744562938425494167594605487117235946429105850909950214958793112196135908315882620682332156153086833730838173279328196983875087083483880463884784418840031847126974543709373298362402875197920802321878744882872843727378017827008058782410749357514889978911739746129320351081432703251409030487462262942344327571260086642508333187688650756429271605525289544921537651751492196367181049435317858383453865255656640657251363575064353236508936790431702597878177190314867963840828810209461490079715137717099061954969640070867667102330048672631475510537231757114322317411411680622864206388906210192355223546711662137499693269321737043105987225039456574924616978260970253359475020913836673772894438696400028110344026084712899000746807764844088711341352503367877316797709372778682166117865344231732264637847697875144332095340001650692130546476890985050203015044880834261845208730530973189492916425322933612431514306578264070283898409841602950309241897120971601649265613413433422298827909921786042679812457285345801338260995877178113102167340256562744007296834066198480676615805021691833723680399027931606420436812079900316264449146190219458229690992122788553948783538305646864881655562294315673128274390826450611628942803501661336697824051770155219626522725455850738640585299830379180350432876703809252167907571204061237596327685674845079151147313440001832570344920909712435809447900462494313455028900680648704293534037436032625820535790118395649089354345101342969
61754524957396062149028872893279252069653538639644322538832752249960598697475988232991626354597332444516375533437749292899058117578635555562693742691094711700216541171821975051983178713710605106379555858890556885288798908475091576463907469361988150781468526213325247383765119299015610918977792200870579339646382749068069876916819749236562422608715417610043060890437797667851966189140414492527048088197149880154205778700652159400928977760133075684796699295543365613984773806039436889588764605498387147896848280538470173087111776115966350503997934386933911978988710915654170913308260764740630571141109883938809548143782847452883836807941888434266622207043872288741394780101772139228191199236540551639589347426395382482960903690028835932774585506080131798840716244656399794827578365019551422155133928197822698427863839167971509126241054872570092407004548848569295044811073808799654748156891393538094347455697212891982717702076661360248958146811913361412125878389557735719498631721084439890142394849665925173138817160266326193106536653504147307080441493916936326237376777709585031325599009576273195730864804246770121232702053374266705314244820816813030639737873664248367253983748769098060218278578621651273856351329014890350988327061725893257536399397905572917516009761545904477169226580631511102803843601737474215247608515209901615858231257159073342173657626714239047827958728150509563309280266845893764964977023297364131906098274063353108979246424213458374090116939196425045912881340349881063540088759682005440836438651661788055760895689672753153808194207733259791727843762566118431989102500749182908647514979400316070384554946538594602745244746681231468794344161099333890899263841184742525704457251745932573898956518571657596148126602031079762825416559050604247911401695790033835657486925280074302562341949828646791447632277400552946090394017753633565547193100017543004750471914489984104001586794617924161001645471655133707407395026044276953855383439755054887109978520540117516974758134492607943368954
37832211724506873442319898788441285420647428097356258070669831069799352606933921356858813912148073547284632277849080870024677763036055512323866562951788537196730346347012229395816067925091532174890308408865160611190114984434123501246469280288059961342835118847154497712784733617662850621697787177438243625657117794500644777183702219991066950216567576440449979407650379999548450027106659878136038023141268369057831904607927652972776940436130230517870805465115424693952651271010529270703066730244471259739399505146284047674313637399782591845411764133279064606365841529270190302760173394748669603486949765417524293060407270050590395031485229213925755948450788679779252539317651564161971684435243697944473559642606333910551268260615957262170366985064732812667245219890605498802807828814297963366967441248059821921463395657457221022986775997467381260693670691340815594120161159601902377535255563006062479832612498812881929373434768626892192397778339107331065882568137771723283153290825250927330478507249771394483338925520811756084529665905539409655685417060011798572938139982583192936791003918440992865756059935989100029698644609747147184701015312837626311467742091455740418159088000649432378558393085308283054760767995243573916312218860575496738322431956506554608528812019023636447127037486344217272578795034284863129449163184753475314350413920961087960577309872013524840750576371992536504709085825139368634638633680428917671076021111598288755399401200760139470336617937153963061398636554922137415979051190835882900976566473007338793146789131814651093167615758213514248604422924453041131606527009743300884990346754055186406773426035834096086055337473627609356588531097609942383473822220872924644976845605795625167655740884103217313456277358560523582363895320385340248422733716391239732159954408284216666360232965456947035771848734420342277066538373875061692127680157661810954200977083636043611105924091178895403380214265239489296864398089261146354145715351943428507213534530183158756282757338982688985235
57799295727645229391567477566676051087887648453493636068278050564622813598885879259940946446041705204470046315137975431737187756039815962647501410906658866162180038266989961965580587208639721176995219466789857011798332440601811575658074284182910615193917630059194314434605154047710570054339000182453117733718955857603607182860506356479979004139761808955363669603162193113250223851791672055180659263518036251214575926238369348222665895576994660491938112486609099798128571823494006615552196112207203092277646200999315244273589488710576623894693889446495093960330454340842102462401048723328750081749179875543879387381439894238011762700837196053094383940063756116458560943129517597713935396074322792489221267045808183313764165818269562105872892447740035947009268662659651422050630078592002488291860839743732353849083964326147000532423540647042089499210250404726781059083644007466380020870126664209457181702946752278540074508552377720890581683918446592829417018288233014971554235235911774818628592967605048203864343108779562892925405638946621948268711042828163893975711757786915430165058602965217459581988878680408110328432739867198621306205559855266036405046282152306154594474489908839081999738747452969810776201487134000122535522246695409315213115337915798026979555710508507473874750758068765376445782524432638046143042889235934852961058269382103498000405248407084403561167817170512813378805705643450616119330424440798260377951198548694559152051960093041271007277849301555038895360338261929343797081874320949914159593396368110627557295278004254863060054523839151068998913578820019411786535682149118528207852130125518518493711503422159542244511900207393539627400208110465530207932867254740543652717595893500716336076321614725815407642053020045340183572338292661915308354095120226329165054426123619197051613839357326693760156914429944943744856809775696303129588719161129294681884936338647392747601226964158848900965717086160598147204467428664208765334799858222090619802173211614230419477754990738738567941
18982466091309169177227420723336763503267834058630193019324299639720444517928812285447821195353089891012534297552472763573022628138209180743974867145359077863353016082155991131414420509144729353502223081719366350934686585865631485557586244781862010871188976065296989926932817870557643514338206014107732926106343152533718224338526352021773544071528189813769875515757454693972715048846979361950047772097056179391382898984532742622728864710888327017372325881824465843624958059256033810521560620615571329915608489206434030339526226345145428367869828807425142256745180618414956468611163540497189768215422772247947403357152743681940989205011365340012384671429655186734415374161504256325671343024765512521921803578016924032669954174608759240920700466934039651017813485783569444076047023254075555776472845075182689041829396611331016013111907739863246277821902365066037404160672496249013743321724645409741299557052914243820807609836482346597388669134991978401310801558134397919485283043673901248208244481412809544377389832005986490915950532285791457688496257866588599917986752055455809900455646117875524937012455321717019428288461740273664997847550829422802023290122163010230977215156944642790980219082668986883426307160920791408519769523555348865774342527753119724743087304361951139611908003025587838764420608504473063129927788894272918972716989057592524467966018970748296094919064876469370275077386643239191904225429023531892337729316673608699622803255718530891928440380507103006477684786324319100022392978525537237556621364474009676053943983823576460699246526008909062410590421545392790441152958034533450025624410100635953003959886446616959562635187806068851372346270799732723313469397145628554261546765063246567662027924520858134771760852169134094652030767339184114750414016892412131982688156866456148538028753933116023229255561894104299533564009578649534093511526645402441877594931693056044868642086275720117231952640502309977456764783848897346431721598062678767183800524769688408498918508614900343240347
67426862459523958903585821350064509981782446360873177543788596776729195261112138591947254514003011805034378752776644027626189410175768726804281766238606804778852428874302591452470739505465251353394595987896197789110418902929438185672050709646062635417329446495766126519534957018600154126239622864138977967333290705673769621564981845068422636903678495559700260798679962610190393312637685569687670292953711625280055431007864087289392257145124811357786276649024251619902774710903359333093049483805978566288447874414698414990671237647895822632949046798120899848571635710878311918486302545016209298058292083348136384054217200561219893536693713367333924644161252231969434712064173754912163570085736943973059797097197266666422674311177621764030686813103518991122713397240368870009968629225464650063852886203938005047782769128356033725482557939129852515068299691077542576474883253414121328006267170940090982235296579579978030182824284902214707481111240186076134151503875698309186527806588966823625239378452726345304204188025084423631903833183845505223679923577529291069250432614469501098610888999146585518818735825281643025209392852580779697376208456374821144339881627100317031513344023095263519295886806908213558536801610002137408511544849126858412686958991741491338205784928006982551957402018181056412972508360703568510553317878408290000415525118657794539633175385320921497205266078312602819611648580986845875251299974040927976831766399146553861089375879522149717317281315179329044311218158710235187407572221001237687219447472093493123241070650806185623725267325407333248757544829675734500193219021991199607979893733836732425761039389853492787774739805080800155447640610535222023254094435677187945654304067358964910176107759483645408234861302547184764851895758366743997915085128580206078205544629917232020282229148869593997299742974711553718589242384938558585954074381048826246487880533042714630119415898963287926783273224561038521970111304665871005000832851773117764897352309266612345888731028835156264460
23671996644554727608310118788389151149340939344750073025855814756190881398752357812331342279866503522725367171230756861045004548970360079569827626392344107146584895780241408158405229536937499710665594894459246286619963556350652623405339439142111271810691052290024657423604130093691889255865784668461215679554256605416005071276641766056874274200329577160643448606201239821698271723197826816628249938714995449137302051843669076723577400053932662622760323659751718925901801104290384274185507894887438832703063283279963007200698012244365116394086922220745320244624121155804354542064215121585056896157356414313068883443185280853975927734433655384188340303517822946253702015782157373265523185763554098954033236382319219892171177449469403678296185920803403867575834111518824177439145077366384071880489358256868542011645031357633355509440319236720348651010561049872726472131986543435450409131859513145181276437310438972507004981987052176272494065214619959232142314439776546708351714749367986186552791715824080651063799500184295938799158350171580759883784962257398512129810326379376218322456594236685376799113140108043139732335449090824910499143325843298821033984698141715756010829706583065211347076803680695322971990599904451209087275776225351040902392888779424630483280319132710495478599180196967835321464441189260631526618167443193550817081875477050802654025294109218264858213857526688155584113198560022135158887210365696087515063187533002942118682221893775546027227291290504292259787710667873840000616772154638441292371193521828499824350920891801685572798156421858191197490985730570332667646460728757430565372602768982373259745084479649545648030771598153955827779139373601717422996027353102768719449444917939785144631597314435351850491413941557329382048542123508173912549749819308714396615132942045919380106231421774199184060180347949887691051557905554806953878540066453375981862846419905220452803306263695626490910827627115903856995051246529996062855443838330327638599800792922846659503551211245284087516
22906026201185777531374794936205549640107300134885315073548735390560290893352640071327473262196031177343394367338575912450814933573691166454128178817145402305475066713651825828489809951213919399563324133655677709800308191027204099714868741813466700609405102146269028044915964654533010775469541308871416531254481306119240782118869005602778182423502269618934435254763357353648561936325441775661398170393063287216690572225974520919291726219984440964615826945638023950283712168644656178523556516412771282691868861557271620147493405227694659571219831494338162211400693630743044417328478610177774383797703723179525543410722344551255558999864618387676490397246116795901810003509892864120419516355110876320426761297982652942588295114127584126273279079880755975185157684126474220947972184330935297266521001566251455299474512763155091763673025946213293019040283795424632325855030109670692272022707486341900543830265068121414213505715417505750863990767394633514620908288893493837643939925690060406731142209331219593620298297235116325938677224147791162957278075239505625158160313335938231150051862689053065836812998810866326327198061127154885879809348791291370749823057592909186293919501472119758606727009254771802575033773079939713453953264619526999659638565491759045833358579910201271320458390320085387888163363768518208372788513117522776960978796214237216254521459128183179821604411131167140691482717098101545778193920231156387195080502467972579249760577262591332855972637121120190572077140914864507409492671803581515757151405039761096384675556929897038354731410022380258346876735012977541327953206097115450648421218593649099791776687477448188287063231551586503289816422828823274686610659273219790716238464215348985247621678905026099804526648392954235728734397768049577409144953839157556548545905897649519851380100795801078375994577529919670054760225255203445398871253878017196071816407812484784725791240782454436168234523957068951427226975043187363326301110305342333582160933319121880660826834142891041517324
72160533558499932245487307788229052523242348615315209769384610425828497149634753418375620030149157032796853018686315724884015266398356895636346574353217834931998255421173084677452970858395076164582296303244243282377374505170285606980678895217681981567107816334052667595394249262807569683261074953233905362230908070814559198373553777487420290390181429373115293346444681512129450975965343062842153194457271186149000176505581770953024688752632501197052094761594167687277844720001927891372518416228577837922844390843011811214963664246590336341945406571835447719124466212593926566203068885200555991212353637182269225317814587925937504414489339816086579008761650246351970458288954817937566810464746141051424988702521399368705093723054477341126413548928068410591077166778212383328102621855877513127211793444482014404257450830639447383637939062830089733062413806145894142276947479316657176231824721683506780764875734204915576282175839729751344789906965895325489403356156131674032764724692125057591162515296545685446334981143176702572956618447754874693784642337372389819206620485118943788682248072793520225017965453437572741639107919729529508129429222053477173041844779156739917384183117103625243957161527146690058147000026330104526435478659032907332054683388720787354447626479252976901709120078741837367350877133769776834963442524199499513883150748775374338494582597655609965559543180409201784971846854973706962120885243770138537576814166327224126344239821529416453780004925072627651507890850712659970367087266927643083772296859851691223050374627443108529343052730788652839773352460174635277032059381791253969156210636376258829375713738407544064689647831007045806134467312715911946084359358259877828352665311510650416232953290477721740835593497237585521380483050900096466760883015406128243087406455944318534137552201663058121110334531207450868243394321590435944303124312274713858420303901060709403152355561727679941600203939750998976293353258555756248089966918298642226775023601932579747267425782111197347094
02357457222271212526852384295874273501563660093188045493338989741571490544182559738080871565281430102670460284316819230392535297795765862414392701549740879273131051636119137577008929564823323648298263024607975875767745377160102490804624301856524161756655600160859121534556267602192689982855377872583145144082654583484409478463178777374794653580169960779405568701192328608041130904629350871827125934668712766694873899824598527786499569165464029458935064964335809824765965165142090986755203808309203230487342703468288751604071546653834619611223013759451579252696743642531927390036038608236450762698827497618723575476762889950752114804852527950845033958570838130476937881321123674281319487950228066320170022460331989671970649163741175854851878484012054844672588851401562725019821719066960812627785485964818369621410721714214986361918774754509650308957099470934337856981674465828267911940611956037845397855839240761276344105766751024307559814552786167815949657062559755074306521085301597908073343736079432866757890533483669555486803913433720156498834220893399971641479746938696905480089193067138057171505857307148815649920714086758259602876056459782423770242469805328056632787041926768467116266879463486950464507420219373945259262668613552940624781361206202636498199999498405143868285258956342264328707663299304891723400725471764188685351372332667877921738347541480022803392997357936152412755829569276837231234798989446274330454566790062032420516396282588443085438307201495672106460533238537203143242112607424485845094580494081820927639140008540422023556260218564348994145439950410980591817948882628052066441086319001688568155169229486203010738897181007709290590480749092427141018933542818429995988169660993836961644381528877214085268088757488293258735809905670755817017949161906114001908553744882726200936685604475596557476485674008177381703307380305476973609786543859382187220583902344443508867499866506040645874346005331827436296177862518081893144363251205107094690813586440519229512932450078833398788
42933934243512634336520438581291283434529730865290978330067126179813031679438553572629699874035957045845223085639009891317947594875212639707837594486113945196028675121056163897600888009274611586080020780334159145179707303683519697776607637378533301202412011204698860920933908536577322239241244905153278095095586645947763448226998607481329730263097502881210351772312446509534965369309001863776409409434983731325132186208021480992268550294845466181471555744470966953017769043427203189277060471778452793916047228153437980353967986142437095668322149146543801459382927739339603275404800955223181666738035718393275707714204672383862461780397629237713120958078936384144792980258806552212926209362393063731349664018661951081158347117331202580586672763999276357907806381881306915636627412543125958993611964762610140556350339952314032311381965623632719896183725484533370206256346422395276694356837676136871196292181875457608161705303159072882870071231366630872275491866139577373054606599743781098764980241401124214277366808275139095931340415582626678951084677611866595766016599817808941498575497628438785610026379654317831363402513581416115190209649913354873313111502270068193013592959597164019719605362503355847998096348871803911161281359596856547886832585643789617315976200241962155289629790481982219946226948713746244472909345647002853769495885959160678928249105441251599630078136836749020937491573289627002865682934443134234735123929825916673950342599586897069726733258273590312128874666045146148785034614282776599160809039865257571726308183349444182019353338507129234577437557934406217871133006310600332405399169368260374617663856575887758020122936635327026710068126182517291460820254189288593524449107013820621155382779356529691457650204864328286555793470720963480737269214118689546732276775133569019015372366903686538916129168888787640752549349424973342718117889275993159671935475898809792452526236365903632007085444078454479734829180208204492667063442043755532505052752283377888704080403353192340768563
01093477721256390886404131010738178533383160381352808281190408325644018420537467929926220376987180180611226244909092426419858208617511771137890516091403815750033664241560952163281971223350231674226005679412814062172196418427057843289598028823350598282081966662490358577899403331522748177769528436816300885317696947836905806710648280835980466988410981351586549069333195223943632879239905348109878302745001720654336990661177845543646877236318444647680691428280045510746866453928053994091087549391660957316197150331669683099294663491427987808422572206971488755806374803088629951184731871247772919100702275888934869394562895158029653721504096031077612898312635899648934102470360366450586872875890514068412381242473863854279082827338279733268855049358743031602747490631295723497426112215174171531336186224109138695006888358989623492763173164783400774608866555987333821138299287769114954921841920877716060684728746736818861675072210172611038306717878566948129487850489430630861699487987031605158841082823512741535385133658953329486294944950618685147791058046960390693726626703865129052011378108586161888869479576074135855345851517680519733344334952301203957707396237713160302428872005373209982530089776189731298178819446717311606472314762484575519287327828251271824468078242152164695678192940982389262849437602488522790036202193866964822156280936053731780408637272684266964219299468192149087017075333610947913818040632873875938482695355830773957614479972700034728801827852813895032179863452161110666088393140532269449054555278678944175792024400214507801920998044613825478058580484424164047750315360549065914300781583724301231375115622840158386442708907182848167575271238467824595343344496220100960710513706084618011875431207254913349942476171156333214089346091565615506003173842187015702261031019166038870646614388977363187809407115275281746895764015810470169652475577408916445686777171585005832699434016772021567677240681283665652641229824394651331973591997094032759385026695574702318132032437164205861410
33606524536939160050644953060161267822648942437397166717661231048975031885732165554988342121802846912529086101485527815277625623750456375769497734336846015607727035509629049392487088406281067943622418704747008368842671022558302403599841645951122485272633632645114017395248086194635840783753556885622317115520947223065437092606797351000565549381224575483728545711797393615756167641692895805257297522338558611388322171107362265816218842443178857488798109026653793426664216990914056536432249301334867988154886628665052346997235574738424830590423677143278792316422403877764330192600192284778313837632536121025336935812624086866699738275977365682227907215832478888642369346396164363308730139814211430306008730666164803678984091335926293402304324974926887831643602681011309570716141912830686577323532639653677390317661361315965553584999398600565155921936759977717933019744688148371103206503693192894521402650915465184309936553493337183425298433679915939417466223900389527673813330617747629574943868716978453767219493506590875711917720875477107189937960894774512654757501871194870738736785890200617373321075693302216320628432065671192096950585761173961632326217708945426214609858410237813215817727602222738133495410481003073275107799948991977963883530734443457532975914263768405442264784216063122769646967156473999043715903323906560726644116438605404838847161912109008701019130726071044114143241976796828547885524779476481802959736049439700479596040292746299203572099761950140348315380947714601056333446998820822120587281510729182971211917876424880354672316916541852256729234429187128163232596965413548589577133208339911288775917226115273379010341362085614577992398778325083550730199818459025958355989260553299673770491722454935329683300002230181517226575787524058832249085821280089747909326100762578770428656006996176212176845478996440705066241710213327486796237430229155358200780141165348065647488230615003392068983794766255036549822805329662862117930628430170492402301985719978948836897183043805182174419
14766042975243725168343541121703863137941142209529588579806015293875275379903093887168357209576071522190027937929278630363726876582268124199338480816602160372215471014300737753779269906958712128928801905203160128586182549441335382078488346531163265040764242839087012101519423196165226842200371123046430067344206474771802135307012409886035339915266792387110170622186588357378121093517977560442563469499978725112544085452227481091487430725986960204027594117894258128188215995235965897918114407765335432175759525553615812800116384672031934650729680799079396371496177431211940202129757312516525376801735910155733815377200195244454362007184847566341540744232862106099761324348754884743453966598133871746609302053507027195298394327142537115576660002578442303107342955153394506048622276496668762407932435319299263925373107689213535257232108088981933916866827894828117047262450194840970097576092098372409007471797334078814182519584259809624174761013825264395513525931188504563626418830033853965243599741693132289471987830842760040136807470390409723847394583489618653979059411859931035616843686921948538205578039577388136067954990008512325944252972448666676683464140218991594456530942344065066785194841776677947047204195882204329538032631053749488312218039127967844610013972675389219511911783658766252808369005324900459741094706877291232821430463533728351995364827432583311914445901780960778288358373011185754365995898272453192531058811502630754257149394302445393187017992360816661130542625399583389794297160207033876781503301028012009599725222228080142357109476035192554443492998676781789104555906301595380976187592035893734197896235893112598390259831026719330418921510968915622506965911982832345550305908173073519550372166587028805399213857603703537710517802128012956684198414036287272562321442875430221090947272107347413497551419073704331827662617727599688882602722524713368335345281669277959132886138176634985772893690096574956228710302436259077241221909430087175569262575806570991201665962243608024287002
45473620363948412559548817272724736534677836472019183039987176270375157246499222894679323226936191776416146187956139566995677830682903165896994307673335082349907906241002025061340573443006957454746821756904416515406365846804636926212742110753990421887161276177870142588648257752238891845995233762923779155857445494773612955259522265786364621183775984737003479714082069941455807190802135907322692331008317595106590191212947954086036407573587502058902087045796700070552625058114206639074592152733094068236494415908910092202966805233252661989113118420162916310768940847235643668081821686572196882683584027855007828040434537101836510969517823357430305048526537380735310741859177056103973950626403554422751561011072617793706347238049906669221619711942591204450846417463835899382399465173955090008594799901360266742614942900664671150671754221770387745076735637421547829059110126191575558702389570014051178226469899449179083017954758767601680941001358376135785913569244556477644641786671153919513576961048649224900834467154863830544779143300976804868783481846727337584368927243104474068076852786255851650920882638132336231487333367147645204508766276149503899495048095604609896043291233583488599902945264002849942808786240398118148847673012167541611066299955536681931232874257020637383520200868636913117334697317412191536332467453256308713473027921749562270146873258678917345583799643513588009593508775563562488104938529990076751355135277924124292774885658885665132473025147102105753525165118148509027504768455182520963318990685276144351382136621523688905787866994322888160283774820355060160298940091197138501798716836337441392759736440170070147637066557035043381211135764150184518214136198234951596010647527125759351853043328755377830575095674254426847122196187091785607839361445113833356491032564057338986671781239722375193164306170138595394743678433926709867124522111896908402363274114966012434830989299417380305884171666130730400675883804321115553794406054977217059428215148861656727712409033877277456290
97110134885184374118695655449745736845218066982911045058004299887953899027804383596282409421860556287788428802127553884803728640019441614257499904272009595204654170598104989967504511936471172772220436102614079750809686975176600237187748348016120310234680567112644766123747627852190241202569943534716226660893675219833111813511146503854895025120655772636145473604426859498074396932331297127377157347099713952291182653485155587137336629120242714302503763269501350911612952993785864681307226486008270881333538193703682598867893321238327053297625857382790097826460545598555131836688844628265133798491667839409761353766251798258249663458771950124384040359140849209733754642474488176184070023569580177410177696925077814893386672557898564589851056891960924398841569280696983352240225634570497312245269354193837004843183357196516626721575524193401933099018319309196582920969656247667683659647019595754739345514337413708761517323677204227385674279170698204549953095918872434939524094441678998846319845504852393662972079777452814399418256789457795712552426826089940863317371538896262889629402112108884427376568624527612130371017300785135715404533041507959447776143597437803742436646973247138410492124314138903579092416036406314038149831481905251720937103964026808994832572297954564042701757722904173234796073618787889913318305843069394825961318713816423467218730845133877219086975104942843769325024981656673816260615941768252509993741672883951744066932549653403101452225316189009235376486378482881344209870048096227171226407489571939002918573307460104360729190945767994614929290427981687729426487729952858434647775386906950148984133924540394144680263625402118614317031251117577642829914644533408920976961699098372652361768745605894704968170136974909523072082682887890730190018253425805343421705928713931737993142410852647390948284596418093614138475831136130576108462366837237695913492615824516221552134879244145041756848064120636520170386330129532777699023118648020067556905682295016354931992305914246396217025
32974757311409422018019936803502649563695586642590676268568737211033915679383989576556519317788300024161353956243777784080174881937309502069990089089932808839743036773659552489130015663329407790713961546453408879151030065132193448667324827590794680787981942501958262232039513125201410996053126069655540424867054998678692302174698900954785072567297879476988883109348746442640071818316033165551153427615562240547447337804924621495213325852769884733626918264917433898782478927846891882805466998230368993978341374758702580571634941356843392939606819206177333179173820856243643363535986349449689078106401967407443658366707158692452118299789380407713750129085864657890577142683358276897855471768718442772612050926648610205153564284063236848180728794071712796682006072755955590404023317874944734645476062818954151213916291844429765106694796935401686601005519607768733539651161493093757096855455938151378956903925101495326562814701199832699220006639287537471313523642158926512620407288771657835840521964605410543544364216656224456504299901025658692727914275293117208279393775132610605288123537345106837293989358087124386938593438917571337630072031976081660446468393772580690923729752348670291691042636926209019960520412102407764819031601408586355842760953708655816427399534934654631450404019952853725200495780525465625115410925243799132626271360909940290226206283675213230506518393405745011209934146491843332364656937172591448932415900624202061288573292613359680872650004562828455757459659212053034131011182750130696150983551563200431078460190656549380654252522916199181995960275232770224985573882489988270746593635576858256051806896428537685077201222034792099393617926820659014216561592530673794456894907085326356819683186177226824991147261573203580764629811624401331673789278868922903259334986179702199498192573961767307583441709855922217017182571277753449150820527843090461946083521740200583867284970941102326695392144546106621500641067474020700918991195137646690448126725369153716229079138540393756007783
51533741677479421003840023089518509945487790393461222208650601605003517762648316111533255877050735412792499098593734737870811942530551214369797499149518605359204038302357163527276308746932196221900642608861836761033460022554774778136410126919065696864950126883762969072339612762872230411418136100602640440300359969889199458273976241146137448040596970625767647237660655416185746905272292382282751867991569833907476711461030227766060200612468764777288190967916133540198814027579921741676787992316039635694928515136336472195406111717673873725557285229400543617851765023075446938693078734991103521825329297260445532107978877114498988709115112372506042387537348412570860640690520584521227545338480082053024504565176695185769132000428167580549248117805198326460324457928297301291053183856368212062155312886685649565126138922613670640939533345705269869596923503530942245438652786776730275404027022463844835532399147513634410440500923303612714960813554905315390210022995957565837053812619656831442860579566966221547216956208700137277685369608407048333251327931122325071486302069512453950037357233468070946564830892098015348787056334910923660575540508641115214414814346304372732710450277686619531078583233348578402971609252153260925589326556006721243594642550659967717703884453961816328796144608177892721718369088801267782074301064225246348074543004764928855534090621851536543554741254761527697726677697727770583158014121856880117050283652755432148034880044429799980621579045641619572127845089284898064264974270905791290692178072987694779751124473059914060506299468942809310342164166299356148281309988707452927160484336308184041264696379258430941854422163590845761460785585624738149314270782662151855416038702068769804617474008083243436653823545551094494984310934947599446726736653525176627067721941831919771963780157021699336750837600571634546436717767233875886434056448715669643210412825956453498413884128904206820470076155969168430389993483667935425492103281133631847225923055543830582069416756299920133731
75489122037230349072681068534454035993561823576312837767640631013125335212141994611869350833176587852047112364331226765129964171325217513553261867681942338790365468908001827135283584888444111761234101179918709236507184857856221021104009776994453121795022479578069506532965940383987369907240797679040826794007618729547835963492793904576973661643405359792219285870574957481696694062334272619733518136626063735982575552496509807260123668283605928341855848026958413772558970883789942910549800331113884603401939166122186696058491571485733568286149500019097591125218800396419762163559375743718011480559442298730418196808085647265713547612831629200449880315402105530597076666362749328308916880932359290081787411985738317192616728834918402429721290434965526942726402559641463525914348400675867690350382320572934132981593533044446496829441367323442158380761694831219333119819061096142952201536170298575105594326461468505452684975764807808009221335811378197749271768545075538328768874474591593731162470601091244609829424841287520224462594477638749491997840446829257360968534549843266536862844489365704111817793806441616531223600214918768769467398407517176307516849856359201486892943105940202457969622924566644881967576294349535326382171613395757790766370764569570259738800438415805894336137106551859987600754924187211714889295221737721146081154344982665479872580056674724051122007383459271575727715218589946948117940644466399432370044291140747218180224825837736017346685300744985564715420036123593397312914458591522887408719508708632218837288262822884631843717261903305777147651564143822306791847386039147683108141358275755853643597721650028277803713422869688787349795096031108899196143386664068450697420787700280509367203387232629637856038653216432348815557557018469089074647879122436375556668678067610544955017260791142930831285761254481944449473244819093795369008206384631678225064809531810406570254327604385703505922818919878065865412184299217273720955103242251079718077833042609086794273428955735559252723
80551144043800123904168771644518022649168164192740110645162243110170005669112173318942340054795968466980429801736257040673328212996215368488140410219446342464622074557564396045298531307140908460849965376780379320189914086581466217531933766597011433060862500982956691763884605676297293146491149370462446935198403953444913514119366793330193661766365255514917498230798707228086085962611266050428929696653565251668888557211227680277274370891738963977225756489053340103885593112567999151658902501648696142720700591605616615970245198905183296927893555030393468121976158218398048396056252309146263844738629603984892438618729850777592879272206855480721049781765328621018747676689724884113956034948037672703631692100735083407386526168450748249644859742813493648037242611670426687083192504099761531907685577032742178501000644198412420739640013960360158381056592841368457411910273642027416372348821452410134771652960312840865841978795111651152982781462037913985500639996032659124852530849369031313010079997719136223086601109992914287124938854161203802041134018888721969347790449752745428807280350930582875442075513481666092787935356652125562013998824962847872621443236285367650259145046837763528258765213915648097214192967554938437558260025316853635673137926247587804944594418342917275698837622626184636545274349766241113845130548144983631178978448973207671950878415861887969295581973325069995140260151167552975057543781024223895792578656212843273120220071673057406928686936393018676595825132649914595026091706934751940897535746401683081179884645247361895605647942635807056256328118926966302647953595109712765913623318086692153578860781275991053717140220450618607537486630635059148391646765672320571451688617079098469593223672494673758309960704258922048155079913275208858378111768521426933478692189524062265792104362034885292626798401395321645879115157905046057971083898337186403802441751134722647254701079479399695355466961972676325522991465493349966323418595145036098034409221220671256769872342794070885707047
42931733291885238967219713539244924261786411886377909628144869178694681775917171506691114800207594320120619696377951032270890295660855622254526026104607361313688690092817210681986185537809820184711541636303262656992834241550236009780464171085255376127289053350455061356841437758544296779770146602943876872251153638011917581540281208182556064854107879335989210644272448986189616294134180012951306836386092941000831366733721530083526962357371753307386533382048421903081864491840937239440334052449095545580164064607615810103017674884750176619086929460987692016912021816882910408707095609514704169211470274133900522533408348128703530310239196999785974139085936054335996970756044601342424536824960987725813110247327985620721265724990034682938868723048955622532044636026398542252584164643242716114198178024825955635449072192265838636626637508359443148776351561457107455280161596770484427141944351832756984075526779264112617652506159652354571879566731709133193587616282559207830801852068901515047133403861003100559148178521103847545429333891884441205179439699701941126951195265649195941899754183932346474242907027188752235343936736336632003072327470374071239825620246626519740901997624520561985576257600087081730832883443818310700545144935458854226785785519153722923795554943334101744201696000906964156127322977702212179518683763590822551288164700219923488640439591530184640047143211863606225270115411222838027785389110984902013427410141215597699654388771974853764311582298385331230717511329619045590079380642766958190148426279912217929479873489018684716765038273285520590829845298062592503521284519259279865935061329619467962523739725655841578537445675589980324054921869628884903325608514553443916602262577755129162007727968526293879375304541810807292858919897153817973434961872329276147478501926114504132748732429705834084711123337462746172746265824153242710593225062553023147387592517247873228814914559156050363345754242337791603749525024930223514819613811625639114156103268449580725082734317659440540982
69765269344579863479709743124498271933113863873159636361218623497261409556079920628316999420072054811525353393946076850019909886553861433495781650089961649079678142901148387645682174914075623767618453775144031475411206760160726460556859257799322070337333398916369504346690694828436629980037414527627716547623825546170883189810868806847853705536480469350958818025360529740793538676511195079373282083146268960071075175520614433784114549950136432446328193346389050936545714506900864483440180428363390513578157273973334537284263372174065775771079830517555721036795976901889958494130195999573017901240193908681356585539661941371794487632079868800371607303220547423572266896801882123424391885984168972277652194032493227314793669234004848976059037958094696041754279613782553781223947646147832926976545162290281701100437846038756544151739433960048915318817576650500951697402415644771293656614253949368884230517400129920556854289853897942669956777027089146513736892206104415481662156804219838476730871787590279209175900695273456682026513373111518000181434120962601658629821076663523361774007837783423709152644063054071807843358061072961105550020415131696373046849213356837265400307509829089364612047891114753037049893952833457824082817386441322710002968311940203323456420826473276233830294639378998375836554559919340866235090967961134004867027123176526663710778725111860354037554487418693519733656621772359229396776463251562023487570113795712096237723431370212031004965152111976013176419408203437348512852602913334915125083119802850177855710725373149139215709105130965059885999931560863655477403551898166733535880048214665099741433761182777723351910741217572841592580872591315074606025634903777263373914461377038021318347447301113032670296917335047701632106616227830027269283365584011791419447808748253360714403296252285775009808599609040936312635621328162071453406104224112083010008587264252112262480142647519426184325853386753874054743491072710049754281159466017136122590440158991600229827801796035194080046
51353475269877760952783998436808690898919783969353217998013913544255271791022539701081063214304851137829149851138196914304349750018998068164441212327332830719282436240673319655469267785119315277511344646890550424811336143498460484905125834568326644152848971397237604032821266025351669391408204994732048602162775979177123475109750240307893575993771509502175169355582707253391189233407022383207758580213717477837877839101523413209848942345961369234049799827930414446316270721479611745697571968123929191374098292580556195520743424329598289898052923336641541925636738068949420147124134052507220406179435525255522500874879008656831454283516775054229480327478304405643858159195266675828292970522612762871104013480178722480178968405240792436058274246744307672164527031345135416764966890127478680101029513386269864974821211862904033769156857624069929637249309720162870720018983542369036414927023696193854737248032985504511208919287982987446786412915941753167560253343531062674525450711418148323988060729714023472552071349079839898235526872395090936566787899238371257897624875599044322889538837731734894112275707141095979004791930104674075041143538178246463079598955563899188477378134134707024674736211204898622699188851745625173251934135203811586335012391305444191007362844756751416105041097350585276204448919097890198431548528053398577784431393388399431044446566924455088594631408175122033139068159659251054685801313383815217641821043342978882611963044311138879625874609022613090084997543039577124323061690626291940392143974027089477766370248815549932245882597902063125743691094639325280624164247686849545532493801763937161563684785982371590238542126584061536722860713170267474013114526106376538339031592194346981760535838031061288785205154693363924108846763200956708971836749057816308515813816196688222204757043759061433804072585386208356517699842677452319582418268369827016023741493836349662935157685406139734274647089968561817016055110488097155485911861718966802597354170542398513556001872033507906094642
12711439931960465274240508822253597734815191354385712532585404939460108657937980586201433660788252197178090258173708709164604527279771535099103407364250203863867182205228796944583876529479510486607173902293274554267856697768659399234168341222746630150621553205026553414609952493560508549217565491348309589065361756938176374736441833789742297007035452066631709296075919896277324230902523974438610142630986877339138825186843165010279649114977375828889134503411488659486702154921010843280807834280894172980089832975369406449699031253998639195816014689952208806622854084148642747862819755466292788146216071713818801808405720847158689068369193933818642784545379567192723979723646516675920110579956639625985355127635587681402134098290162968734298507924718460568748283313812591619624761569028759010727331032991406238646083333786382579263023915900035576090324772813388873391780969666014696150317542267511259933155296742133363002229649064809345820081810618021002276645804002782133367585730190113717546727630590443531313190360924890972464279284555499134900051802957070829190525567818899138996251386623193800536113462242946102489540724048571232566288889317221164329478161905548680549434410340906807160880282279596869501336438142682521704728708630101373011552368614169083756757476372397631857570381094433905645644685241830281481079983769185121272019350440418046047216269394457883770901059746932197205581140787759897720720096893822493032368305158626572811146379969831375179376232151112523497343052406221052442343537329056551634066695061658928782187077567941760807129737813351871179316500331555238224877306534441794534153952024244497034101208740721881093882681675120422994049481794494727328947701115741394412284555218284249222406587526891722727806071167540469730080370396187877966948825556146743843925701158295466613586786718976612973112672000729715536130275035561678177654422874421147298816148027052438068176535732755786025058470840132088379328160087690813004924914736825170353822196190390149995234953871059973511
43478292339499187936608692301375596368532373806703591144243268561512109404259582639301678017128669239283231057658851714020211196957064799814031505633045141564414623163763809904402816256917576489142569714163598439317433270237812336938043012892626375382667795034169334323607500248175741808750388475094939454896209740485442635637164995949920980884294790363666297526003243856352945844728944547166209297495496616877414120882130477022816116456044007236351581149729739218966737382647204722642221242016560150284971306332795814302516013694825567014780935790889657134926158161346901806965089556310121218491805847922720691871696316330044858020102860657858591269974637661741463934159569539554203314628026518951167938074573315759846086173702687867602943677780500244673391332431669880354073232388281847501051641331189537036488422690270478052742490603492082954755054003457160184072574536938145531175354210726557835615499874447480427323457880061873149341566046352979779455075359304795687209316724536547208381685855606043801977030764246083489876101345709394877002946175792061952549255757109038525171488525265671045349813419803390641529876343695420256080277614421914318921393908834543131769685101840103844472348948869520981943531906506555354617335814045544837884752526253949665869992058417652780125341033896469818642430034146791380619028059607854888010789705516946215228773090104467462497979992627120951684779568482583341402266477210843362437593741610536734041954738964197895425335036301861400951534766961476255651873823292468547356935802896011536791787303553159378363082248615177770541577576561759358512016692943111138863582159667618830326104164651714846979385422621687161400122378213779774131268977266712992025922017408770076956283473932201088159356286281928563571893384958850603853158179760679479840878360975960149733420572704603521790605647603285569276273495182203236144112584182426247712012035776388895974318232827871314608053533574494297621796789034568169889553518504478325616380709476951699086247100019748809205
00952194363237871976487033922381154036347548862684595615975519376541011501406700122692747439388858994385973024541480106123590803627458528849356325158538438324249325266608758890831870070910023737710657698505643392885433765834259675065371500533351448990829388773735205145933304962653141514138612443793588507094468804548697535817021290849078734780681436632332281941582734567135644317153796781805819585246484008403290998194378171817730231700398973305049538735611626102399943325978012689343260558471027876490107092344388463401173555686590358524491937018104162620850429925869743581709813389404593447193749387762423240985283276226660494238512970945324558625210360082928664972417491914198896612955807677097959479530601311915901177394310420904907942444886851308684449370590902600612064942574471035354765785924270813041061854621988183009063458818703875585627491158737542106466795134648758677154383801852134828191581246259933516019893559516796893285220582479942103451271587716334522299541883968044883552975336128683722593539007920166694133909116875880398882886921600237325736158820716351627133281051818760210485218067552664867390890090719513805862673512431221569163790227732870541084203784152568328871804698795251307326634027851905941733892035854039567703561132935448258562828761061069822972142096199350933131217118789107876687204454887608941017479864713788246215395593333327556200943958043453791978228059039595992743691379377866494096404877784174833643268402628293240626008190808180439091455635193685606304508914228964521998779884934747772913279726602765840166789013649050874114212686196986204412696528298108704547986155954533802120115564697997678573892018624359932677768945406050821883822790983362716712449002676117849826437703300208184459000971723520433199470824209877151444975101705564302954282181967000920251561584417420593365814813490269311151709387226002645863056132560579256092733226557934628080568344392137368840565043430739657406101777937014142461549307074136080544210029560009566358897789926763051771
87819437067614982175641865901161608654086353915130392013168057690341725964536923508064174465623515239290504094799531840748621512105618338545661766526063937136588025216662235761322019417013726649660732520107719479312652827633024138051649071745659648537483546691945235803153019691604809946068149040378198297323609300871357607986214254220964190043679054790499300783724215819545354183711293686584305538427176280352791288211293083515756565999447417884383815651484342298587042455924346932952328218035083337262837918302165918361815542171574484657784201343299825945668845582661719790121808494803324487872581837748055222681510113717453684178702802744524429054745182346749195641885512444213377835214238659799259882032870851093383868299065719946149062902574276860388505110326385445404191849588665385450405713236296810691468148478696591668618427567984600418687622980555629630459532279230516167215919686758495236352989357885077460815373214546429847923105116763577494946229525694976603594739624309953433104049942096778838270027144784940690370732491064441516960532565605867787574174721108274357743151940607579835636291433263978122189462874477981198072256467146640548501310096567863148800903037493388753641831651349825466946733161181233648543976493250261795493572043054021829748712511074040116114058999110930624923128131163405492625713567218186289327861388337180285350565035919527414008695109261675414767926680321092374670872136062783329223864136195941213392780361182763241060047409711110481400036233427145144833346416754663546997314947566434236594934968458845515241507563766050866328274247941360628760412906449138285194564026431532258586240431418386695906332450630003922131926476259626915109044576953014440546180378575030366862124622786397527466678701210033929848733750144756003221006223580293437749550320370127384681630610265703008722754629667968808905871276763610662257223522297392064430935243272281008599730951325286306011054979156447918450046180467624089289256809129305929606423570210615246462050232489665939873
24933967376952023991760898474571843531936646529125848064480196520162838795189499336759241485626136995945307287254532463291529110128763770605570609531377527751867923292134955245133089867969165129073841302167573238637575820080363575728002754490327953079900799442541108725693188014667935595834676432868876966610097395749967836593397846346959948950610490383647409504695226063858046758073069912290474089879166872117147527644711604401952718169508289733537148530928937046384420893299771125856840846608339934045689026787516008775461267988015465856522061210953490796707365539702576199431376639960606061106406959330828171876426043573425361756943784848495250108266488395159700490598380812105221111091943323951136051446459834210799058082093716464523127704023160072138543723461267260997870385657091998507595634613248460188409850194287687902268734556500519121546544063829253851276317663922050938345204300773017029940362615434001322763910912988327863920412300445551684054889809080779174636092439334912641164240093880746356607262336695842764583698268734815881961058571835767462009650526065929263548291499045768307210893245857073701660717398194485028842603963660746031184786225831056580870870305567595861341700745402965687634774176431051751036732869245558582082372038601781739405175130437994868822320044378043103170921034261674998000073016094814586374488778522273076330495383944345382770608760763542098445008306247630253572781032783461766970544287155315340016497076657195985041748199087201490875686037783591994719343352772947285537925787684832301101859365800717291186967617655053775030293033830706448912811412025506150896411007623824574488655182581058140345320124754723269087547507078577659732542844459353044992070014538748948226556442223696365544194225441338212225477497535494624827680533336983284156138692363443358553868471111430498248398991803165458638289353799130535222833430137953372954016257623228081138499491876144141322933767106563492528814528239506209022357876684650116660097382753660405446941653422239052108
31458584703552935221992827276057482126606529138553034554974455147034493948686342945965843102419078592368022456076393678416627051855517870290407355730462063969245330779578224594971042018804300018388142900817303945050734278701312446686009277858181104091151172937487362788787490746528556543474888683106411005102302087510776891878152562273525155037953244485778727761700196485370355516765520911933934376286628461984402629525218367852236747510880978150709897841308624588152266096355140187449583692691779904712072649490573726428600521140358123107600669951853612486274675637589622529911649606687650826173417848478933729505673900787861792535144062104536625064046372881569823231750059626108092195521115085930295565496753886261297233991462835847604862762702730973920200143224870758233735491524608560821032888297418390647886992327369136004883743661522351705843770554521081551336126214291181561530175888257359489250710887926212864139244330938379733386780613179523731526677382085802470143352700924380326695174211950767088432634644274912755890774686358216216604274131517021245858605623363149316464691394656249747174195835421860774871105733845843368993964591374060338215935224359475162623918868530782282176398323730618020424656047752794310479618972429953302979249748168405289379104494700459086499187272734541350810198388186467360939257193051196864560185578245021823106588943798652243205067737996619695547244058592241795300682045179537004347245176289356677050849021310773662575169733552746230294303120359626095342357439724965921101065781782610874531887480318743082357369919515634095716270099244492974910548985151965866474014822510633536794973714251022934188258511737199449911509758374613010550506419772153192935487537119163026203032858865852848019350922587577559742527658401172134232364808402714335636754204637518255252494432965704386138786590196573880286840189408767281671413703366173265012057865391578070308871426151907500149257611292767519309672845397116021360630309054224396632067432358279788933232440577919927848
46333397777376559018705748068286783479656241461028995084873996929707504327530299728722973279344429886464127253481606037797072982991730292963086958019963124133049393504933254123550710544611825911411164545347103298810478440677801380771314654000993863064812666143308582068113958383191695455582594268957698414288937434670841079463189325391069639557807060212459748982935646135607889834724199794785643620420946134123876131988653523583129968622689486084084566556068769545012744866314050547353517468730098063227804689122468214608067276277084024022661554850240089528916571176174390203375848778429112896232470591918746910420058483261406773337510271956539946971625172483122306339193287079838007484857265161234349332733566644733585564302352808839243482787608861649432893991663992104883078477770480457284914563033532650700295889062659154985094079727675671297950100982294762289618915914415200322838787734851309790810191292672271037788980539641563623641691549857684083984688616843754070651210390625061281076637990479088796747780697384731704752534421563903872012388063236880370179493089549007763315230635483742568166533616066419800301882871237674818983302468363714883092592833759022789425880600872860388591688497306939480205112217663591382515242786700944069423551202015683777788518246700256517085092496237477268136942843500629388144299879053010562173754591826799732177350293689280652100253962688074980926434580116557158867004435039765053234782873273688408635400027406767838219635222265392909398073673913640828987220177767471681181958561337215831190546829360832369761134502817578302029348459829250008956826302712632958662921476531422333517930933879513570953463771836840924444220963193312956203055755173400679737406141621079236334238056468500920371671526425563718538895714164197723874226105966673969971731681694154350952831935564177056686222152179911513556397071433128936575538446483262012064243380169558626985610224606460693307938478588143674070005997697036490192733288261353293631124036506986521606389872502672380874
03396744397830258296894256896741864336134979475245526291426522842419243083388103580053787023999542172113686550275341362211693140694669513186928102574795985605145005021715913317751609957865551981886193211282110709442287240442481153406055895958355815232012184605820563592699303478851132068626627588771446035996656108430725696500563064489187599466596772847171539573612108180841547273142661748933134174632662354222072600146012701206934639520564445543291662986660783089068118790090815295063626782075614388815781351134695366303878412092346942868730839320432333872775496805210302821544324723388845215343727250128589747691460808314404125868181540049187772287869801853454537006526655649170915429522756709222217474112062720656622989806032891672068743654948246108697367225547404812889242471854323605753411672850757552057131156697954584887398742228135887985840783135060548290551482785294891121905383195624228719484759407859398047901094194070671764439032730712135887385049993638838205501683402777496070276844880281912220636888636811043569529300652195528261526991271637277388418993287130563464688227398288763198645709836308917786487086676185485680047672552675414742851028145807403152992197814557756843681110185317498167016426647884090262682824448258027532094549915104518517716546311804904567985713257528117913656278158111288816562285876030875974963849435275676612168959261485030785362045274507752950631012480341804584059432926079854435620093708091821523920371790678121992280496069738238743312626730306795943960954957189577217915597300588693646845576676092450906088202212235719254536715191834872587423919410890444115959932760044506556206461164655665487594247369252336955993030355095817626176231849561906494839673002037763874369343999829430209147073618947932692762445186560239559053705128978163455423320114975994896278424327483788032701418676952621180975006405149755889650293004867605208010491537885413909424531691719987628941277221129464568294860281493181560249677887949813777216229359437811004448060797672429276249
51078415344642915084276452000204276947069804177583220909702029165734725158290463091035903784297757265172087724474095226716630600546971638794317119687348468873818665675127929857501636341131462753049901913564682380432997069577015078933772865803571279091376742080565549362541 ''' # Source of 100,000 Pi digits http://www.geom.uiuc.edu/~huberty/math5337/groupe/digits.html import re pattern = re.compile(r'1234', re.I) matches = len(pattern.findall(pinumber)) label1234 = "10 32 32 32 40 34 41 32 32 32 32 32 32 95 32 32 32 32 32 32 95 95 95 32 32 32 32 32 95 95 95 95 32 32 32 95 32 95 32 32 32 32 32 32 40 34 41 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 95 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 95 95 95 32 32 32 32 32 95 10 32 32 32 32 92 124 32 32 32 32 32 47 32 124 32 32 32 32 124 95 32 32 41 32 32 32 124 95 95 32 47 32 32 124 32 124 32 124 32 32 32 32 32 32 92 124 32 32 32 32 32 32 111 32 79 32 79 32 32 95 95 32 95 32 32 32 32 95 32 95 32 32 32 32 32 95 95 124 32 124 32 32 32 32 32 111 32 79 32 79 32 32 32 124 32 95 32 92 32 32 32 40 95 41 10 32 32 32 32 32 32 32 32 32 32 32 124 32 124 32 32 32 32 32 47 32 47 32 32 32 32 32 124 95 32 92 32 32 124 95 32 32 95 124 32 32 32 32 32 32 32 32 32 32 32 32 111 32 32 32 32 32 32 47 32 95 96 32 124 32 32 124 32 39 32 92 32 32 32 47 32 95 96 32 124 32 32 32 32 111 32 32 32 32 32 32 32 32 124 32 32 95 47 32 32 32 124 32 124 10 32 32 95 95 95 95 95 32 32 32 95 124 95 124 95 32 32 32 47 95 95 95 124 32 32 32 124 95 95 95 47 32 32 32 95 124 95 124 95 32 32 32 95 95 95 95 95 32 32 32 66 73 95 95 91 79 93 32 92 95 95 44 95 124 32 32 124 95 124 124 95 124 32 32 92 95 95 44 95 124 32 32 32 76 76 95 95 91 79 93 32 32 95 124 95 124 95 32 32 32 95 124 95 124 95 10 95 124 32 32 32 32 32 124 95 124 34 34 34 34 34 124 95 124 34 34 34 34 34 124 95 124 34 34 34 34 34 124 95 124 34 34 34 34 34 124 95 124 32 32 32 32 32 124 32 123 61 61 61 61 61 61 124 95 124 34 34 34 34 34 124 95 124 34 34 
34 34 34 124 95 124 34 34 34 34 34 124 32 123 61 61 61 61 61 61 124 95 124 32 34 34 34 32 124 95 124 34 34 34 34 34 124 10 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 46 47 111 45 45 48 48 48 39 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 46 47 111 45 45 48 48 48 39 34 96 45 48 45 48 45 39 34 96 45 48 45 48 45 39 10" print(''.join(chr(int(c)) for c in label1234.split())) print("/\ " * 27) print(" " * 5, "The numeric sequence '1234' occours {} times in 100,000 first decimal digits of number Pi.".format(matches)) print("\/ " * 27) exitflag = input("Press ENTER to exit.")
4,109.32
100,002
0.990782
736
102,733
138.296196
0.130435
0.007545
0.008371
0.008095
0.012605
0.011996
0.011161
0.010719
0.010237
0.0095
0
0.994361
0.007369
102,733
24
100,003
4,280.541667
0.003775
0.003241
0
0
0
0.083333
0.997382
0.976878
0
1
0
0
0
1
0
false
0
0.083333
0
0.083333
0.333333
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
8
ad038cb65cce5537c3085877fe8869e31c6e6218
204
py
Python
tests/filters/conftest.py
cuspymd/CredSweeper
376e7faff41d8b58f0d9e2a82955ad0929ee8290
[ "MIT" ]
17
2021-10-22T00:29:46.000Z
2022-03-21T03:05:56.000Z
tests/filters/conftest.py
shadowscatcher/CredSweeper
0387ed76aca4a12154e15c49db8dc0901a014275
[ "MIT" ]
29
2021-11-05T21:10:51.000Z
2022-03-30T10:41:08.000Z
tests/filters/conftest.py
shadowscatcher/CredSweeper
0387ed76aca4a12154e15c49db8dc0901a014275
[ "MIT" ]
16
2021-11-05T20:39:54.000Z
2022-03-11T00:57:32.000Z
import pytest success_line_list = ["Crackle4421", "passwd = Crackle4421", "passwd = 'Crackle4421'"] @pytest.fixture(params=success_line_list) def success_line(request) -> str: return request.param
22.666667
85
0.75
24
204
6.166667
0.583333
0.222973
0.202703
0
0
0
0
0
0
0
0
0.067039
0.122549
204
8
86
25.5
0.759777
0
0
0
0
0
0.259804
0
0
0
0
0
0
1
0.2
false
0.2
0.2
0.2
0.6
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
1
1
0
0
7
a8ebc341f3cf5cb41bc8f89a71b91fd2bef920c9
8,605
py
Python
tests/test_matcher.py
luyang1210/PPRL
3e59ab82c4474efbcc88ab5c13b562d7e5a7ff87
[ "Apache-2.0" ]
null
null
null
tests/test_matcher.py
luyang1210/PPRL
3e59ab82c4474efbcc88ab5c13b562d7e5a7ff87
[ "Apache-2.0" ]
null
null
null
tests/test_matcher.py
luyang1210/PPRL
3e59ab82c4474efbcc88ab5c13b562d7e5a7ff87
[ "Apache-2.0" ]
null
null
null
import unittest import networkx as nx from anonlink.entitymatch import greedy_solver from anonlink import network_flow __author__ = 'Brian Thorne' def convert_graph_to_sparse(G): res = [] for node in G.nodes(): if node.startswith("col"): for linked_node in G[node]: res.append([int(linked_node[3:]), G[node][linked_node]['weight'], int(node[3:])]) return res class TestNetworkMatching(unittest.TestCase): """Network tests on following graph: [row0] ---- [col0] [row1] ---- [col1] [row2] \__- [col2] [row3] __| [col3] A much weaker connection exists between row1 and col2. The desired behaviour is to ignore this lower rated connection """ def setUp(self): self.G = nx.Graph() score = 0.9 self.G.add_edge('row0', 'col0', weight=score, capacity=1.0) self.G.add_edge('row1', 'col1', weight=score, capacity=1.0) self.G.add_edge('row1', 'col2', weight=score / 3, capacity=1.0) self.G.add_node('row2') self.G.add_edge('row3', 'col2', weight=score, capacity=1.0) self.expected_map = { 0: 0, 1: 1, 3: 2 } def tearDown(self): del self.G def test_default(self): mapping = network_flow.calculate_entity_mapping(self.G) self.assertDictEqual(self.expected_map, mapping) def test_bipartite(self): mapping = network_flow.calculate_entity_mapping(self.G, method='bipartite') self.assertDictEqual(self.expected_map, mapping) def test_weighted(self): mapping = network_flow.calculate_entity_mapping(self.G, method='weighted') self.assertDictEqual(self.expected_map, mapping) def test_invalid(self): args = (self.G, 'random') self.assertRaises(NotImplementedError, network_flow.calculate_entity_mapping, *args) @unittest.expectedFailure def test_greedy_badorder(self): #sparse_similarity_matrix = convert_graph_to_sparse(self.G) sparse_similarity_matrix = [[1, 0.3, 2], [3, 0.9, 2], [1, 0.9, 1], [0, 0.9, 0]] mapping = greedy_solver(sparse_similarity_matrix) self.assertDictEqual(self.expected_map, mapping) def test_greedy_presorted(self): #sparse_similarity_matrix = convert_graph_to_sparse(self.G) 
sparse_similarity_matrix = [[3, 0.9, 2], [1, 0.9, 1], [0, 0.9, 0], [1, 0.3, 2]] mapping = greedy_solver(sparse_similarity_matrix) self.assertDictEqual(self.expected_map, mapping) class TestNetworkMatchingDuplicates(unittest.TestCase): """Tests network matching on following graph: [row0] ---- [col0] [row1] ---- [col1] [row2] -- [col2] [row3] __| [col3] [row4] A slightly weaker connection exists between row1 and col2. The expected output will not include a matching between these, as col2 is strongly connected to row3. """ def setUp(self): self.G = nx.Graph() score = 0.9 self.G.add_edge('row0', 'col0', weight=score, capacity=1.0) self.G.add_edge('row1', 'col1', weight=score, capacity=1.0) self.G.add_edge('row1', 'col2', weight=score / 3, capacity=1.0) self.G.add_node('row2') self.G.add_edge('row3', 'col2', weight=score, capacity=1.0) self.expected_map = { 0: 0, 1: 1, 3: 2 } def test_default(self): mapping = network_flow.calculate_entity_mapping(self.G) self.assertDictEqual(self.expected_map, mapping) def test_bipartite(self): mapping = network_flow.calculate_entity_mapping(self.G, method='bipartite') self.assertDictEqual(self.expected_map, mapping) def test_weighted(self): mapping = network_flow.calculate_entity_mapping(self.G, method='weighted') self.assertDictEqual(self.expected_map, mapping) def test_invalid(self): args = (self.G, 'random') self.assertRaises(NotImplementedError, network_flow.calculate_entity_mapping, *args) @unittest.expectedFailure def test_greedy_badorder(self): #sparse_similarity_matrix = convert_graph_to_sparse(self.G) sparse_similarity_matrix = [[0, 0.9, 0], [1, 0.3, 2], [3, 0.9, 2], [1, 0.9, 1]] mapping = greedy_solver(sparse_similarity_matrix) self.assertDictEqual(self.expected_map, mapping) def test_greedy_presorted(self): #sparse_similarity_matrix = convert_graph_to_sparse(self.G) sparse_similarity_matrix = [[0, 0.9, 0], [3, 0.9, 2], [1, 0.9, 1], [1, 0.3, 2]] mapping = greedy_solver(sparse_similarity_matrix) 
self.assertDictEqual(self.expected_map, mapping) class TestNetworkGreedyHard1(unittest.TestCase): """Tests network matching on following graph: [A] ---- 0.9 [1] [A] ---- 0.8 [2] [B] ---- 0.9 [3] [C] ---- 0.9 [1] The network methods should succeed, and if the greedy algorithm is too naive it might fail. Correct mapping is: {a: 2, b: 3, c: 1} """ def setUp(self): self.G = nx.Graph() self.G.add_edge('row0', 'col0', weight=0.9, capacity=1.0) self.G.add_edge('row0', 'col1', weight=0.8, capacity=1.0) self.G.add_edge('row1', 'col2', weight=0.9, capacity=1.0) self.G.add_edge('row2', 'col0', weight=0.9, capacity=1.0) self.expected_map = { 0: 1, 1: 2, 2: 0 } def test_default(self): mapping = network_flow.calculate_entity_mapping(self.G) self.assertDictEqual(self.expected_map, mapping) def test_bipartite(self): mapping = network_flow.calculate_entity_mapping(self.G, method='bipartite') self.assertDictEqual(self.expected_map, mapping) def test_weighted(self): mapping = network_flow.calculate_entity_mapping(self.G, method='weighted') self.assertDictEqual(self.expected_map, mapping) def test_invalid(self): args = (self.G, 'random') self.assertRaises(NotImplementedError, network_flow.calculate_entity_mapping, *args) def test_greedy_presorted(self): #sparse_similarity_matrix = convert_graph_to_sparse(self.G) sparse_similarity_matrix = [[2, 0.9, 0], [1, 0.9, 2], [0, 0.9, 0], [0, 0.8, 1]] mapping = greedy_solver(sparse_similarity_matrix) self.assertDictEqual(self.expected_map, mapping) @unittest.expectedFailure def test_greedy_unsorted(self): sparse_similarity_matrix = [[0, 0.9, 0], [0, 0.8, 1], [1, 0.9, 2], [2, 0.9, 0]] mapping = greedy_solver(sparse_similarity_matrix) self.assertDictEqual(self.expected_map, mapping) class TestNetworkGreedyHard2(unittest.TestCase): """Tests network matching on following graph: [A] ---- 0.9 [1] [A] ---- 0.0 [2] [B] ---- 0.9 [3] [C] ---- 0.8 [1] The network methods should succeed, and if the greedy algorithm is too naive it might fail. 
Correct mapping is: {a: 2, b: 3, c: 1} """ def setUp(self): self.G = nx.Graph() self.G.add_edge('row0', 'col0', weight=0.9, capacity=1.0) self.G.add_edge('row0', 'col1', weight=0.9, capacity=1.0) self.G.add_edge('row1', 'col2', weight=0.9, capacity=1.0) self.G.add_edge('row2', 'col0', weight=0.8, capacity=1.0) self.expected_map = { 0: 1, 1: 2, 2: 0 } def test_default(self): mapping = network_flow.calculate_entity_mapping(self.G) self.assertDictEqual(self.expected_map, mapping) def test_bipartite(self): mapping = network_flow.calculate_entity_mapping(self.G, method='bipartite') self.assertDictEqual(self.expected_map, mapping) def test_weighted(self): mapping = network_flow.calculate_entity_mapping(self.G, method='weighted') self.assertDictEqual(self.expected_map, mapping) def test_invalid(self): args = (self.G, 'random') self.assertRaises(NotImplementedError, network_flow.calculate_entity_mapping, *args) @unittest.expectedFailure def test_greedy(self): # This will pass if the sparse similarity matrix is in a particular order #sparse_similarity_matrix = convert_graph_to_sparse(self.G) sparse_similarity_matrix = [[0, 0.9, 0], [0, 0.9, 1], [1, 0.9, 2], [2, 0.8, 0]] mapping = greedy_solver(sparse_similarity_matrix) self.assertDictEqual(self.expected_map, mapping) if __name__ == "__main__": unittest.main()
31.87037
97
0.63312
1,144
8,605
4.579545
0.112762
0.042947
0.065852
0.112426
0.874213
0.866196
0.866196
0.860851
0.834129
0.828402
0
0.042731
0.235793
8,605
269
98
31.988848
0.753954
0.172574
0
0.75
0
0
0.036968
0
0
0
0
0
0.155405
1
0.195946
false
0
0.027027
0
0.256757
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d126fc5fa58200ed8c8b4e85a54bd5102359420b
520
py
Python
eval_medseg_timm-regnetx_002_MedianBlur.py
BrunoKrinski/segtool
cb604b5f38104c43a76450136e37c3d1c4b6d275
[ "MIT" ]
null
null
null
eval_medseg_timm-regnetx_002_MedianBlur.py
BrunoKrinski/segtool
cb604b5f38104c43a76450136e37c3d1c4b6d275
[ "MIT" ]
null
null
null
eval_medseg_timm-regnetx_002_MedianBlur.py
BrunoKrinski/segtool
cb604b5f38104c43a76450136e37c3d1c4b6d275
[ "MIT" ]
null
null
null
import os ls=["python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_0_MedianBlur.yml", "python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_1_MedianBlur.yml", "python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_2_MedianBlur.yml", "python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_3_MedianBlur.yml", "python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_4_MedianBlur.yml", ] for l in ls: os.system(l)
47.272727
98
0.840385
80
520
5.0875
0.3
0.12285
0.14742
0.233415
0.889435
0.889435
0.889435
0.889435
0.889435
0.889435
0
0.0409
0.059615
520
11
99
47.272727
0.791411
0
0
0
0
0
0.873321
0.633397
0
0
0
0
0
1
0
false
0
0.111111
0
0.111111
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
7e538009fa40a78ded69f59e96435082781e35d7
51,493
py
Python
env/lib/python3.8/site-packages/apache_beam/portability/api/schema_pb2.py
paulowe/apache-beam-redocumentation
d1b0f345d8e46f9893f56c2bb890edc07be09f2a
[ "MIT" ]
null
null
null
env/lib/python3.8/site-packages/apache_beam/portability/api/schema_pb2.py
paulowe/apache-beam-redocumentation
d1b0f345d8e46f9893f56c2bb890edc07be09f2a
[ "MIT" ]
null
null
null
env/lib/python3.8/site-packages/apache_beam/portability/api/schema_pb2.py
paulowe/apache-beam-redocumentation
d1b0f345d8e46f9893f56c2bb890edc07be09f2a
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: schema.proto """Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='schema.proto', package='org.apache.beam.model.pipeline.v1', syntax='proto3', serialized_options=b'\n!org.apache.beam.model.pipeline.v1B\tSchemaApiZHgithub.com/apache/beam/sdks/v2/go/pkg/beam/model/pipeline_v1;pipeline_v1', create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x0cschema.proto\x12!org.apache.beam.model.pipeline.v1\"\xaa\x01\n\x06Schema\x12\x38\n\x06\x66ields\x18\x01 \x03(\x0b\x32(.org.apache.beam.model.pipeline.v1.Field\x12\n\n\x02id\x18\x02 \x01(\t\x12:\n\x07options\x18\x03 \x03(\x0b\x32).org.apache.beam.model.pipeline.v1.Option\x12\x1e\n\x16\x65ncoding_positions_set\x18\x04 \x01(\x08\"\xc9\x01\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12:\n\x04type\x18\x03 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\x12\n\n\x02id\x18\x04 \x01(\x05\x12\x19\n\x11\x65ncoding_position\x18\x05 \x01(\x05\x12:\n\x07options\x18\x06 \x03(\x0b\x32).org.apache.beam.model.pipeline.v1.Option\"\xc6\x03\n\tFieldType\x12\x10\n\x08nullable\x18\x01 \x01(\x08\x12\x44\n\x0b\x61tomic_type\x18\x02 \x01(\x0e\x32-.org.apache.beam.model.pipeline.v1.AtomicTypeH\x00\x12\x42\n\narray_type\x18\x03 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.ArrayTypeH\x00\x12H\n\riterable_type\x18\x04 \x01(\x0b\x32/.org.apache.beam.model.pipeline.v1.IterableTypeH\x00\x12>\n\x08map_type\x18\x05 \x01(\x0b\x32*.org.apache.beam.model.pipeline.v1.MapTypeH\x00\x12>\n\x08row_type\x18\x06 
\x01(\x0b\x32*.org.apache.beam.model.pipeline.v1.RowTypeH\x00\x12\x46\n\x0clogical_type\x18\x07 \x01(\x0b\x32..org.apache.beam.model.pipeline.v1.LogicalTypeH\x00\x42\x0b\n\ttype_info\"O\n\tArrayType\x12\x42\n\x0c\x65lement_type\x18\x01 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\"R\n\x0cIterableType\x12\x42\n\x0c\x65lement_type\x18\x01 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\"\x8b\x01\n\x07MapType\x12>\n\x08key_type\x18\x01 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\x12@\n\nvalue_type\x18\x02 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\"D\n\x07RowType\x12\x39\n\x06schema\x18\x01 \x01(\x0b\x32).org.apache.beam.model.pipeline.v1.Schema\"\xf7\x01\n\x0bLogicalType\x12\x0b\n\x03urn\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\x0c\x12\x44\n\x0erepresentation\x18\x03 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\x12\x43\n\rargument_type\x18\x04 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\x12?\n\x08\x61rgument\x18\x05 \x01(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue\"\x90\x01\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12:\n\x04type\x18\x02 \x01(\x0b\x32,.org.apache.beam.model.pipeline.v1.FieldType\x12<\n\x05value\x18\x03 \x01(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue\"D\n\x03Row\x12=\n\x06values\x18\x01 \x03(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue\"\xd7\x03\n\nFieldValue\x12J\n\x0c\x61tomic_value\x18\x01 \x01(\x0b\x32\x32.org.apache.beam.model.pipeline.v1.AtomicTypeValueH\x00\x12H\n\x0b\x61rray_value\x18\x02 \x01(\x0b\x32\x31.org.apache.beam.model.pipeline.v1.ArrayTypeValueH\x00\x12N\n\x0eiterable_value\x18\x03 \x01(\x0b\x32\x34.org.apache.beam.model.pipeline.v1.IterableTypeValueH\x00\x12\x44\n\tmap_value\x18\x04 \x01(\x0b\x32/.org.apache.beam.model.pipeline.v1.MapTypeValueH\x00\x12;\n\trow_value\x18\x05 \x01(\x0b\x32&.org.apache.beam.model.pipeline.v1.RowH\x00\x12Q\n\x12logical_type_value\x18\x06 
\x01(\x0b\x32\x33.org.apache.beam.model.pipeline.v1.LogicalTypeValueH\x00\x42\r\n\x0b\x66ield_value\"\xb6\x01\n\x0f\x41tomicTypeValue\x12\x0e\n\x04\x62yte\x18\x01 \x01(\x05H\x00\x12\x0f\n\x05int16\x18\x02 \x01(\x05H\x00\x12\x0f\n\x05int32\x18\x03 \x01(\x05H\x00\x12\x0f\n\x05int64\x18\x04 \x01(\x03H\x00\x12\x0f\n\x05\x66loat\x18\x05 \x01(\x02H\x00\x12\x10\n\x06\x64ouble\x18\x06 \x01(\x01H\x00\x12\x10\n\x06string\x18\x07 \x01(\tH\x00\x12\x11\n\x07\x62oolean\x18\x08 \x01(\x08H\x00\x12\x0f\n\x05\x62ytes\x18\t \x01(\x0cH\x00\x42\x07\n\x05value\"P\n\x0e\x41rrayTypeValue\x12>\n\x07\x65lement\x18\x01 \x03(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue\"S\n\x11IterableTypeValue\x12>\n\x07\x65lement\x18\x01 \x03(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue\"P\n\x0cMapTypeValue\x12@\n\x07\x65ntries\x18\x01 \x03(\x0b\x32/.org.apache.beam.model.pipeline.v1.MapTypeEntry\"\x88\x01\n\x0cMapTypeEntry\x12:\n\x03key\x18\x01 \x01(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue\x12<\n\x05value\x18\x02 \x01(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue\"P\n\x10LogicalTypeValue\x12<\n\x05value\x18\x01 \x01(\x0b\x32-.org.apache.beam.model.pipeline.v1.FieldValue*\x83\x01\n\nAtomicType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x08\n\x04\x42YTE\x10\x01\x12\t\n\x05INT16\x10\x02\x12\t\n\x05INT32\x10\x03\x12\t\n\x05INT64\x10\x04\x12\t\n\x05\x46LOAT\x10\x05\x12\n\n\x06\x44OUBLE\x10\x06\x12\n\n\x06STRING\x10\x07\x12\x0b\n\x07\x42OOLEAN\x10\x08\x12\t\n\x05\x42YTES\x10\tBx\n!org.apache.beam.model.pipeline.v1B\tSchemaApiZHgithub.com/apache/beam/sdks/v2/go/pkg/beam/model/pipeline_v1;pipeline_v1b\x06proto3' ) _ATOMICTYPE = _descriptor.EnumDescriptor( name='AtomicType', full_name='org.apache.beam.model.pipeline.v1.AtomicType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='UNSPECIFIED', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), 
_descriptor.EnumValueDescriptor( name='BYTE', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INT16', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INT32', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INT64', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FLOAT', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DOUBLE', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='STRING', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='BOOLEAN', index=8, number=8, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='BYTES', index=9, number=9, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=2859, serialized_end=2990, ) _sym_db.RegisterEnumDescriptor(_ATOMICTYPE) AtomicType = enum_type_wrapper.EnumTypeWrapper(_ATOMICTYPE) UNSPECIFIED = 0 BYTE = 1 INT16 = 2 INT32 = 3 INT64 = 4 FLOAT = 5 DOUBLE = 6 STRING = 7 BOOLEAN = 8 BYTES = 9 _SCHEMA = _descriptor.Descriptor( name='Schema', full_name='org.apache.beam.model.pipeline.v1.Schema', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='fields', full_name='org.apache.beam.model.pipeline.v1.Schema.fields', index=0, number=1, type=11, 
cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='id', full_name='org.apache.beam.model.pipeline.v1.Schema.id', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='options', full_name='org.apache.beam.model.pipeline.v1.Schema.options', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='encoding_positions_set', full_name='org.apache.beam.model.pipeline.v1.Schema.encoding_positions_set', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=52, serialized_end=222, ) _FIELD = _descriptor.Descriptor( name='Field', full_name='org.apache.beam.model.pipeline.v1.Field', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='name', full_name='org.apache.beam.model.pipeline.v1.Field.name', index=0, number=1, type=9, cpp_type=9, label=1, 
has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='description', full_name='org.apache.beam.model.pipeline.v1.Field.description', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type', full_name='org.apache.beam.model.pipeline.v1.Field.type', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='id', full_name='org.apache.beam.model.pipeline.v1.Field.id', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='encoding_position', full_name='org.apache.beam.model.pipeline.v1.Field.encoding_position', index=4, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='options', full_name='org.apache.beam.model.pipeline.v1.Field.options', index=5, number=6, type=11, cpp_type=10, label=3, has_default_value=False, 
default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=225, serialized_end=426, ) _FIELDTYPE = _descriptor.Descriptor( name='FieldType', full_name='org.apache.beam.model.pipeline.v1.FieldType', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='nullable', full_name='org.apache.beam.model.pipeline.v1.FieldType.nullable', index=0, number=1, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='atomic_type', full_name='org.apache.beam.model.pipeline.v1.FieldType.atomic_type', index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='array_type', full_name='org.apache.beam.model.pipeline.v1.FieldType.array_type', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='iterable_type', full_name='org.apache.beam.model.pipeline.v1.FieldType.iterable_type', index=3, number=4, type=11, cpp_type=10, label=1, 
has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='map_type', full_name='org.apache.beam.model.pipeline.v1.FieldType.map_type', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='row_type', full_name='org.apache.beam.model.pipeline.v1.FieldType.row_type', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='logical_type', full_name='org.apache.beam.model.pipeline.v1.FieldType.logical_type', index=6, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='type_info', full_name='org.apache.beam.model.pipeline.v1.FieldType.type_info', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=429, serialized_end=883, ) _ARRAYTYPE = _descriptor.Descriptor( name='ArrayType', full_name='org.apache.beam.model.pipeline.v1.ArrayType', filename=None, file=DESCRIPTOR, containing_type=None, 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='element_type', full_name='org.apache.beam.model.pipeline.v1.ArrayType.element_type', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=885, serialized_end=964, ) _ITERABLETYPE = _descriptor.Descriptor( name='IterableType', full_name='org.apache.beam.model.pipeline.v1.IterableType', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='element_type', full_name='org.apache.beam.model.pipeline.v1.IterableType.element_type', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=966, serialized_end=1048, ) _MAPTYPE = _descriptor.Descriptor( name='MapType', full_name='org.apache.beam.model.pipeline.v1.MapType', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key_type', full_name='org.apache.beam.model.pipeline.v1.MapType.key_type', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value_type', full_name='org.apache.beam.model.pipeline.v1.MapType.value_type', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1051, serialized_end=1190, ) _ROWTYPE = _descriptor.Descriptor( name='RowType', full_name='org.apache.beam.model.pipeline.v1.RowType', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='schema', full_name='org.apache.beam.model.pipeline.v1.RowType.schema', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1192, serialized_end=1260, ) _LOGICALTYPE = _descriptor.Descriptor( name='LogicalType', full_name='org.apache.beam.model.pipeline.v1.LogicalType', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='urn', full_name='org.apache.beam.model.pipeline.v1.LogicalType.urn', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='payload', full_name='org.apache.beam.model.pipeline.v1.LogicalType.payload', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='representation', full_name='org.apache.beam.model.pipeline.v1.LogicalType.representation', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='argument_type', full_name='org.apache.beam.model.pipeline.v1.LogicalType.argument_type', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='argument', full_name='org.apache.beam.model.pipeline.v1.LogicalType.argument', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1263, serialized_end=1510, ) _OPTION = _descriptor.Descriptor( name='Option', 
full_name='org.apache.beam.model.pipeline.v1.Option', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='name', full_name='org.apache.beam.model.pipeline.v1.Option.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type', full_name='org.apache.beam.model.pipeline.v1.Option.type', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='org.apache.beam.model.pipeline.v1.Option.value', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1513, serialized_end=1657, ) _ROW = _descriptor.Descriptor( name='Row', full_name='org.apache.beam.model.pipeline.v1.Row', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='values', full_name='org.apache.beam.model.pipeline.v1.Row.values', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1659, serialized_end=1727, ) _FIELDVALUE = _descriptor.Descriptor( name='FieldValue', full_name='org.apache.beam.model.pipeline.v1.FieldValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='atomic_value', full_name='org.apache.beam.model.pipeline.v1.FieldValue.atomic_value', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='array_value', full_name='org.apache.beam.model.pipeline.v1.FieldValue.array_value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='iterable_value', full_name='org.apache.beam.model.pipeline.v1.FieldValue.iterable_value', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='map_value', full_name='org.apache.beam.model.pipeline.v1.FieldValue.map_value', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='row_value', full_name='org.apache.beam.model.pipeline.v1.FieldValue.row_value', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='logical_type_value', full_name='org.apache.beam.model.pipeline.v1.FieldValue.logical_type_value', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='field_value', full_name='org.apache.beam.model.pipeline.v1.FieldValue.field_value', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=1730, serialized_end=2201, ) _ATOMICTYPEVALUE = _descriptor.Descriptor( name='AtomicTypeValue', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='byte', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.byte', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), 
_descriptor.FieldDescriptor( name='int16', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.int16', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='int32', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.int32', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='int64', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.int64', index=3, number=4, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='float', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.float', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='double', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.double', index=5, number=6, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( 
name='string', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.string', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='boolean', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.boolean', index=7, number=8, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='bytes', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.bytes', index=8, number=9, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='value', full_name='org.apache.beam.model.pipeline.v1.AtomicTypeValue.value', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=2204, serialized_end=2386, ) _ARRAYTYPEVALUE = _descriptor.Descriptor( name='ArrayTypeValue', full_name='org.apache.beam.model.pipeline.v1.ArrayTypeValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='element', full_name='org.apache.beam.model.pipeline.v1.ArrayTypeValue.element', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, 
default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2388, serialized_end=2468, ) _ITERABLETYPEVALUE = _descriptor.Descriptor( name='IterableTypeValue', full_name='org.apache.beam.model.pipeline.v1.IterableTypeValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='element', full_name='org.apache.beam.model.pipeline.v1.IterableTypeValue.element', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2470, serialized_end=2553, ) _MAPTYPEVALUE = _descriptor.Descriptor( name='MapTypeValue', full_name='org.apache.beam.model.pipeline.v1.MapTypeValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='entries', full_name='org.apache.beam.model.pipeline.v1.MapTypeValue.entries', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], 
oneofs=[ ], serialized_start=2555, serialized_end=2635, ) _MAPTYPEENTRY = _descriptor.Descriptor( name='MapTypeEntry', full_name='org.apache.beam.model.pipeline.v1.MapTypeEntry', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='org.apache.beam.model.pipeline.v1.MapTypeEntry.key', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='org.apache.beam.model.pipeline.v1.MapTypeEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2638, serialized_end=2774, ) _LOGICALTYPEVALUE = _descriptor.Descriptor( name='LogicalTypeValue', full_name='org.apache.beam.model.pipeline.v1.LogicalTypeValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='value', full_name='org.apache.beam.model.pipeline.v1.LogicalTypeValue.value', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, 
syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2776, serialized_end=2856, ) _SCHEMA.fields_by_name['fields'].message_type = _FIELD _SCHEMA.fields_by_name['options'].message_type = _OPTION _FIELD.fields_by_name['type'].message_type = _FIELDTYPE _FIELD.fields_by_name['options'].message_type = _OPTION _FIELDTYPE.fields_by_name['atomic_type'].enum_type = _ATOMICTYPE _FIELDTYPE.fields_by_name['array_type'].message_type = _ARRAYTYPE _FIELDTYPE.fields_by_name['iterable_type'].message_type = _ITERABLETYPE _FIELDTYPE.fields_by_name['map_type'].message_type = _MAPTYPE _FIELDTYPE.fields_by_name['row_type'].message_type = _ROWTYPE _FIELDTYPE.fields_by_name['logical_type'].message_type = _LOGICALTYPE _FIELDTYPE.oneofs_by_name['type_info'].fields.append( _FIELDTYPE.fields_by_name['atomic_type']) _FIELDTYPE.fields_by_name['atomic_type'].containing_oneof = _FIELDTYPE.oneofs_by_name['type_info'] _FIELDTYPE.oneofs_by_name['type_info'].fields.append( _FIELDTYPE.fields_by_name['array_type']) _FIELDTYPE.fields_by_name['array_type'].containing_oneof = _FIELDTYPE.oneofs_by_name['type_info'] _FIELDTYPE.oneofs_by_name['type_info'].fields.append( _FIELDTYPE.fields_by_name['iterable_type']) _FIELDTYPE.fields_by_name['iterable_type'].containing_oneof = _FIELDTYPE.oneofs_by_name['type_info'] _FIELDTYPE.oneofs_by_name['type_info'].fields.append( _FIELDTYPE.fields_by_name['map_type']) _FIELDTYPE.fields_by_name['map_type'].containing_oneof = _FIELDTYPE.oneofs_by_name['type_info'] _FIELDTYPE.oneofs_by_name['type_info'].fields.append( _FIELDTYPE.fields_by_name['row_type']) _FIELDTYPE.fields_by_name['row_type'].containing_oneof = _FIELDTYPE.oneofs_by_name['type_info'] _FIELDTYPE.oneofs_by_name['type_info'].fields.append( _FIELDTYPE.fields_by_name['logical_type']) _FIELDTYPE.fields_by_name['logical_type'].containing_oneof = _FIELDTYPE.oneofs_by_name['type_info'] _ARRAYTYPE.fields_by_name['element_type'].message_type = _FIELDTYPE 
_ITERABLETYPE.fields_by_name['element_type'].message_type = _FIELDTYPE _MAPTYPE.fields_by_name['key_type'].message_type = _FIELDTYPE _MAPTYPE.fields_by_name['value_type'].message_type = _FIELDTYPE _ROWTYPE.fields_by_name['schema'].message_type = _SCHEMA _LOGICALTYPE.fields_by_name['representation'].message_type = _FIELDTYPE _LOGICALTYPE.fields_by_name['argument_type'].message_type = _FIELDTYPE _LOGICALTYPE.fields_by_name['argument'].message_type = _FIELDVALUE _OPTION.fields_by_name['type'].message_type = _FIELDTYPE _OPTION.fields_by_name['value'].message_type = _FIELDVALUE _ROW.fields_by_name['values'].message_type = _FIELDVALUE _FIELDVALUE.fields_by_name['atomic_value'].message_type = _ATOMICTYPEVALUE _FIELDVALUE.fields_by_name['array_value'].message_type = _ARRAYTYPEVALUE _FIELDVALUE.fields_by_name['iterable_value'].message_type = _ITERABLETYPEVALUE _FIELDVALUE.fields_by_name['map_value'].message_type = _MAPTYPEVALUE _FIELDVALUE.fields_by_name['row_value'].message_type = _ROW _FIELDVALUE.fields_by_name['logical_type_value'].message_type = _LOGICALTYPEVALUE _FIELDVALUE.oneofs_by_name['field_value'].fields.append( _FIELDVALUE.fields_by_name['atomic_value']) _FIELDVALUE.fields_by_name['atomic_value'].containing_oneof = _FIELDVALUE.oneofs_by_name['field_value'] _FIELDVALUE.oneofs_by_name['field_value'].fields.append( _FIELDVALUE.fields_by_name['array_value']) _FIELDVALUE.fields_by_name['array_value'].containing_oneof = _FIELDVALUE.oneofs_by_name['field_value'] _FIELDVALUE.oneofs_by_name['field_value'].fields.append( _FIELDVALUE.fields_by_name['iterable_value']) _FIELDVALUE.fields_by_name['iterable_value'].containing_oneof = _FIELDVALUE.oneofs_by_name['field_value'] _FIELDVALUE.oneofs_by_name['field_value'].fields.append( _FIELDVALUE.fields_by_name['map_value']) _FIELDVALUE.fields_by_name['map_value'].containing_oneof = _FIELDVALUE.oneofs_by_name['field_value'] _FIELDVALUE.oneofs_by_name['field_value'].fields.append( _FIELDVALUE.fields_by_name['row_value']) 
_FIELDVALUE.fields_by_name['row_value'].containing_oneof = _FIELDVALUE.oneofs_by_name['field_value'] _FIELDVALUE.oneofs_by_name['field_value'].fields.append( _FIELDVALUE.fields_by_name['logical_type_value']) _FIELDVALUE.fields_by_name['logical_type_value'].containing_oneof = _FIELDVALUE.oneofs_by_name['field_value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['byte']) _ATOMICTYPEVALUE.fields_by_name['byte'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['int16']) _ATOMICTYPEVALUE.fields_by_name['int16'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['int32']) _ATOMICTYPEVALUE.fields_by_name['int32'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['int64']) _ATOMICTYPEVALUE.fields_by_name['int64'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['float']) _ATOMICTYPEVALUE.fields_by_name['float'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['double']) _ATOMICTYPEVALUE.fields_by_name['double'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['string']) _ATOMICTYPEVALUE.fields_by_name['string'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( _ATOMICTYPEVALUE.fields_by_name['boolean']) _ATOMICTYPEVALUE.fields_by_name['boolean'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ATOMICTYPEVALUE.oneofs_by_name['value'].fields.append( 
_ATOMICTYPEVALUE.fields_by_name['bytes']) _ATOMICTYPEVALUE.fields_by_name['bytes'].containing_oneof = _ATOMICTYPEVALUE.oneofs_by_name['value'] _ARRAYTYPEVALUE.fields_by_name['element'].message_type = _FIELDVALUE _ITERABLETYPEVALUE.fields_by_name['element'].message_type = _FIELDVALUE _MAPTYPEVALUE.fields_by_name['entries'].message_type = _MAPTYPEENTRY _MAPTYPEENTRY.fields_by_name['key'].message_type = _FIELDVALUE _MAPTYPEENTRY.fields_by_name['value'].message_type = _FIELDVALUE _LOGICALTYPEVALUE.fields_by_name['value'].message_type = _FIELDVALUE DESCRIPTOR.message_types_by_name['Schema'] = _SCHEMA DESCRIPTOR.message_types_by_name['Field'] = _FIELD DESCRIPTOR.message_types_by_name['FieldType'] = _FIELDTYPE DESCRIPTOR.message_types_by_name['ArrayType'] = _ARRAYTYPE DESCRIPTOR.message_types_by_name['IterableType'] = _ITERABLETYPE DESCRIPTOR.message_types_by_name['MapType'] = _MAPTYPE DESCRIPTOR.message_types_by_name['RowType'] = _ROWTYPE DESCRIPTOR.message_types_by_name['LogicalType'] = _LOGICALTYPE DESCRIPTOR.message_types_by_name['Option'] = _OPTION DESCRIPTOR.message_types_by_name['Row'] = _ROW DESCRIPTOR.message_types_by_name['FieldValue'] = _FIELDVALUE DESCRIPTOR.message_types_by_name['AtomicTypeValue'] = _ATOMICTYPEVALUE DESCRIPTOR.message_types_by_name['ArrayTypeValue'] = _ARRAYTYPEVALUE DESCRIPTOR.message_types_by_name['IterableTypeValue'] = _ITERABLETYPEVALUE DESCRIPTOR.message_types_by_name['MapTypeValue'] = _MAPTYPEVALUE DESCRIPTOR.message_types_by_name['MapTypeEntry'] = _MAPTYPEENTRY DESCRIPTOR.message_types_by_name['LogicalTypeValue'] = _LOGICALTYPEVALUE DESCRIPTOR.enum_types_by_name['AtomicType'] = _ATOMICTYPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) Schema = _reflection.GeneratedProtocolMessageType('Schema', (_message.Message,), { 'DESCRIPTOR' : _SCHEMA, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.Schema) }) _sym_db.RegisterMessage(Schema) Field = _reflection.GeneratedProtocolMessageType('Field', 
(_message.Message,), { 'DESCRIPTOR' : _FIELD, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.Field) }) _sym_db.RegisterMessage(Field) FieldType = _reflection.GeneratedProtocolMessageType('FieldType', (_message.Message,), { 'DESCRIPTOR' : _FIELDTYPE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.FieldType) }) _sym_db.RegisterMessage(FieldType) ArrayType = _reflection.GeneratedProtocolMessageType('ArrayType', (_message.Message,), { 'DESCRIPTOR' : _ARRAYTYPE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.ArrayType) }) _sym_db.RegisterMessage(ArrayType) IterableType = _reflection.GeneratedProtocolMessageType('IterableType', (_message.Message,), { 'DESCRIPTOR' : _ITERABLETYPE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.IterableType) }) _sym_db.RegisterMessage(IterableType) MapType = _reflection.GeneratedProtocolMessageType('MapType', (_message.Message,), { 'DESCRIPTOR' : _MAPTYPE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.MapType) }) _sym_db.RegisterMessage(MapType) RowType = _reflection.GeneratedProtocolMessageType('RowType', (_message.Message,), { 'DESCRIPTOR' : _ROWTYPE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.RowType) }) _sym_db.RegisterMessage(RowType) LogicalType = _reflection.GeneratedProtocolMessageType('LogicalType', (_message.Message,), { 'DESCRIPTOR' : _LOGICALTYPE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.LogicalType) }) _sym_db.RegisterMessage(LogicalType) Option = _reflection.GeneratedProtocolMessageType('Option', (_message.Message,), { 'DESCRIPTOR' : _OPTION, '__module__' : 'schema_pb2' # 
@@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.Option) }) _sym_db.RegisterMessage(Option) Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), { 'DESCRIPTOR' : _ROW, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.Row) }) _sym_db.RegisterMessage(Row) FieldValue = _reflection.GeneratedProtocolMessageType('FieldValue', (_message.Message,), { 'DESCRIPTOR' : _FIELDVALUE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.FieldValue) }) _sym_db.RegisterMessage(FieldValue) AtomicTypeValue = _reflection.GeneratedProtocolMessageType('AtomicTypeValue', (_message.Message,), { 'DESCRIPTOR' : _ATOMICTYPEVALUE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.AtomicTypeValue) }) _sym_db.RegisterMessage(AtomicTypeValue) ArrayTypeValue = _reflection.GeneratedProtocolMessageType('ArrayTypeValue', (_message.Message,), { 'DESCRIPTOR' : _ARRAYTYPEVALUE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.ArrayTypeValue) }) _sym_db.RegisterMessage(ArrayTypeValue) IterableTypeValue = _reflection.GeneratedProtocolMessageType('IterableTypeValue', (_message.Message,), { 'DESCRIPTOR' : _ITERABLETYPEVALUE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.IterableTypeValue) }) _sym_db.RegisterMessage(IterableTypeValue) MapTypeValue = _reflection.GeneratedProtocolMessageType('MapTypeValue', (_message.Message,), { 'DESCRIPTOR' : _MAPTYPEVALUE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.MapTypeValue) }) _sym_db.RegisterMessage(MapTypeValue) MapTypeEntry = _reflection.GeneratedProtocolMessageType('MapTypeEntry', (_message.Message,), { 'DESCRIPTOR' : _MAPTYPEENTRY, '__module__' : 'schema_pb2' # 
@@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.MapTypeEntry) }) _sym_db.RegisterMessage(MapTypeEntry) LogicalTypeValue = _reflection.GeneratedProtocolMessageType('LogicalTypeValue', (_message.Message,), { 'DESCRIPTOR' : _LOGICALTYPEVALUE, '__module__' : 'schema_pb2' # @@protoc_insertion_point(class_scope:org.apache.beam.model.pipeline.v1.LogicalTypeValue) }) _sym_db.RegisterMessage(LogicalTypeValue) DESCRIPTOR._options = None # @@protoc_insertion_point(module_scope)
44.93281
4,765
0.761968
6,708
51,493
5.525939
0.050239
0.040358
0.059162
0.06167
0.798182
0.769478
0.739856
0.720217
0.712501
0.653933
0
0.035922
0.109063
51,493
1,145
4,766
44.972052
0.772064
0.031402
0
0.658491
1
0.001887
0.226544
0.17212
0
0
0
0
0
1
0
false
0
0.004717
0
0.004717
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
0e18d17a35eb560a76ef8a70cb76744a6a30dc9f
85,333
py
Python
veriloggen/thread/ram.py
leonardt/veriloggen
bc3dacaa6a3e0b0652763881d0edf0421c6d3189
[ "Apache-2.0" ]
null
null
null
veriloggen/thread/ram.py
leonardt/veriloggen
bc3dacaa6a3e0b0652763881d0edf0421c6d3189
[ "Apache-2.0" ]
null
null
null
veriloggen/thread/ram.py
leonardt/veriloggen
bc3dacaa6a3e0b0652763881d0edf0421c6d3189
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import from __future__ import print_function import functools import math import veriloggen.core.vtypes as vtypes import veriloggen.dataflow.dtypes as dtypes import veriloggen.types.fixed as fxd import veriloggen.types.util as util from veriloggen.seq.seq import Seq, TmpSeq from veriloggen.fsm.fsm import TmpFSM from veriloggen.types.ram import RAMInterface, mkRAMDefinition from veriloggen.dataflow.dataflow import DataflowManager from veriloggen.dataflow.dtypes import make_condition, read_multi from veriloggen.dataflow.dtypes import _Numeric as df_numeric from .ttypes import _MutexFunction class RAM(_MutexFunction): __intrinsics__ = ('read', 'write') + _MutexFunction.__intrinsics__ def __init__(self, m, name, clk, rst, datawidth=32, addrwidth=10, numports=1, nodataflow=False): self.m = m self.name = name self.clk = clk self.rst = rst self.datawidth = datawidth self.addrwidth = addrwidth self.numports = numports self.interfaces = [RAMInterface(m, name + '_%d' % i, datawidth, addrwidth) for i in range(numports)] self.definition = mkRAMDefinition(name, datawidth, addrwidth, numports) self.inst = self.m.Instance(self.definition, 'inst_' + name, ports=m.connect_ports(self.definition)) self.seq = Seq(m, name, clk, rst) if nodataflow: self.df = None else: self.df = DataflowManager(self.m, self.clk, self.rst) self._write_disabled = [False for i in range(numports)] self._port_disabled = [False for i in range(numports)] self.mutex = None def __getitem__(self, index): return self.interfaces[index] def _id(self): return id(self) @property def length(self): if isinstance(self.addrwidth, int): return 2 ** self.addrwidth return vtypes.Int(2) ** self.addrwidth def disable_write(self, port): self.seq( self.interfaces[port].wdata(0), self.interfaces[port].wenable(0) ) self._write_disabled[port] = True def disable_port(self, port): self.seq( self.interfaces[port].addr(0), ) self._port_disabled[port] = True def connect_rtl(self, port, addr, wdata=None, 
wenable=None, rdata=None): """ connect native signals to the internal RAM interface """ self.interfaces[port].addr.connect(addr) if wdata is not None: self.interfaces[port].wdata.connect(wdata) if wenable is not None: self.interfaces[port].wenable.connect(wenable) if rdata is not None: rdata.connect(self.interfaces[port].rdata) def read_rtl(self, addr, port=0, cond=None): """ @return data, valid """ if cond is not None: self.seq.If(cond) self.seq( self.interfaces[port].addr(addr) ) rdata = self.interfaces[port].rdata rvalid = self.m.TmpReg(initval=0) self.seq.Then().Delay(1)( rvalid(1) ) self.seq.Then().Delay(2)( rvalid(0) ) return rdata, rvalid def write_rtl(self, addr, wdata, port=0, cond=None): """ @return None """ if self._write_disabled[port]: raise TypeError('Write disabled.') if cond is not None: self.seq.If(cond) self.seq( self.interfaces[port].addr(addr), self.interfaces[port].wdata(wdata), self.interfaces[port].wenable(1) ) self.seq.Then().Delay(1)( self.interfaces[port].wenable(0) ) def read(self, fsm, addr, port=0): """ intrinsic read operation using a shared Seq object """ port = vtypes.to_int(port) cond = fsm.state == fsm.current rdata, rvalid = self.read_rtl(addr, port, cond) rdata_reg = self.m.TmpReg(self.datawidth, initval=0, signed=True) fsm.If(rvalid)( rdata_reg(rdata) ) fsm.Then().goto_next() return rdata_reg def write(self, fsm, addr, wdata, port=0, cond=None): """ intrinsic write operation using a shared Seq object """ port = vtypes.to_int(port) if cond is None: cond = fsm.state == fsm.current else: cond = vtypes.Ands(cond, fsm.state == fsm.current) self.write_rtl(addr, wdata, port, cond) fsm.goto_next() return 0 def read_dataflow(self, port, addr, length=1, stride=1, cond=None, point=0, signed=True): """ @return data, last, done """ data_valid = self.m.TmpReg(initval=0) last_valid = self.m.TmpReg(initval=0) data_ready = self.m.TmpWire() last_ready = self.m.TmpWire() data_ready.assign(1) last_ready.assign(1) data_ack = vtypes.Ors(data_ready, 
vtypes.Not(data_valid)) last_ack = vtypes.Ors(last_ready, vtypes.Not(last_valid)) ext_cond = make_condition(cond) data_cond = make_condition(data_ack, last_ack) prev_data_cond = self.seq.Prev(data_cond, 1) data = self.m.TmpWireLike(self.interfaces[port].rdata, signed=True) prev_data = self.seq.Prev(data, 1) data.assign(vtypes.Mux(prev_data_cond, self.interfaces[port].rdata, prev_data)) next_valid_on = self.m.TmpReg(initval=0) next_valid_off = self.m.TmpReg(initval=0) next_last = self.m.TmpReg(initval=0) last = self.m.TmpReg(initval=0) counter = self.m.TmpReg(length.bit_length() + 1, initval=0) self.seq.If(data_cond, next_valid_off)( last(0), data_valid(0), last_valid(0), next_valid_off(0) ) self.seq.If(data_cond, next_valid_on)( data_valid(1), last_valid(1), last(next_last), next_last(0), next_valid_on(0), next_valid_off(1) ) self.seq.If(ext_cond, counter == 0, vtypes.Not(next_last), vtypes.Not(last))( self.interfaces[port].addr(addr), counter(length - 1), next_valid_on(1), next_last(length == 1) ) self.seq.If(data_cond, counter > 0)( self.interfaces[port].addr(self.interfaces[port].addr + stride), counter.dec(), next_valid_on(1), next_last(0) ) self.seq.If(data_cond, counter == 1)( next_last(1) ) df_data = self.df.Variable(data, data_valid, data_ready, width=self.datawidth, point=point, signed=signed) df_last = self.df.Variable( last, last_valid, last_ready, width=1, signed=False) done = last return df_data, df_last, done def read_dataflow_pattern(self, port, addr, pattern, cond=None, point=0, signed=True): """ @return data, last, done """ if not isinstance(pattern, (tuple, list)): raise TypeError('pattern must be list or tuple.') if not pattern: raise ValueError( 'pattern must have one (size, stride) pair at least.') if not isinstance(pattern[0], (tuple, list)): pattern = (pattern,) data_valid = self.m.TmpReg(initval=0) last_valid = self.m.TmpReg(initval=0) data_ready = self.m.TmpWire() last_ready = self.m.TmpWire() data_ready.assign(1) last_ready.assign(1) 
data_ack = vtypes.Ors(data_ready, vtypes.Not(data_valid)) last_ack = vtypes.Ors(last_ready, vtypes.Not(last_valid)) ext_cond = make_condition(cond) data_cond = make_condition(data_ack, last_ack) prev_data_cond = self.seq.Prev(data_cond, 1) data = self.m.TmpWireLike(self.interfaces[port].rdata, signed=True) prev_data = self.seq.Prev(data, 1) data.assign(vtypes.Mux(prev_data_cond, self.interfaces[port].rdata, prev_data)) next_valid_on = self.m.TmpReg(initval=0) next_valid_off = self.m.TmpReg(initval=0) next_last = self.m.TmpReg(initval=0) last = self.m.TmpReg(initval=0) running = self.m.TmpReg(initval=0) next_addr = self.m.TmpWire(self.addrwidth) offset_addr = self.m.TmpWire(self.addrwidth) offsets = [self.m.TmpReg(self.addrwidth, initval=0) for _ in pattern[1:]] offset_addr_value = addr for offset in offsets: offset_addr_value = offset + offset_addr_value offset_addr.assign(offset_addr_value) offsets.insert(0, None) count_list = [self.m.TmpReg(out_size.bit_length() + 1, initval=0) for (out_size, out_stride) in pattern] self.seq.If(data_cond, next_valid_off)( last(0), data_valid(0), last_valid(0), next_valid_off(0) ) self.seq.If(data_cond, next_valid_on)( data_valid(1), last_valid(1), last(next_last), next_last(0), next_valid_on(0), next_valid_off(1) ) self.seq.If(ext_cond, vtypes.Not(running), vtypes.Not(next_last), vtypes.Not(last))( self.interfaces[port].addr(addr), running(1), next_valid_on(1) ) self.seq.If(data_cond, running)( self.interfaces[port].addr(next_addr), next_valid_on(1), next_last(0) ) update_count = None update_offset = None update_addr = None last_one = None stride_value = None carry = None for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): self.seq.If(ext_cond, vtypes.Not(running), vtypes.Not(next_last), vtypes.Not(last))( count(out_size - 1) ) self.seq.If(data_cond, running, update_count)( count.dec() ) self.seq.If(data_cond, running, update_count, count == 0)( count(out_size - 1) ) if offset is not None: 
self.seq.If(ext_cond, vtypes.Not(running), vtypes.Not(next_last), vtypes.Not(last))( offset(0) ) self.seq.If(data_cond, running, update_offset, vtypes.Not(carry))( offset(offset + out_stride) ) self.seq.If(data_cond, running, update_offset, count == 0)( offset(0) ) if update_count is None: update_count = count == 0 else: update_count = vtypes.Ands(update_count, count == 0) if update_offset is None: update_offset = vtypes.Mux(out_size == 1, 1, count == 1) else: update_offset = vtypes.Ands(update_offset, count == carry) if update_addr is None: update_addr = count == 0 else: update_addr = vtypes.Mux(carry, count == 0, update_addr) if last_one is None: last_one = count == 0 else: last_one = vtypes.Ands(last_one, count == 0) if stride_value is None: stride_value = out_stride else: stride_value = vtypes.Mux(carry, out_stride, stride_value) if carry is None: carry = out_size == 1 else: carry = vtypes.Ands(carry, out_size == 1) next_addr.assign(vtypes.Mux(update_addr, offset_addr, self.interfaces[port].addr + stride_value)) self.seq.If(data_cond, running, last_one)( running(0), next_last(1) ) df_data = self.df.Variable(data, data_valid, data_ready, width=self.datawidth, point=point, signed=signed) df_last = self.df.Variable( last, last_valid, last_ready, width=1, signed=False) done = last return df_data, df_last, done def read_dataflow_multidim(self, port, addr, shape, order=None, cond=None, point=0, signed=True): """ @return data, last, done """ if order is None: order = list(reversed(range(len(shape)))) pattern = self._to_pattern(shape, order) return self.read_dataflow_pattern(port, addr, pattern, cond=cond, point=point, signed=signed) def read_dataflow_reuse(self, port, addr, length=1, stride=1, reuse_size=1, num_outputs=1, cond=None, point=0, signed=True): """ @return data, last, done """ if not isinstance(num_outputs, int): raise TypeError('num_outputs must be int') data_valid = [self.m.TmpReg(initval=0) for _ in range(num_outputs)] last_valid = 
self.m.TmpReg(initval=0) data_ready = [self.m.TmpWire() for _ in range(num_outputs)] last_ready = self.m.TmpWire() for r in data_ready: r.assign(1) last_ready.assign(1) data_ack = vtypes.Ands(*[vtypes.Ors(r, vtypes.Not(v)) for v, r in zip(data_valid, data_ready)]) last_ack = vtypes.Ors(last_ready, vtypes.Not(last_valid)) ext_cond = make_condition(cond) data_cond = make_condition(data_ack, last_ack) counter = self.m.TmpReg(length.bit_length() + 1, initval=0) last = self.m.TmpReg(initval=0) reuse_data = [self.m.TmpReg(self.datawidth, initval=0, signed=True) for _ in range(num_outputs)] next_reuse_data = [self.m.TmpReg(self.datawidth, initval=0, signed=True) for _ in range(num_outputs)] reuse_count = self.m.TmpReg(reuse_size.bit_length() + 1, initval=0) fill_reuse_count = self.m.TmpReg(initval=0) fetch_done = self.m.TmpReg(initval=0) fsm = TmpFSM(self.m, self.clk, self.rst) # initial state fsm.If(ext_cond)( self.interfaces[port].addr(addr - stride), fetch_done(0), counter(length) ) fsm.If(ext_cond, length > 0).goto_next() # initial prefetch state for n in next_reuse_data: fsm( self.interfaces[port].addr( self.interfaces[port].addr + stride), counter(vtypes.Mux(counter > 0, counter - 1, counter)) ) fsm.Delay(2)( n(self.interfaces[port].rdata) ) fsm.goto_next() fsm.goto_next() fsm.goto_next() # initial update state for n, r in zip(next_reuse_data, reuse_data): fsm( r(n) ) fsm( fill_reuse_count(1), fetch_done(counter == 0) ) fsm.Delay(1)( fill_reuse_count(0) ) fsm.goto_next() # prefetch state read_start_state = fsm.current for n in next_reuse_data: fsm( self.interfaces[port].addr( self.interfaces[port].addr + stride), counter(vtypes.Mux(counter > 0, counter - 1, counter)) ) fsm.Delay(2)( n(self.interfaces[port].rdata) ) fsm.goto_next() fsm.goto_next() fsm.goto_next() # update state for n, r in zip(next_reuse_data, reuse_data): fsm.If(data_cond, reuse_count == 0)( r(n) ) fsm.If(data_cond, reuse_count == 0)( fill_reuse_count(vtypes.Not(fetch_done)), fetch_done(counter == 
0) ) fsm.Delay(1)( fill_reuse_count(0) ) # next -> prefetch state or initial state fsm.If(data_cond, reuse_count == 0, counter == 0).goto_init() fsm.If(data_cond, reuse_count == 0, counter > 0).goto(read_start_state) # output signal control self.seq.If(data_cond, last_valid)( last(0), [d(0) for d in data_valid], last_valid(0) ) self.seq.If(fill_reuse_count)( reuse_count(reuse_size) ) self.seq.If(data_cond, reuse_count > 0)( reuse_count.dec(), [d(1) for d in data_valid], last_valid(1), last(0) ) self.seq.If(data_cond, reuse_count == 1, fetch_done)( last(1) ) df_last = self.df.Variable( last, last_valid, last_ready, width=1, signed=False) done = last df_reuse_data = [self.df.Variable(d, v, r, width=self.datawidth, point=point, signed=signed) for d, v, r in zip(reuse_data, data_valid, data_ready)] return tuple(df_reuse_data + [df_last, done]) def read_dataflow_reuse_pattern(self, port, addr, pattern, reuse_size=1, num_outputs=1, cond=None, point=0, signed=True): """ @return data, last, done """ if not isinstance(pattern, (tuple, list)): raise TypeError('pattern must be list or tuple.') if not pattern: raise ValueError( 'pattern must have one (size, stride) pair at least.') if not isinstance(pattern[0], (tuple, list)): pattern = (pattern,) if not isinstance(num_outputs, int): raise TypeError('num_outputs must be int') data_valid = [self.m.TmpReg(initval=0) for _ in range(num_outputs)] last_valid = self.m.TmpReg(initval=0) data_ready = [self.m.TmpWire() for _ in range(num_outputs)] last_ready = self.m.TmpWire() for r in data_ready: r.assign(1) last_ready.assign(1) data_ack = vtypes.Ands(*[vtypes.Ors(r, vtypes.Not(v)) for v, r in zip(data_valid, data_ready)]) last_ack = vtypes.Ors(last_ready, vtypes.Not(last_valid)) ext_cond = make_condition(cond) data_cond = make_condition(data_ack, last_ack) next_addr = self.m.TmpWire(self.addrwidth) offset_addr = self.m.TmpWire(self.addrwidth) offsets = [self.m.TmpReg(self.addrwidth, initval=0) for _ in pattern[1:]] offset_addr_value 
= addr for offset in offsets: offset_addr_value = offset + offset_addr_value offset_addr.assign(offset_addr_value) offsets.insert(0, None) count_list = [self.m.TmpReg(out_size.bit_length() + 1, initval=0) for (out_size, out_stride) in pattern] last = self.m.TmpReg(initval=0) reuse_data = [self.m.TmpReg(self.datawidth, initval=0, signed=True) for _ in range(num_outputs)] next_reuse_data = [self.m.TmpReg(self.datawidth, initval=0, signed=True) for _ in range(num_outputs)] reuse_count = self.m.TmpReg(reuse_size.bit_length() + 1, initval=0) fill_reuse_count = self.m.TmpReg(initval=0) prefetch_done = self.m.TmpReg(initval=0) fetch_done = self.m.TmpReg(initval=0) update_addr = None stride_value = None carry = None for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): if update_addr is None: update_addr = count == 0 else: update_addr = vtypes.Mux(carry, count == 0, update_addr) if stride_value is None: stride_value = out_stride else: stride_value = vtypes.Mux(carry, out_stride, stride_value) if carry is None: carry = out_size == 1 else: carry = vtypes.Ands(carry, out_size == 1) next_addr.assign(vtypes.Mux(update_addr, offset_addr, self.interfaces[port].addr + stride_value)) fsm = TmpFSM(self.m, self.clk, self.rst) # initial state fsm.If(ext_cond)( self.interfaces[port].addr(addr - stride_value), prefetch_done(0), fetch_done(0) ) first = True for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): fsm.If(ext_cond)( count(out_size) if first else count(out_size - 1), ) if offset is not None: fsm.If(ext_cond)( offset(0) ) first = False fsm.If(ext_cond).goto_next() # initial prefetch state for n in next_reuse_data: update_count = None update_offset = None last_one = None carry = None for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): fsm.If(update_count)( count.dec() ) fsm.If(update_count, count == 0)( count(out_size - 1) ) fsm( self.interfaces[port].addr(next_addr) ) fsm.Delay(2)( 
n(self.interfaces[port].rdata) ) if offset is not None: fsm.If(update_offset, vtypes.Not(carry))( offset(offset + out_stride) ) fsm.If(update_offset, count == 0)( offset(0) ) if update_count is None: update_count = count == 0 else: update_count = vtypes.Ands(update_count, count == 0) if update_offset is None: update_offset = vtypes.Mux(out_size == 1, 1, count == 1) else: update_offset = vtypes.Ands(update_offset, count == carry) if last_one is None: last_one = count == 0 else: last_one = vtypes.Ands(last_one, count == 0) if carry is None: carry = out_size == 1 else: carry = vtypes.Ands(carry, out_size == 1) fsm.goto_next() fsm.If(last_one)( prefetch_done(1) ) fsm.goto_next() fsm.goto_next() # initial update state for r, n in zip(reuse_data, next_reuse_data): fsm( r(n) ) fsm( fetch_done(prefetch_done), fill_reuse_count(vtypes.Not(fetch_done)) ) fsm.Delay(1)( fill_reuse_count(0) ) fsm.goto_next() # prefetch state read_start_state = fsm.current for n in next_reuse_data: update_count = None update_offset = None last_one = None carry = None for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): fsm.If(update_count)( count.dec() ) fsm.If(update_count, count == 0)( count(out_size - 1) ) fsm( self.interfaces[port].addr(next_addr) ) fsm.Delay(2)( n(self.interfaces[port].rdata) ) if offset is not None: fsm.If(update_offset, vtypes.Not(carry))( offset(offset + out_stride) ) fsm.If(update_offset, count == 0)( offset(0) ) if update_count is None: update_count = count == 0 else: update_count = vtypes.Ands(update_count, count == 0) if update_offset is None: update_offset = vtypes.Mux(out_size == 1, 1, count == 1) else: update_offset = vtypes.Ands(update_offset, count == carry) if last_one is None: last_one = count == 0 else: last_one = vtypes.Ands(last_one, count == 0) if carry is None: carry = out_size == 1 else: carry = vtypes.Ands(carry, out_size == 1) fsm.goto_next() fsm.If(last_one)( prefetch_done(1) ) fsm.goto_next() fsm.goto_next() # update state 
for r, n in zip(reuse_data, next_reuse_data): fsm.If(data_cond, reuse_count == 0)( r(n) ) fsm.If(data_cond, reuse_count == 0)( fetch_done(prefetch_done), fill_reuse_count(vtypes.Not(fetch_done)) ) fsm.Delay(1)( fill_reuse_count(0) ) # next -> prefetch state or initial state fsm.If(data_cond, reuse_count == 0, fetch_done).goto_init() fsm.If(data_cond, reuse_count == 0, vtypes.Not(fetch_done)).goto(read_start_state) # output signal control self.seq.If(data_cond, last_valid)( last(0), [d(0) for d in data_valid], last_valid(0) ) self.seq.If(fill_reuse_count)( reuse_count(reuse_size) ) self.seq.If(data_cond, reuse_count > 0)( reuse_count.dec(), [d(1) for d in data_valid], last_valid(1), last(0) ) self.seq.If(data_cond, reuse_count == 1, fetch_done)( last(1) ) df_last = self.df.Variable( last, last_valid, last_ready, width=1, signed=False) done = last df_reuse_data = [self.df.Variable(d, v, r, width=self.datawidth, point=point, signed=signed) for d, v, r in zip(reuse_data, data_valid, data_ready)] return tuple(df_reuse_data + [df_last, done]) def read_dataflow_reuse_multidim(self, port, addr, shape, order=None, reuse_size=1, num_outputs=1, cond=None, point=0, signed=True): """ @return data, last, done """ if order is None: order = list(reversed(range(len(shape)))) pattern = self._to_pattern(shape, order) return self.read_dataflow_pattern(port, addr, pattern, reuse_size, num_outputs, cond=cond, point=point, signed=signed) def write_dataflow(self, port, addr, data, length=1, stride=1, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if self._write_disabled[port]: raise TypeError('Write disabled.') counter = self.m.TmpReg(length.bit_length() + 1, initval=0) last = self.m.TmpReg(initval=0) ext_cond = make_condition(cond) data_cond = make_condition(counter > 0, vtypes.Not(last)) if when is None or not isinstance(when, df_numeric): raw_data, raw_valid = data.read(cond=data_cond) else: data_list, raw_valid = read_multi( self.m, data, 
when, cond=data_cond) raw_data = data_list[0] when = data_list[1] when_cond = make_condition(when, ready=data_cond) if when_cond is not None: raw_valid = vtypes.Ands(when_cond, raw_valid) self.seq.If(ext_cond, counter == 0)( self.interfaces[port].addr(addr - stride), counter(length), ) self.seq.If(raw_valid, counter > 0)( self.interfaces[port].addr(self.interfaces[port].addr + stride), self.interfaces[port].wdata(raw_data), self.interfaces[port].wenable(1), counter.dec() ) self.seq.If(raw_valid, counter == 1)( last(1) ) # de-assert self.seq.Delay(1)( self.interfaces[port].wenable(0), last(0) ) done = last return done def write_dataflow_pattern(self, port, addr, data, pattern, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if self._write_disabled[port]: raise TypeError('Write disabled.') if not isinstance(pattern, (tuple, list)): raise TypeError('pattern must be list or tuple.') if not pattern: raise ValueError( 'pattern must have one (size, stride) pair at least.') if not isinstance(pattern[0], (tuple, list)): pattern = (pattern,) last = self.m.TmpReg(initval=0) running = self.m.TmpReg(initval=0) ext_cond = make_condition(cond) data_cond = make_condition(running, vtypes.Not(last)) if when is None or not isinstance(when, df_numeric): raw_data, raw_valid = data.read(cond=data_cond) else: data_list, raw_valid = read_multi( self.m, data, when, cond=data_cond) raw_data = data_list[0] when = data_list[1] when_cond = make_condition(when, ready=data_cond) if when_cond is not None: raw_valid = vtypes.Ands(when_cond, raw_valid) offset_addr = self.m.TmpWire(self.addrwidth) offsets = [self.m.TmpReg(self.addrwidth, initval=0) for _ in pattern] offset_addr_value = addr for offset in offsets: offset_addr_value = offset + offset_addr_value offset_addr.assign(offset_addr_value) count_list = [self.m.TmpReg(out_size.bit_length() + 1, initval=0) for (out_size, out_stride) in pattern] self.seq.If(ext_cond, vtypes.Not(running))( running(1) ) 
self.seq.If(raw_valid, running)( self.interfaces[port].addr(offset_addr), self.interfaces[port].wdata(raw_data), self.interfaces[port].wenable(1) ) update_count = None last_one = None for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): self.seq.If(ext_cond, vtypes.Not(running))( count(out_size - 1), offset(0) ) self.seq.If(raw_valid, running, update_count)( count.dec(), offset(offset + out_stride) ) self.seq.If(raw_valid, running, update_count, count == 0)( count(out_size - 1), offset(0) ) if update_count is None: update_count = count == 0 else: update_count = vtypes.Ands(update_count, count == 0) if last_one is None: last_one = count == 0 else: last_one = vtypes.Ands(last_one, count == 0) self.seq.If(raw_valid, last_one)( running(0), last(1) ) # de-assert self.seq.Delay(1)( self.interfaces[port].wenable(0), last(0) ) done = last return done def write_dataflow_multidim(self, port, addr, data, shape, order=None, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if order is None: order = list(reversed(range(len(shape)))) pattern = self._to_pattern(shape, order) return self.write_dataflow_pattern(port, addr, data, pattern, cond=cond, when=when) def _to_pattern(self, shape, order): pattern = [] for p in order: if not isinstance(p, int): raise TypeError( "Values of 'order' must be 'int', not %s" % str(type(p))) size = shape[p] basevalue = 1 if isinstance(size, int) else vtypes.Int(1) stride = functools.reduce(lambda x, y: x * y, shape[p + 1:], basevalue) pattern.append((size, stride)) return pattern class FixedRAM(RAM): def __init__(self, m, name, clk, rst, datawidth=32, addrwidth=10, numports=1, point=0): RAM.__init__(self, m, name, clk, rst, datawidth, addrwidth, numports) self.point = point def read(self, fsm, addr, port=0, raw=False): raw_value = RAM.read(self, fsm, addr, port) if raw: return raw_value return fxd.as_fixed(raw_value, self.point) def write(self, fsm, addr, wdata, port=0, cond=None, 
raw=False): if raw: fixed_wdata = wdata else: fixed_wdata = fxd.write_adjust(wdata, self.point) return RAM.write(self, fsm, addr, fixed_wdata, port, cond) def extract_rams(rams): ret = [] for ram in rams: if isinstance(ram, MultibankRAM): ret.extend(extract_rams(ram.rams)) else: ret.append(ram) return ret class MultibankRAM(object): __intrinsics__ = ( 'read', 'write', 'read_bank', 'write_bank', 'dma_read_bank', 'dma_read_bank_async', 'dma_write_bank', 'dma_write_bank_async', 'dma_read_block', 'dma_read_block_async', 'dma_write_block', 'dma_write_block_async') + _MutexFunction.__intrinsics__ def __init__(self, m, name, clk, rst, datawidth=32, addrwidth=10, numports=1, numbanks=2): if numbanks < 2: raise ValueError('numbanks must be 2 or more') self.m = m self.name = name self.clk = clk self.rst = rst self.orig_datawidth = datawidth self.datawidth = datawidth * numbanks self.addrwidth = addrwidth self.numports = numports self.numbanks = numbanks self.shift = util.log2(self.numbanks) self.rams = [RAM(m, '_'.join([name, '%d' % i]), clk, rst, datawidth, addrwidth, numports) for i in range(numbanks)] self.keep_hierarchy = False self.seq = None self.df = DataflowManager(self.m, self.clk, self.rst) # key: (axi._id(), port, ram_method_name) self.cache_dma_reqs = {} self.mutex = None def __getitem__(self, index): return self.rams[index] def _id(self): _ids = [ram._id() for ram in self.rams] return tuple(_ids) @property def length(self): if isinstance(self.addrwidth, int): return (2 ** self.addrwidth) * self.numbanks return (vtypes.Int(2) ** self.addrwidth) * self.numbanks def disable_write(self, port): for ram in self.rams: ram.seq( ram.interfaces[port].wdata(0), ram.interfaces[port].wenable(0) ) ram._write_disabled[port] = True def connect_rtl(self, port, addr, wdata=None, wenable=None, rdata=None): """ connect native signals to the internal RAM interface """ if math.log(self.numbanks, 2) % 1.0 != 0.0: raise ValueError('numbanks must be power-of-2') if self.seq is None: 
self.seq = Seq(self.m, self.name, self.clk, self.rst) bank = self.m.TmpWire(self.shift) bank.assign(addr) addr = addr >> self.shift rdata_list = [] for i, ram in enumerate(self.rams): ram.interfaces[port].addr.connect(addr) if wdata is not None: ram.interfaces[port].wdata.connect(wdata) bank_wenable = vtypes.Ands(wenable, bank == i) if wenable is not None: ram.interfaces[port].wenable.connect(bank_wenable) rdata_list.append(ram.interfaces[port].rdata) bank_reg = self.seq.Prev(bank, 1, initval=0) pat = [(bank_reg == i, rdata_list[i]) for i, ram in enumerate(self.rams)] pat.append((None, 0)) rdata_wire = self.m.TmpWire(self.orig_datawidth, signed=True) rdata_wire.assign(vtypes.PatternMux(pat)) if rdata is not None: rdata.connect(rdata_wire) def read_rtl(self, addr, port=0, cond=None): """ @return data, valid """ if math.log(self.numbanks, 2) % 1.0 != 0.0: raise ValueError('numbanks must be power-of-2') if self.seq is None: self.seq = Seq(self.m, self.name, self.clk, self.rst) rdata_list = [] rvalid_list = [] bank = self.m.TmpWire(self.shift) bank.assign(addr) addr = addr >> self.shift bank_reg = self.seq.Prev(bank, 2, initval=0) for ram in self.rams: rdata, rvalid = ram.read_rtl(addr, port, cond) rdata_list.append(rdata) rvalid_list.append(rvalid) rdata_wire = self.m.TmpWire(self.orig_datawidth, signed=True) rvalid_wire = self.m.TmpWire() pat = [(bank_reg == i, rdata_list[i]) for i, ram in enumerate(self.rams)] pat.append((None, 0)) rdata_wire.assign(vtypes.PatternMux(pat)) rvalid_wire.assign(rvalid_list[0]) return rdata_wire, rvalid_wire def write_rtl(self, addr, data, port=0, cond=None): """ @return None """ if math.log(self.numbanks, 2) % 1.0 != 0.0: raise ValueError('numbanks must be power-of-2') bank = self.m.TmpWire(self.shift) bank.assign(addr) addr = addr >> self.shift for i, ram in enumerate(self.rams): bank_cond = vtypes.Ands(cond, bank == i) ram.write_rtl(addr, data, port, bank_cond) return 0 def _read_recursive(self, ram, port, addr, cond): if 
isinstance(ram, MultibankRAM): if math.log(ram.numbanks, 2) % 1.0 != 0.0: raise ValueError('numbanks must be power-of-2') rdata_list = [] rvalid_list = [] bank = self.m.TmpWire(ram.shift) bank.assign(addr) addr = addr >> ram.shift for sub in ram.rams: rdata, rvalid = self._read_recursive(sub, port, addr, cond) rdata_list.append(rdata) rvalid_list.append(rvalid) rdata_wire = self.m.TmpWire(ram.orig_datawidth, signed=True) patterns = [(bank == i, rdata) for i, rdata in enumerate(rdata_list)] patterns.append((None, 0)) rdata_wire.assign(vtypes.PatternMux(*patterns)) return rdata_wire, rvalid_list[0] rdata, rvalid = ram.read_rtl(addr, port, cond) return rdata, rvalid def read(self, fsm, addr, port=0): if math.log(self.numbanks, 2) % 1.0 != 0.0: raise ValueError('numbanks must be power-of-2') port = vtypes.to_int(port) cond = fsm.state == fsm.current rdata_list = [] rvalid_list = [] bank = self.m.TmpWire(self.shift) bank.assign(addr) addr = addr >> self.shift for ram in self.rams: rdata, rvalid = self._read_recursive(ram, port, addr, cond) rdata_list.append(rdata) rvalid_list.append(rvalid) rdata_reg = self.m.TmpReg(self.orig_datawidth, initval=0, signed=True) for i, ram in enumerate(self.rams): fsm.If(rvalid_list[i], bank == i)( rdata_reg(rdata_list[i]) ) fsm.If(rvalid_list[0]).goto_next() return rdata_reg def _write_recursive(self, ram, port, addr, wdata, cond=None): if isinstance(ram, MultibankRAM): if math.log(ram.numbanks, 2) % 1.0 != 0.0: raise ValueError('numbanks must be power-of-2') bank = self.m.TmpWire(ram.shift) bank.assign(addr) addr = addr >> ram.shift for i, sub in enumerate(ram.rams): bank_cond = vtypes.Ands(cond, bank == i) self._write_recursive(sub, port, addr, wdata, bank_cond) return ram.write_rtl(addr, wdata, port, cond) def write(self, fsm, addr, wdata, port=0, cond=None): if math.log(self.numbanks, 2) % 1.0 != 0.0: raise ValueError('numbanks must be power-of-2') if cond is None: cond = fsm.state == fsm.current else: cond = vtypes.Ands(cond, 
fsm.state == fsm.current) bank = self.m.TmpWire(self.shift) bank.assign(addr) addr = addr >> self.shift for i, ram in enumerate(self.rams): bank_cond = vtypes.Ands(cond, bank == i) self._write_recursive(ram, port, addr, wdata, bank_cond) fsm.goto_next() return 0 def read_bank(self, fsm, bank, addr, port=0): port = vtypes.to_int(port) cond = fsm.state == fsm.current rdata_list = [] rvalid_list = [] for ram in self.rams: rdata, rvalid = self._read_recursive(ram, port, addr, cond) rdata_list.append(rdata) rvalid_list.append(rvalid) rdata_reg = self.m.TmpReg(self.orig_datawidth, initval=0, signed=True) for i, ram in enumerate(self.rams): fsm.If(rvalid_list[i], bank == i)( rdata_reg(rdata_list[i]) ) fsm.If(rvalid_list[0]).goto_next() return rdata_reg def write_bank(self, fsm, bank, addr, wdata, port=0, cond=None): if cond is None: cond = fsm.state == fsm.current else: cond = vtypes.Ands(cond, fsm.state == fsm.current) for i, ram in enumerate(self.rams): bank_cond = vtypes.Ands(cond, bank == i) self._write_recursive(ram, port, addr, wdata, bank_cond) fsm.goto_next() return 0 def dma_read_bank(self, fsm, bank, bus, local_addr, global_addr, size, local_stride=1, port=0): if bus.enable_async: bus.dma_wait_read(fsm) self._dma_read_bank(fsm, bank, bus, local_addr, global_addr, size, local_stride, port) bus.dma_wait_read(fsm) def dma_read_bank_async(self, fsm, bank, bus, local_addr, global_addr, size, local_stride=1, port=0): if not bus.enable_async: raise ValueError( "Async mode is disabled. 
Set 'True' to AXIM.enable_async.") bus.dma_wait_read(fsm) self._dma_read_bank(fsm, bank, bus, local_addr, global_addr, size, local_stride, port) def _dma_read_bank(self, fsm, bank, bus, local_addr, global_addr, size, local_stride=1, port=0): check = fsm.current fsm.set_index(check + 1) starts = [] ends = [] for i, ram in enumerate(self.rams): starts.append(fsm.current) bus._dma_read(fsm, ram, local_addr, global_addr, size, local_stride, port) ends.append(fsm.current) fsm.set_index(fsm.current + 1) fin = fsm.current for i, (s, e) in enumerate(zip(starts, ends)): fsm.goto_from(check, s, cond=bank == i) fsm.goto_from(e, fin) def dma_write_bank(self, fsm, bank, bus, local_addr, global_addr, size, local_stride=1, port=0): if bus.enable_async: bus.dma_wait_write(fsm) self._dma_write_bank(fsm, bank, bus, local_addr, global_addr, size, local_stride, port) bus.dma_wait_write(fsm) def dma_write_bank_async(self, fsm, bank, bus, local_addr, global_addr, size, local_stride=1, port=0): if not bus.enable_async: raise ValueError( "Async mode is disabled. 
Set 'True' to AXIM.enable_async.") bus.dma_wait_write(fsm) self._dma_write_bank(fsm, bank, bus, local_addr, global_addr, size, local_stride, port) def _dma_write_bank(self, fsm, bank, bus, local_addr, global_addr, size, local_stride=1, port=0): check = fsm.current fsm.set_index(check + 1) starts = [] ends = [] for i, ram in enumerate(self.rams): starts.append(fsm.current) bus._dma_write(fsm, ram, local_addr, global_addr, size, local_stride, port) ends.append(fsm.current) fsm.set_index(fsm.current + 1) fin = fsm.current for i, (s, e) in enumerate(zip(starts, ends)): fsm.goto_from(check, s, cond=bank == i) fsm.goto_from(e, fin) def dma_read_block(self, fsm, bus, local_addr, global_addr, size, block_size=1, local_stride=1, port=0): if bus.enable_async: bus.dma_wait_read(fsm) self._dma_read_block(fsm, bus, local_addr, global_addr, size, block_size, local_stride, port) bus.dma_wait_read(fsm) def dma_read_block_async(self, fsm, bus, local_addr, global_addr, size, block_size=1, local_stride=1, port=0): if not bus.enable_async: raise ValueError( "Async mode is disabled. 
Set 'True' to AXIM.enable_async.") bus.dma_wait_read(fsm) self._dma_read_block(fsm, bus, local_addr, global_addr, size, block_size, local_stride, port) def _dma_read_block(self, fsm, bus, local_addr, global_addr, size, block_size=1, local_stride=1, port=0): cache_key = (id(bus), port) if cache_key in self.cache_dma_reqs: info = self.cache_dma_reqs[cache_key] seq = info[0] req_block_size = info[1] else: seq = TmpSeq(bus.m, bus.clk, bus.rst) req_block_size = self.m.TmpReg(self.addrwidth, initval=0, prefix='req_block_size') info = (seq, req_block_size) self.cache_dma_reqs[cache_key] = info set_req = bus._set_flag(fsm, prefix='set_req') seq.If(set_req)( req_block_size(block_size) ) ram_method = functools.partial(self.write_dataflow_block, block_size=req_block_size) bus._dma_read(fsm, self, local_addr, global_addr, size, local_stride, port, ram_method) def dma_write_block(self, fsm, bus, local_addr, global_addr, size, block_size=1, local_stride=1, port=0): if bus.enable_async: bus.dma_wait_write(fsm) self._dma_write_block(fsm, bus, local_addr, global_addr, size, block_size, local_stride, port) bus.dma_wait_write(fsm) def dma_write_block_async(self, fsm, bus, local_addr, global_addr, size, block_size=1, local_stride=1, port=0): if not bus.enable_async: raise ValueError( "Async mode is disabled. 
Set 'True' to AXIM.enable_async.") bus.dma_wait_write(fsm) self._dma_write_block(fsm, bus, local_addr, global_addr, size, block_size, local_stride, port) def _dma_write_block(self, fsm, bus, local_addr, global_addr, size, block_size=1, local_stride=1, port=0): cache_key = (id(bus), port) if cache_key in self.cache_dma_reqs: info = self.cache_dma_reqs[cache_key] seq = info[0] req_block_size = info[1] else: seq = TmpSeq(bus.m, bus.clk, bus.rst) req_block_size = self.m.TmpReg(self.addrwidth, initval=0, prefix='req_block_size') info = (seq, req_block_size) self.cache_dma_reqs[cache_key] = info set_req = bus._set_flag(fsm, prefix='set_req') seq.If(set_req)( req_block_size(block_size) ) ram_method = functools.partial(self.read_dataflow_block, block_size=req_block_size) bus._dma_write(fsm, self, local_addr, global_addr, size, local_stride, port, ram_method) def read_dataflow(self, port, addr, length=1, stride=1, cond=None, point=0, signed=True): """ @return data, last, done """ data_list = [] last_list = [] done_list = [] for ram in self.rams: data, last, done = ram.read_dataflow( port, addr, length, stride, cond, point, signed) data_list.insert(0, data) last_list.insert(0, last) done_list.insert(0, done) merged_data = dtypes.Cat(*data_list) merged_last = last_list[-1] merged_done = done_list[-1] return merged_data, merged_last, merged_done def read_dataflow_interleave(self, port, addr, length=1, stride=1, cond=None, point=0, signed=True): """ @return data, last, done """ if self.seq is None: self.seq = Seq(self.m, self.name, self.clk, self.rst) data_valid = self.m.TmpReg(initval=0) last_valid = self.m.TmpReg(initval=0) data_ready = self.m.TmpWire() last_ready = self.m.TmpWire() data_ready.assign(1) last_ready.assign(1) data_ack = vtypes.Ors(data_ready, vtypes.Not(data_valid)) last_ack = vtypes.Ors(last_ready, vtypes.Not(last_valid)) ext_cond = dtypes.make_condition(cond) data_cond = dtypes.make_condition(data_ack, last_ack) prev_data_cond = self.seq.Prev(data_cond, 1) 
data_list = [self.m.TmpWireLike(ram.interfaces[port].rdata, signed=True) for ram in self.rams] prev_data_list = [self.seq.Prev(data, 1) for data in data_list] for data, prev_data, ram in zip(data_list, prev_data_list, self.rams): data.assign(vtypes.Mux(prev_data_cond, ram.interfaces[port].rdata, prev_data)) log_numbanks = util.log2(self.numbanks) reg_addr = self.m.TmpReg(self.addrwidth + log_numbanks, initval=0) next_addr = self.m.TmpWire(self.addrwidth + log_numbanks) next_addr.assign(reg_addr + stride) ram_addr_list = [self.m.TmpWire(ram.addrwidth) for ram in self.rams] for ram_addr in ram_addr_list: ram_addr.assign(next_addr >> log_numbanks) bank_sel = self.m.TmpWire(log_numbanks) bank_sel.assign(reg_addr[0:log_numbanks]) reg_bank_sel = self.m.TmpReg(log_numbanks, initval=0) prev_reg_bank_sel = self.seq.Prev(reg_bank_sel, 1) self.seq.If(data_cond)( reg_bank_sel(bank_sel) ) patterns = [(reg_bank_sel == i, data) for i, data in enumerate(data_list)] patterns.append((None, 0)) prev_patterns = [(prev_reg_bank_sel == i, data) for i, data in enumerate(prev_data_list)] prev_patterns.append((None, 0)) data = self.m.TmpWire(self.orig_datawidth, signed=True) data.assign(vtypes.Mux(prev_data_cond, vtypes.PatternMux(*patterns), vtypes.PatternMux(*prev_patterns))) next_valid_on = self.m.TmpReg(initval=0) next_valid_off = self.m.TmpReg(initval=0) next_last = self.m.TmpReg(initval=0) last = self.m.TmpReg(initval=0) counter = self.m.TmpReg(length.bit_length() + 1, initval=0) self.seq.If(data_cond, next_valid_off)( last(0), data_valid(0), last_valid(0), next_valid_off(0) ) self.seq.If(data_cond, next_valid_on)( data_valid(1), last_valid(1), last(next_last), next_last(0), next_valid_on(0), next_valid_off(1) ) self.seq.If(ext_cond, counter == 0, vtypes.Not(next_last), vtypes.Not(last))( reg_addr(addr), counter(length - 1), next_valid_on(1), next_last(length == 1) ) for ram in self.rams: ram.seq.If(ext_cond, counter == 0, vtypes.Not(next_last), vtypes.Not(last))( 
ram.interfaces[port].addr(addr >> log_numbanks) ) self.seq.If(data_cond, counter > 0)( reg_addr(reg_addr + stride), counter.dec(), next_valid_on(1), next_last(0) ) for ram, ram_addr in zip(self.rams, ram_addr_list): ram.seq.If(data_cond, counter > 0)( ram.interfaces[port].addr(ram_addr) ) self.seq.If(data_cond, counter == 1)( next_last(1) ) df = self.df if self.df is not None else dataflow df_data = df.Variable(data, data_valid, data_ready, width=self.orig_datawidth, point=point, signed=signed) df_last = df.Variable(last, last_valid, last_ready, width=1) done = last return df_data, df_last, done def read_dataflow_pattern_interleave(self, port, addr, pattern, cond=None, point=0, signed=True): """ @return data, last, done """ if self.seq is None: self.seq = Seq(self.m, self.name, self.clk, self.rst) if not isinstance(pattern, (tuple, list)): raise TypeError('pattern must be list or tuple.') if not pattern: raise ValueError( 'pattern must have one (size, stride) pair at least.') if not isinstance(pattern[0], (tuple, list)): pattern = (pattern,) data_valid = self.m.TmpReg(initval=0) last_valid = self.m.TmpReg(initval=0) data_ready = self.m.TmpWire() last_ready = self.m.TmpWire() data_ready.assign(1) last_ready.assign(1) data_ack = vtypes.Ors(data_ready, vtypes.Not(data_valid)) last_ack = vtypes.Ors(last_ready, vtypes.Not(last_valid)) ext_cond = dtypes.make_condition(cond) data_cond = dtypes.make_condition(data_ack, last_ack) prev_data_cond = self.seq.Prev(data_cond, 1) data_list = [self.m.TmpWireLike(ram.interfaces[port].rdata, signed=True) for ram in self.rams] prev_data_list = [self.seq.Prev(data, 1) for data in data_list] for data, prev_data, ram in zip(data_list, prev_data_list, self.rams): data.assign(vtypes.Mux(prev_data_cond, ram.interfaces[port].rdata, prev_data)) log_numbanks = util.log2(self.numbanks) reg_addr = self.m.TmpReg(self.addrwidth + log_numbanks, initval=0) bank_sel = self.m.TmpWire(log_numbanks) bank_sel.assign(reg_addr[0:log_numbanks]) 
reg_bank_sel = self.m.TmpReg(log_numbanks, initval=0) prev_reg_bank_sel = self.seq.Prev(reg_bank_sel, 1) self.seq.If(data_cond)( reg_bank_sel(bank_sel) ) patterns = [(reg_bank_sel == i, data) for i, data in enumerate(data_list)] patterns.append((None, 0)) prev_patterns = [(prev_reg_bank_sel == i, data) for i, data in enumerate(prev_data_list)] prev_patterns.append((None, 0)) data = self.m.TmpWire(self.orig_datawidth, signed=True) data.assign(vtypes.Mux(prev_data_cond, vtypes.PatternMux(*patterns), vtypes.PatternMux(*prev_patterns))) next_valid_on = self.m.TmpReg(initval=0) next_valid_off = self.m.TmpReg(initval=0) next_last = self.m.TmpReg(initval=0) last = self.m.TmpReg(initval=0) running = self.m.TmpReg(initval=0) next_addr = self.m.TmpWire(self.addrwidth + log_numbanks) offset_addr = self.m.TmpWire(self.addrwidth + log_numbanks) offsets = [self.m.TmpReg(self.addrwidth + log_numbanks, initval=0) for _ in pattern[1:]] ram_addr_list = [self.m.TmpWire(ram.addrwidth) for ram in self.rams] for ram_addr in ram_addr_list: ram_addr.assign(next_addr >> log_numbanks) offset_addr_value = addr for offset in offsets: offset_addr_value = offset + offset_addr_value offset_addr.assign(offset_addr_value) offsets.insert(0, None) count_list = [self.m.TmpReg(out_size.bit_length() + 1, initval=0) for (out_size, out_stride) in pattern] self.seq.If(data_cond, next_valid_off)( last(0), data_valid(0), last_valid(0), next_valid_off(0) ) self.seq.If(data_cond, next_valid_on)( data_valid(1), last_valid(1), last(next_last), next_last(0), next_valid_on(0), next_valid_off(1) ) self.seq.If(ext_cond, vtypes.Not(running), vtypes.Not(next_last), vtypes.Not(last))( reg_addr(addr), running(1), next_valid_on(1) ) for ram in self.rams: ram.seq.If(ext_cond, vtypes.Not(running), vtypes.Not(next_last), vtypes.Not(last))( ram.interfaces[port].addr(addr >> log_numbanks) ) self.seq.If(data_cond, running)( reg_addr(next_addr), next_valid_on(1), next_last(0) ) for ram in self.rams: ram.seq.If(data_cond, 
running)( ram.interfaces[port].addr(ram_addr) ) update_count = None update_offset = None update_addr = None last_one = None stride_value = None carry = None for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): self.seq.If(ext_cond, vtypes.Not(running), vtypes.Not(next_last), vtypes.Not(last))( count(out_size - 1) ) self.seq.If(data_cond, running, update_count)( count.dec() ) self.seq.If(data_cond, running, update_count, count == 0)( count(out_size - 1) ) if offset is not None: self.seq.If(ext_cond, vtypes.Not(running), vtypes.Not(next_last), vtypes.Not(last))( offset(0) ) self.seq.If(data_cond, running, update_offset, vtypes.Not(carry))( offset(offset + out_stride) ) self.seq.If(data_cond, running, update_offset, count == 0)( offset(0) ) if update_count is None: update_count = count == 0 else: update_count = vtypes.Ands(update_count, count == 0) if update_offset is None: update_offset = vtypes.Mux(out_size == 1, 1, count == 1) else: update_offset = vtypes.Ands(update_offset, count == carry) if update_addr is None: update_addr = count == 0 else: update_addr = vtypes.Mux(carry, count == 0, update_addr) if last_one is None: last_one = count == 0 else: last_one = vtypes.Ands(last_one, count == 0) if stride_value is None: stride_value = out_stride else: stride_value = vtypes.Mux(carry, out_stride, stride_value) if carry is None: carry = out_size == 1 else: carry = vtypes.Ands(carry, out_size == 1) next_addr.assign(vtypes.Mux(update_addr, offset_addr, reg_addr + stride_value)) self.seq.If(data_cond, running, last_one)( running(0), next_last(1) ) df = self.df if self.df is not None else dataflow df_data = df.Variable(data, data_valid, data_ready, width=self.datawidth, point=point, signed=signed) df_last = df.Variable(last, last_valid, last_ready, width=1) done = last return df_data, df_last, done def read_dataflow_multidim_interleave(self, port, addr, shape, order=None, cond=None, point=0, signed=True): """ @return data, last, done """ if order 
is None: order = list(reversed(range(len(shape)))) pattern = self._to_pattern(shape, order) return self.read_dataflow_pattern_interleave(port, addr, pattern, cond=cond, point=point, signed=signed) def read_dataflow_block(self, port, addr, length=1, block_size=1, stride=1, cond=None, point=0, signed=True): """ @return data, last, done """ if self.keep_hierarchy and isinstance(self.rams[0], MultibankRAM): return self._read_dataflow_block_nested(port, addr, length, block_size, stride, cond, point, signed) if self.seq is None: self.seq = Seq(self.m, self.name, self.clk, self.rst) data_valid = self.m.TmpReg(initval=0) last_valid = self.m.TmpReg(initval=0) data_ready = self.m.TmpWire() last_ready = self.m.TmpWire() data_ready.assign(1) last_ready.assign(1) data_ack = vtypes.Ors(data_ready, vtypes.Not(data_valid)) last_ack = vtypes.Ors(last_ready, vtypes.Not(last_valid)) ext_cond = dtypes.make_condition(cond) data_cond = dtypes.make_condition(data_ack, last_ack) prev_data_cond = self.seq.Prev(data_cond, 1) data_list = [self.m.TmpWireLike(ram.interfaces[port].rdata, signed=True) for ram in self.rams] prev_data_list = [self.seq.Prev(data, 1) for data in data_list] for data, prev_data, ram in zip(data_list, prev_data_list, self.rams): data.assign(vtypes.Mux(prev_data_cond, ram.interfaces[port].rdata, prev_data)) log_numbanks = util.log2(self.numbanks) reg_addr_list = [self.m.TmpReg(self.addrwidth, initval=0) for ram in self.rams] next_addr_list = [self.m.TmpWire(self.addrwidth) for ram in self.rams] for next_addr, reg_addr in zip(next_addr_list, reg_addr_list): next_addr.assign(reg_addr + stride) ram_addr_list = [self.m.TmpWire(ram.addrwidth) for ram in self.rams] for ram_addr, next_addr in zip(ram_addr_list, next_addr_list): ram_addr.assign(next_addr) bank_sel = self.m.TmpReg(log_numbanks, initval=0) reg_bank_sel = self.m.TmpReg(log_numbanks, initval=0) prev_reg_bank_sel = self.seq.Prev(reg_bank_sel, 1) self.seq.If(data_cond)( reg_bank_sel(bank_sel) ) patterns = 
[(reg_bank_sel == i, data) for i, data in enumerate(data_list)] patterns.append((None, 0)) prev_patterns = [(prev_reg_bank_sel == i, data) for i, data in enumerate(prev_data_list)] prev_patterns.append((None, 0)) data = self.m.TmpWire(self.orig_datawidth, signed=True) data.assign(vtypes.Mux(prev_data_cond, vtypes.PatternMux(*patterns), vtypes.PatternMux(*prev_patterns))) next_valid_on = self.m.TmpReg(initval=0) next_valid_off = self.m.TmpReg(initval=0) next_last = self.m.TmpReg(initval=0) last = self.m.TmpReg(initval=0) block_counter = self.m.TmpReg(block_size.bit_length() + 1, initval=0) counter = self.m.TmpReg(length.bit_length() + 1, initval=0) self.seq.If(data_cond, next_valid_off)( last(0), data_valid(0), last_valid(0), next_valid_off(0) ) self.seq.If(data_cond, next_valid_on)( data_valid(1), last_valid(1), last(next_last), next_last(0), next_valid_on(0), next_valid_off(1) ) self.seq.If(ext_cond, counter == 0, vtypes.Not(next_last), vtypes.Not(last))( bank_sel(0), reg_bank_sel(0), block_counter(block_size - 1), counter(length - 1), next_valid_on(1), next_last(length == 1) ) for reg_addr in reg_addr_list: self.seq.If(ext_cond, counter == 0, vtypes.Not(next_last), vtypes.Not(last))( reg_addr(addr) ) for ram in self.rams: ram.seq.If(ext_cond, counter == 0, vtypes.Not(next_last), vtypes.Not(last))( ram.interfaces[port].addr(addr) ) self.seq.If(data_cond, counter > 0)( block_counter.dec(), counter.dec(), next_valid_on(1), next_last(0) ) self.seq.If(data_cond, counter > 0, block_counter == 0)( block_counter(block_size - 1), bank_sel.inc() ) self.seq.If(data_cond, counter > 0, block_counter == 0, bank_sel == self.numbanks - 1)( bank_sel(0) ) for i, (reg_addr, next_addr) in enumerate(zip(reg_addr_list, next_addr_list)): self.seq.If(data_cond, counter > 0, bank_sel == i)( reg_addr(next_addr) ) for i, (ram, ram_addr) in enumerate(zip(self.rams, ram_addr_list)): ram.seq.If(data_cond, counter > 0, bank_sel == i)( ram.interfaces[port].addr(ram_addr) ) 
self.seq.If(data_cond, counter == 1)( next_last(1) ) df = self.df if self.df is not None else dataflow df_data = df.Variable(data, data_valid, data_ready, width=self.orig_datawidth, point=point, signed=signed) df_last = df.Variable(last, last_valid, last_ready, width=1) done = last return df_data, df_last, done def _read_dataflow_block_nested(self, port, addr, length=1, block_size=1, stride=1, cond=None, point=0, signed=True): """ @return data, last, done """ len_rams = 0 for ram in self.rams: if not isinstance(ram, MultibankRAM): raise TypeError('All sub-bank RAMs must be MultibankRAM.') if len_rams == 0: len_rams = len(ram.rams) elif len_rams != len(ram.rams): raise ValueError( 'All sub-bank RAMs must have the same number of RAMs.') rams = [[] for i in range(len_rams)] for ram in self.rams: for i, sub in enumerate(ram.rams): rams[i].append(sub) rams = [to_multibank_ram(ram_list, keep_hierarchy=True) for ram_list in rams] data_list = [] last_list = [] done_list = [] for ram in rams: data, last, done = ram.read_dataflow_block( port, addr, length, block_size, stride, cond, point, signed) data_list.insert(0, data) last_list.insert(0, last) done_list.insert(0, done) merged_data = dtypes.Cat(*data_list) merged_last = last_list[-1] merged_done = done_list[-1] return merged_data, merged_last, merged_done def write_dataflow(self, port, addr, data, length=1, stride=1, cond=None, when=None): """ @return done """ done_list = [] lsb = 0 msb = 0 for ram in self.rams: msb = msb + ram.datawidth bank_data = dtypes.Slice(data, msb - 1, lsb) done = ram.write_dataflow( port, addr, bank_data, length, stride, cond, when) done_list.append(done) lsb = msb merged_done = done_list[0] return merged_done def write_dataflow_interleave(self, port, addr, data, length=1, stride=1, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if self.seq is None: self.seq = Seq(self.m, self.name, self.clk, self.rst) for ram in self.rams: if ram._write_disabled[port]: 
raise TypeError('Write disabled.') counter = self.m.TmpReg(length.bit_length() + 1, initval=0) last = self.m.TmpReg(initval=0) ext_cond = dtypes.make_condition(cond) data_cond = dtypes.make_condition(counter > 0, vtypes.Not(last)) if when is None or not isinstance(when, dtypes._Numeric): raw_data, raw_valid = data.read(cond=data_cond) else: data_list, raw_valid = dtypes.read_multi( self.m, data, when, cond=data_cond) raw_data = data_list[0] when = data_list[1] when_cond = dtypes.make_condition(when, ready=data_cond) if when_cond is not None: raw_valid = vtypes.Ands(when_cond, raw_valid) log_numbanks = util.log2(self.numbanks) reg_addr = self.m.TmpReg(self.addrwidth + log_numbanks, initval=0) next_addr = self.m.TmpWire(self.addrwidth + log_numbanks) next_addr.assign(reg_addr + stride) ram_addr_list = [self.m.TmpWire(ram.addrwidth) for ram in self.rams] for ram_addr in ram_addr_list: ram_addr.assign(next_addr >> log_numbanks) bank_sel = self.m.TmpWire(log_numbanks) bank_sel.assign(next_addr) self.seq.If(ext_cond, counter == 0)( reg_addr(addr - stride), counter(length), ) self.seq.If(raw_valid, counter > 0)( reg_addr(next_addr), counter.dec() ) for i, (ram, ram_addr) in enumerate(zip(self.rams, ram_addr_list)): ram.seq.If(raw_valid, counter > 0)( ram.interfaces[port].addr(ram_addr), ram.interfaces[port].wdata(raw_data), ram.interfaces[port].wenable(bank_sel == i) ) self.seq.If(raw_valid, counter == 1)( last(1) ) # de-assert self.seq.Delay(1)( last(0) ) for ram in self.rams: ram.seq.Delay(1)( ram.interfaces[port].wenable(0) ) done = last return done def write_dataflow_pattern_interleave(self, port, addr, data, pattern, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if self.seq is None: self.seq = Seq(self.m, self.name, self.clk, self.rst) for ram in self.rams: if ram._write_disabled[port]: raise TypeError('Write disabled.') if not isinstance(pattern, (tuple, list)): raise TypeError('pattern must be list or tuple.') if not 
pattern: raise ValueError( 'pattern must have one (size, stride) pair at least.') if not isinstance(pattern[0], (tuple, list)): pattern = (pattern,) last = self.m.TmpReg(initval=0) running = self.m.TmpReg(initval=0) ext_cond = dtypes.make_condition(cond) data_cond = dtypes.make_condition(running, vtypes.Not(last)) if when is None or not isinstance(when, dtypes._Numeric): raw_data, raw_valid = data.read(cond=data_cond) else: data_list, raw_valid = dtypes.read_multi( self.m, data, when, cond=data_cond) raw_data = data_list[0] when = data_list[1] when_cond = dtypes.make_condition(when, ready=data_cond) if when_cond is not None: raw_valid = vtypes.Ands(when_cond, raw_valid) offset_addr = self.m.TmpWire(self.addrwidth) offsets = [self.m.TmpReg(self.addrwidth, initval=0) for _ in pattern] offset_addr_value = addr for offset in offsets: offset_addr_value = offset + offset_addr_value offset_addr.assign(offset_addr_value) log_numbanks = util.log2(self.numbanks) ram_addr_list = [self.m.TmpWire(ram.addrwidth) for ram in self.rams] for ram_addr in ram_addr_list: ram_addr.assign(offset_addr >> log_numbanks) bank_sel = self.m.TmpWire(log_numbanks) bank_sel.assign(offset_addr) count_list = [self.m.TmpReg(out_size.bit_length() + 1, initval=0) for (out_size, out_stride) in pattern] self.seq.If(ext_cond, vtypes.Not(running))( running(1) ) for i, (ram, ram_addr) in enumerate(zip(self.rams, ram_addr_list)): ram.seq.If(raw_valid, running)( ram.interfaces[port].addr(ram_addr), ram.interfaces[port].wdata(raw_data), ram.interfaces[port].wenable(bank_sel == i) ) update_count = None last_one = None for offset, count, (out_size, out_stride) in zip(offsets, count_list, pattern): self.seq.If(ext_cond, vtypes.Not(running))( count(out_size - 1), offset(0) ) self.seq.If(raw_valid, running, update_count)( count.dec(), offset(offset + out_stride) ) self.seq.If(raw_valid, running, update_count, count == 0)( count(out_size - 1), offset(0) ) if update_count is None: update_count = count == 0 else: 
update_count = vtypes.Ands(update_count, count == 0) if last_one is None: last_one = count == 0 else: last_one = vtypes.Ands(last_one, count == 0) self.seq.If(raw_valid, last_one)( running(0), last(1) ) # de-assert self.seq.Delay(1)( last(0) ) for ram in self.rams: ram.seq.Delay(1)( ram.interfaces[port].wenable(0) ) done = last return done def write_dataflow_multidim_interleave(self, port, addr, data, shape, order=None, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if order is None: order = list(reversed(range(len(shape)))) pattern = self._to_pattern(shape, order) return self.write_dataflow_pattern_interleave(port, addr, data, pattern, cond=cond, when=when) def write_dataflow_bcast(self, port, addr, data, length=1, stride=1, cond=None, when=None): """ @return done """ done_list = [] for ram in self.rams: done = ram.write_dataflow( port, addr, data, length, stride, cond, when) done_list.append(done) merged_done = done_list[0] return merged_done def write_dataflow_pattern_bcast(self, port, addr, data, pattern, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ done_list = [] for ram in self.rams: done = ram.write_dataflow_pattern( port, addr, data, pattern, cond, when) done_list.append(done) merged_done = done_list[0] return merged_done def write_dataflow_multidim_bcast(self, port, addr, data, shape, order=None, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if order is None: order = list(reversed(range(len(shape)))) pattern = self._to_pattern(shape, order) return self.write_dataflow_pattern_bcast(port, addr, data, pattern, cond=cond, when=when) def write_dataflow_block(self, port, addr, data, length=1, block_size=1, stride=1, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ if self.keep_hierarchy and isinstance(self.rams[0], MultibankRAM): return self._write_dataflow_block_nested(port, addr, data, length, 
block_size, stride, cond, when) if self.seq is None: self.seq = Seq(self.m, self.name, self.clk, self.rst) for ram in self.rams: if ram._write_disabled[port]: raise TypeError('Write disabled.') block_counter = self.m.TmpReg(block_size.bit_length() + 1, initval=0) counter = self.m.TmpReg(length.bit_length() + 1, initval=0) last = self.m.TmpReg(initval=0) ext_cond = dtypes.make_condition(cond) data_cond = dtypes.make_condition(counter > 0, vtypes.Not(last)) if when is None or not isinstance(when, dtypes._Numeric): raw_data, raw_valid = data.read(cond=data_cond) else: data_list, raw_valid = dtypes.read_multi( self.m, data, when, cond=data_cond) raw_data = data_list[0] when = data_list[1] when_cond = dtypes.make_condition(when, ready=data_cond) if when_cond is not None: raw_valid = vtypes.Ands(when_cond, raw_valid) log_numbanks = util.log2(self.numbanks) reg_addr_list = [self.m.TmpReg(self.addrwidth, initval=0) for ram in self.rams] next_addr_list = [self.m.TmpWire(self.addrwidth) for ram in self.rams] for next_addr, reg_addr in zip(next_addr_list, reg_addr_list): next_addr.assign(reg_addr + stride) ram_addr_list = [self.m.TmpWire(ram.addrwidth) for ram in self.rams] for ram_addr, next_addr in zip(ram_addr_list, next_addr_list): ram_addr.assign(next_addr) bank_sel = self.m.TmpReg(log_numbanks, initval=0) self.seq.If(ext_cond, counter == 0)( bank_sel(0), block_counter(block_size - 1), counter(length), ) for reg_addr in reg_addr_list: self.seq.If(ext_cond, counter == 0)( reg_addr(addr - stride) ) self.seq.If(raw_valid, counter > 0)( block_counter.dec(), counter.dec() ) self.seq.If(raw_valid, counter > 0, block_counter == 0)( block_counter(block_size - 1), bank_sel.inc() ) self.seq.If(raw_valid, counter > 0, block_counter == 0, bank_sel == self.numbanks - 1)( bank_sel(0) ) for i, (reg_addr, next_addr) in enumerate(zip(reg_addr_list, next_addr_list)): self.seq.If(raw_valid, counter > 0, bank_sel == i)( reg_addr(next_addr) ) for i, (ram, ram_addr) in 
enumerate(zip(self.rams, ram_addr_list)): ram.seq.If(raw_valid, counter > 0)( ram.interfaces[port].addr(ram_addr), ram.interfaces[port].wdata(raw_data), ram.interfaces[port].wenable(bank_sel == i) ) self.seq.If(raw_valid, counter == 1)( last(1) ) # de-assert self.seq.Delay(1)( last(0) ) for ram in self.rams: ram.seq.Delay(1)( ram.interfaces[port].wenable(0) ) done = last return done def _write_dataflow_block_nested(self, port, addr, data, length=1, block_size=1, stride=1, cond=None, when=None): """ @return done 'data' and 'when' must be dataflow variables """ len_rams = 0 for ram in self.rams: if not isinstance(ram, MultibankRAM): raise TypeError('All sub-bank RAMs must be MultibankRAM.') if len_rams == 0: len_rams = len(ram.rams) elif len_rams != len(ram.rams): raise ValueError( 'All sub-bank RAMs must have the same number of RAMs.') rams = [[] for i in range(len_rams)] for ram in self.rams: for i, sub in enumerate(ram.rams): rams[i].append(sub) rams = [to_multibank_ram(ram_list, keep_hierarchy=True) for ram_list in rams] done_list = [] lsb = 0 msb = 0 for ram in rams: msb = msb + ram.orig_datawidth bank_data = dtypes.Slice(data, msb - 1, lsb) done = ram.write_dataflow_block( port, addr, bank_data, length, block_size, stride, cond, when) done_list.append(done) lsb = msb merged_done = done_list[0] return merged_done class _PackedMultibankRAM(MultibankRAM): def __init__(self, src=None, name=None, keep_hierarchy=False): if not isinstance(src, (tuple, list)): src = [src] if not keep_hierarchy: src = extract_rams(src) if len(src) < 2: raise ValueError('numbanks must be 2 or more') max_datawidth = 0 for ram in src: max_datawidth = max(max_datawidth, ram.datawidth) max_addrwidth = 0 for ram in src: max_addrwidth = max(max_addrwidth, ram.addrwidth) max_numports = src[0].numports for ram in src[1:]: if max_numports != ram.numports: raise ValueError('numports must be same') self.m = src[0].m self.name = ('_'.join([ram.name for ram in src]) if name is None else name) 
self.clk = src[0].clk self.rst = src[0].rst self.orig_datawidth = max_datawidth self.datawidth = max_datawidth * len(src) self.addrwidth = max_addrwidth self.numports = max_numports self.numbanks = len(src) self.shift = util.log2(self.numbanks) self.rams = src self.keep_hierarchy = keep_hierarchy self.seq = None for ram in self.rams: if ram.seq is not None: self.seq = ram.seq break self.df = DataflowManager(self.m, self.clk, self.rst) # key: (axi._id(), port, ram_method_name) self.cache_dma_reqs = {} self.mutex = None multibank_ram_cache = {} def to_multibank_ram(rams, name=None, keep_hierarchy=False): ids = tuple([ram._id() for ram in rams]) if ids in multibank_ram_cache: return multibank_ram_cache[ids] ram = _PackedMultibankRAM(rams, name, keep_hierarchy) multibank_ram_cache[ids] = ram return ram
32.607184
91
0.541772
10,568
85,333
4.177044
0.025454
0.019482
0.022676
0.020796
0.921211
0.898806
0.884942
0.863149
0.853182
0.834968
0
0.01262
0.354611
85,333
2,616
92
32.619648
0.788917
0.018879
0
0.778299
0
0
0.017786
0.000253
0
0
0
0
0
1
0.035472
false
0
0.007825
0.001565
0.074074
0.000522
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
0e1a112dea17289dd592d1f776f59cac3a969b47
8,680
py
Python
src/models/model_anomary_detection.py
koonn/bunseki
deb397e40a02bb709825c70c9be81f54449ac195
[ "BSD-3-Clause" ]
null
null
null
src/models/model_anomary_detection.py
koonn/bunseki
deb397e40a02bb709825c70c9be81f54449ac195
[ "BSD-3-Clause" ]
null
null
null
src/models/model_anomary_detection.py
koonn/bunseki
deb397e40a02bb709825c70c9be81f54449ac195
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """異常検知系のモデルを記載するモジュール Absモデルを継承したモデルを作成する - One-class SVM - LOF - IsolationForest TODO: - OCSVM, LOFがまだ動作確認できていない """ import os import joblib from sklearn.svm import OneClassSVM from sklearn.neighbors import LocalOutlierFactor from sklearn.ensemble import IsolationForest from sklearn.preprocessing import StandardScaler import config from .interface import AbsModel from .util import scale_scores class ModelOCSVM(AbsModel): """One-class SVMのモデルクラス 特徴量を標準化して、One-class SVMで異常度スコアを算出するモデル Attributes: run_name(str): 実行の名前とfoldの番号を組み合わせた名前 params(dict): ハイパーパラメータ features_to_scale(Optional[List[str]]): スケール対象の特徴量を指定する model(Model): train後に学習済みモデルを保持. trainを実行するまでは、初期値のNoneをかえす. scaler(Model): train後に学習済みスケーラーを保持. trainを実行するまでは、初期値のNoneをかえす. """ def __init__(self, params, features_to_scale=None): super().__init__(params) self.features_to_scale = features_to_scale self.scaler = None def train(self, train_x, train_y=None, valid_x=None, valid_y=None): """モデルの学習を行う関数 Args: train_x(pd.DataFrame of [n_samples, n_features]): 学習データの特徴量 train_y(1-D array-like shape of [n_samples]): 学習データのラベル配列. 
教師なしモデルのためtrain_yは受け取るが使用しない valid_x(array-like shape of [n_samples, n_features]): バリデーションデータの特徴量 valid_y(1-D array-like shape of [n_samples]): バリデーションデータのラベル配列 Notes: 教師なしモデルのためtrain_yは受け取るが使用しない 教師ありモデルと同じtrain.pyで実行できるよう、train_yは引数として受け取っている TODO: - 少なくともモデル側ではtrain_yを使わないことを明示するため、**kwargsで書き換える(書き換えられるか試す) """ # データのスケーリング # スケールするカラムを指定 if self.features_to_scale is None: self.features_to_scale = train_x.columns # スケーラを作成 scaler = StandardScaler() scaler.fit(train_x[self.features_to_scale]) # スケーリングを実行 train_x.loc[:, self.features_to_scale] = scaler.transform(train_x[self.features_to_scale]) # モデルの構築・学習 model = OneClassSVM(**self.params) model = model.fit(train_x) # モデル・スケーラーを保持する self.model = model self.scaler = scaler def predict(self, x): """異常度スコアを算出する関数""" # スケールするカラムを指定 if self.features_to_scale is None: self.features_to_scale = x.columns # xの前処理の変換 x.loc[:, self.features_to_scale] = self.scaler.transform(x[self.features_to_scale]) # 異常度スコアの算出 scores = self.model.score_samples(x) # スコアを0-1で1の方が異常になるように変換 scaled_scores = scale_scores(scores, is_reversed=True) return scaled_scores def save_model(self): """モデルを保存する関数""" # パスの設定 model_dir = os.path.join(config.MODEL_OUTPUT_DIR, 'ocsvm') model_path = os.path.join(model_dir, f'{self.run_name}-model.pkl') scaler_path = os.path.join(model_dir, f'{self.run_name}-scaler.pkl') # 保存先のディレクトリがなければ作成 os.makedirs(model_dir, exist_ok=True) # モデル・スケーラーの保存 joblib.dump(self.model, model_path) joblib.dump(self.scaler, scaler_path) def load_model(self): """モデルを読み込む関数""" model_dir = os.path.join(config.MODEL_OUTPUT_DIR, 'ocsvm') model_path = os.path.join(model_dir, f'{self.run_name}-model.pkl') scaler_path = os.path.join(model_dir, f'{self.run_name}-scaler.pkl') self.model = joblib.load(model_path) self.scaler = joblib.load(scaler_path) class ModelLOF(AbsModel): """LOFのモデルクラス 特徴量を標準化して、One-class SVMで異常度スコアを算出するモデル Attributes: run_name(str): 実行の名前とfoldの番号を組み合わせた名前 params(dict): ハイパーパラメータ features_to_scale(Optional[List[str]]): 
スケール対象の特徴量を指定する model(Model): train後に学習済みモデルを保持. trainを実行するまでは、初期値のNoneをかえす. scaler(Model): train後に学習済みスケーラーを保持. trainを実行するまでは、初期値のNoneをかえす. """ def __init__(self, params, features_to_scale=None): super().__init__(params) self.features_to_scale = features_to_scale self.scaler = None def train(self, train_x, train_y=None, valid_x=None, valid_y=None): """モデルの学習を行う関数 LOFでは、事前にモデルを学習するのではなく、fit_transformでそのデータセット内での異常度を計算することになるので、 trainではスケーラーだけを学習するようにする Args: train_x(pd.DataFrame of [n_samples, n_features]): 学習データの特徴量 train_y(1-D array-like shape of [n_samples]): 学習データのラベル配列. 教師なしモデルのためtrain_yは受け取るが使用しない valid_x(array-like shape of [n_samples, n_features]): バリデーションデータの特徴量 valid_y(1-D array-like shape of [n_samples]): バリデーションデータのラベル配列 Notes: 教師なしモデルのためtrain_yは受け取るが使用しない 教師ありモデルと同じtrain.pyで実行できるよう、train_yは引数として受け取っている TODO: - 少なくともモデル側ではtrain_yを使わないことを明示するため、**kwargsで書き換える(書き換えられるか試す) - 今のtrain.pyとの組み合わせだとvalidごとに学習することになっているので、allで学習できるように修正 - save_modelのモデルの方は意味ないのでどうするか検討する """ # データのスケーリング # スケールするカラムを指定 if self.features_to_scale is None: self.features_to_scale = train_x.columns # スケーラを作成 scaler = StandardScaler() scaler.fit(train_x[self.features_to_scale]) # スケーリングを実行 train_x.loc[:, self.features_to_scale] = scaler.transform(train_x[self.features_to_scale]) # モデルインスタンスを作成(学習はしない) model = LocalOutlierFactor(**self.params) # モデル・スケーラーを保持する self.model = model self.scaler = scaler def predict(self, x): """異常度スコアを算出する関数""" # スケールするカラムを指定 if self.features_to_scale is None: self.features_to_scale = x.columns # xの前処理の変換 x.loc[:, self.features_to_scale] = self.scaler.transform(x[self.features_to_scale]) # モデルの学習と、異常度スコアの算出 self.model.fit(x) scores = self.model.negative_outlier_factor_ # スコアを0-1で1の方が異常になるように変換 scaled_scores = scale_scores(scores, is_reversed=True) return scaled_scores def save_model(self): """モデルを保存する関数""" # パスの設定 model_dir = os.path.join(config.MODEL_OUTPUT_DIR, 'lof') model_path = os.path.join(model_dir, f'{self.run_name}-model.pkl') scaler_path = 
os.path.join(model_dir, f'{self.run_name}-scaler.pkl') # 保存先のディレクトリがなければ作成 os.makedirs(model_dir, exist_ok=True) # モデル・スケーラーの保存 joblib.dump(self.model, model_path) joblib.dump(self.scaler, scaler_path) def load_model(self): """モデルを読み込む関数""" model_dir = os.path.join(config.MODEL_OUTPUT_DIR, 'lof') model_path = os.path.join(model_dir, f'{self.run_name}-model.pkl') scaler_path = os.path.join(model_dir, f'{self.run_name}-scaler.pkl') self.model = joblib.load(model_path) self.scaler = joblib.load(scaler_path) class ModelIsolationForest(AbsModel): """One-class SVMのモデルクラス 特徴量をIsolationForestにかける """ def __init__(self, params): super().__init__(params) def train(self, train_x, train_y=None, valid_x=None, valid_y=None): """モデルの学習を行う関数 Args: train_x(pd.DataFrame of [n_samples, n_features]): 学習データの特徴量 train_y(1-D array-like shape of [n_samples]): 学習データのラベル配列. 教師なしモデルのためtrain_yは受け取るが使用しない valid_x(array-like shape of [n_samples, n_features]): バリデーションデータの特徴量 valid_y(1-D array-like shape of [n_samples]): バリデーションデータのラベル配列 Notes: 教師なしモデルのためtrain_yは受け取るが使用しない 教師ありモデルと同じtrain.pyで実行できるよう、train_yは引数として受け取っている TODO: - 少なくともモデル側ではtrain_yを使わないことを明示するため、**kwargsで書き換える(書き換えられるか試す) """ # モデルの構築・学習 model = IsolationForest(**self.params) model = model.fit(train_x) # モデル・スケーラーを保持する self.model = model def predict(self, x): """異常度スコアを算出する関数""" # 異常度スコアの算出 scores = self.model.score_samples(x) # スコアを0-1で1の方が異常になるように変換 scaled_scores = scale_scores(scores, is_reversed=True) return scaled_scores def save_model(self): """モデルを保存する関数""" # パスの設定 model_dir = os.path.join(config.MODEL_OUTPUT_DIR, 'if') model_path = os.path.join(model_dir, f'{self.run_name}-model.pkl') # 保存先のディレクトリがなければ作成 os.makedirs(model_dir, exist_ok=True) # モデルの保存 joblib.dump(self.model, model_path) def load_model(self): """モデルを読み込む関数""" model_dir = os.path.join(config.MODEL_OUTPUT_DIR, 'if') model_path = os.path.join(model_dir, f'{self.run_name}-model.pkl') self.model = joblib.load(model_path)
30.56338
99
0.651843
993
8,680
5.471299
0.151057
0.047856
0.071784
0.069943
0.842076
0.836923
0.832505
0.827351
0.827351
0.818885
0
0.002452
0.248387
8,680
283
100
30.671378
0.829246
0.349309
0
0.818182
0
0
0.053183
0.049301
0
0
0
0.014134
0
1
0.151515
false
0
0.090909
0
0.30303
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
8
7ef2573468f381fcdd9b4b6d02d6f55f54dc41db
192,188
py
Python
pycatia/navigator_interfaces/n_4D_navigator_setting_att.py
evereux/catia_python
08948585899b12587b0415ce3c9191a408b34897
[ "MIT" ]
90
2019-02-21T10:05:28.000Z
2022-03-19T01:53:41.000Z
pycatia/navigator_interfaces/n_4D_navigator_setting_att.py
Luanee/pycatia
ea5eef8178f73de12404561c00baf7a7ca30da59
[ "MIT" ]
99
2019-05-21T08:29:12.000Z
2022-03-25T09:55:15.000Z
pycatia/navigator_interfaces/n_4D_navigator_setting_att.py
Luanee/pycatia
ea5eef8178f73de12404561c00baf7a7ca30da59
[ "MIT" ]
26
2019-04-04T06:31:36.000Z
2022-03-30T07:24:47.000Z
#! usr/bin/python3.6 """ Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-07-06 14:02:20.222384 .. warning:: The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only. They are there as a guide as to how the visual basic / catscript functions work and thus help debugging in pycatia. """ from pycatia.system_interfaces.setting_controller import SettingController class N4DNavigatorSettingAtt(SettingController): """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | System.IUnknown | System.IDispatch | System.CATBaseUnknown | System.CATBaseDispatch | System.AnyObject | System.SettingController | N4DNavigatorSettingAtt | | Interface to handle the settings of the DMU Navigator | workbench. | | | The different settings are: | | DMUClashPreview: | Display of the preview viewer when editing an interference. | | DMUDistancePreview: | Display of the preview viewer when editing a distance. | | DMUGroupPreview: | Display of the preview viewer when editing a group. | | DMUSectionPreview: | Display of the preview viewer when editing a section. | | DMUShuttlePreview: | Display of the preview viewer when editing a shuttle. | | DMUThicknessPreview: | Display of the preview viewer for the thickness command. | | DMUOffsetPreview: | Display of the preview viewer for the offset command. | | DMUSweptVolPreview: | Display of the preview viewer for the swept volume command. | | DMUSilhouettePreview: | Display of the preview viewer for the silhouette command. | | DMUWrappingPreview: | Display of the preview viewer for the wrapping command. | | DMUFreeSpacePreview: | Display of the preview viewer for the free space command. | | DMUSimplifPreview: | Display of the preview viewer for the simplification command. | | DMUVibrationVolPreview: | Display of the preview viewer for the vibration volume | command. | | DMUCut3DPreview: | Display of the preview viewer for the 3D cut command. 
| | DMUMergerPreview: | Display of the preview viewer for the merger command. | | NumUrlName: | Display of the hyperlink name. | | MarkerAutoUpdate: | Update on product structure modifications and scenes | activation. | | MarkerDefaultsColor: | Default color of an annotation. | | SceneDefaultsColor: | Default background color for scene environment. | | MarkerTextColor: | Default color of a text annotation. | | MarkerDefaultsWeight: | Default weight value of an annotation. | | MarkerDefaultsDashed: | Default dashed value of an annotation. | | MarkerDefaultsSize: | Default size value of an annotation. | | MarkerDefaultsFont: | Default font of an annotation. | | MarkerTextDashed: | Default dashed value of a text annotation. | | MarkerTextWeight: | Default weight value of a text annotation. | | PublishAutoLaunchBrowser: | Automatic launching of publish results in a browser. | | Marker2DAutoNaming: | Automatically use a Part's name as the default for the creation of text | annotations. | | Marker3DAutoNaming: | Activation of the mechanism that enables to transform temporary markers into | persistent 3D annotations. | | DMUReviewName: | The desired default name for DMU Reviews | | ForceVoxel: | Force users of the Spatial Query command to use the defined Released | Accuracy. | | ClearanceVoxel: | Definition of the Clearance value. | | ForceClearanceVoxel: | Force users of the Spatial Query command to use the defined Clearance | value. | | InsertMode: | Mode for the Import applicative data command. | | DMUGroupPreviewHiddenObjectsDisplayMode: | Display mode for hidden objects of a DMU Group in its preview: visualized as in | main 3D viewer or visualized with customized graphic | properties | | DMUGroupPreviewHiddenObjectsColor: | Color for hidden objects in DMU Group Preview. | | DMUGroupPreviewHiddenObjectsOpacity: | Opacity for hidden objects in DMU Group Preview. 
| | DMUGroupPreviewHiddenObjectsLowIntMode: | Hidden objects are low intensified or not in DMU Group | Preview. | | DMUGroupPreviewHiddenObjectsPickMode: | Hidden objects can be picked or not in DMU Group Preview. """ def __init__(self, com_object): super().__init__(com_object) self.n_4d_navigator_setting_att = com_object @property def clearance_voxel(self) -> float: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property ClearanceVoxel() As float | | Returns or sets the clearance value (oValue the clearance value in mm). :return: float :rtype: float """ return self.n_4d_navigator_setting_att.ClearanceVoxel @clearance_voxel.setter def clearance_voxel(self, value: float): """ :param float value: """ self.n_4d_navigator_setting_att.ClearanceVoxel = value @property def dmu_clash_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUClashPreview() As boolean | | Returns or sets the preview activation state for Interference (TRUE the | preview window is automatically displayed, FALSE the preview window is not | displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUClashPreview @dmu_clash_preview.setter def dmu_clash_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUClashPreview = value @property def dmu_cut_3d_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUCut3DPreview() As boolean | | Returns or sets the preview activation state for 3D Cut (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). 
:return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUCut3DPreview @dmu_cut_3d_preview.setter def dmu_cut_3d_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUCut3DPreview = value @property def dmu_distance_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUDistancePreview() As boolean | | Returns or sets the preview activation state for Distance (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUDistancePreview @dmu_distance_preview.setter def dmu_distance_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUDistancePreview = value @property def dmu_free_space_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUFreeSpacePreview() As boolean | | Returns or sets the preview activation state for Free Space (TRUE the | preview window is automatically displayed, FALSE the preview window is not | displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUFreeSpacePreview @dmu_free_space_preview.setter def dmu_free_space_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUFreeSpacePreview = value @property def dmu_group_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUGroupPreview() As boolean | | Returns or sets the preview activation state for Group (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). 
:return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUGroupPreview @dmu_group_preview.setter def dmu_group_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUGroupPreview = value @property def dmu_group_preview_hidden_objects_display_mode(self) -> int: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUGroupPreviewHiddenObjectsDisplayMode() As | CatDMUGroupPreviewHiddenObjectsDisplayMode | | Returns or sets the mode for the display of hidden objects in DMU Group | Preview. :return: int :rtype: int """ return self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsDisplayMode @dmu_group_preview_hidden_objects_display_mode.setter def dmu_group_preview_hidden_objects_display_mode(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsDisplayMode = value @property def dmu_group_preview_hidden_objects_low_int(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUGroupPreviewHiddenObjectsLowInt() As boolean | | Returns or sets the Low Intensity mode for the display of hidden objects in | DMU Group Preview. :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsLowInt @dmu_group_preview_hidden_objects_low_int.setter def dmu_group_preview_hidden_objects_low_int(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsLowInt = value @property def dmu_group_preview_hidden_objects_opacity(self) -> int: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUGroupPreviewHiddenObjectsOpacity() As long | | Returns or sets the opacity for the display of hidden objects in DMU Group | Preview. 
:return: int :rtype: int """ return self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsOpacity @dmu_group_preview_hidden_objects_opacity.setter def dmu_group_preview_hidden_objects_opacity(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsOpacity = value @property def dmu_group_preview_hidden_objects_pick(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUGroupPreviewHiddenObjectsPick() As boolean | | Returns or sets the pick mode for the display of hidden objects in DMU | Group Preview. :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsPick @dmu_group_preview_hidden_objects_pick.setter def dmu_group_preview_hidden_objects_pick(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUGroupPreviewHiddenObjectsPick = value @property def dmu_merger_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUMergerPreview() As boolean | | Returns or sets the preview activation state for Merger (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUMergerPreview @dmu_merger_preview.setter def dmu_merger_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUMergerPreview = value @property def dmu_offset_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUOffsetPreview() As boolean | | Returns or sets the preview activation state for Offset (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). 
:return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUOffsetPreview @dmu_offset_preview.setter def dmu_offset_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUOffsetPreview = value @property def dmu_review_name(self) -> str: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUReviewName() As CATBSTR | | Returns or sets the default name for the DMU Reviews (oValue, the DMU | Review name). :return: str :rtype: str """ return self.n_4d_navigator_setting_att.DMUReviewName @dmu_review_name.setter def dmu_review_name(self, value: str): """ :param str value: """ self.n_4d_navigator_setting_att.DMUReviewName = value @property def dmu_section_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUSectionPreview() As boolean | | Returns or sets the preview activation state for Section (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUSectionPreview @dmu_section_preview.setter def dmu_section_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUSectionPreview = value @property def dmu_shuttle_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUShuttlePreview() As boolean | | Returns or sets the preview activation state for Shuttle (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUShuttlePreview @dmu_shuttle_preview.setter def dmu_shuttle_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUShuttlePreview = value @property def dmu_silhouette_preview(self) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUSilhouettePreview() As boolean | | Returns or sets the preview activation state for Silhouette (TRUE the | preview window is automatically displayed, FALSE the preview window is not | displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUSilhouettePreview @dmu_silhouette_preview.setter def dmu_silhouette_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUSilhouettePreview = value @property def dmu_simplif_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUSimplifPreview() As boolean | | Returns or sets the preview activation state for Simplification (TRUE the | preview window is automatically displayed, FALSE the preview window is not | displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUSimplifPreview @dmu_simplif_preview.setter def dmu_simplif_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUSimplifPreview = value @property def dmu_swept_vol_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUSweptVolPreview() As boolean | | Returns or sets the preview activation state for Swept Volume (TRUE the | preview window is automatically displayed, FALSE the preview window is not | displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUSweptVolPreview @dmu_swept_vol_preview.setter def dmu_swept_vol_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUSweptVolPreview = value @property def dmu_thickness_preview(self) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUThicknessPreview() As boolean | | Returns or sets the preview activation state for Thickness (TRUE the | preview window is automatically displayed, FALSE the preview window is not | displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUThicknessPreview @dmu_thickness_preview.setter def dmu_thickness_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUThicknessPreview = value @property def dmu_vibration_vol_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUVibrationVolPreview() As boolean | | Returns or sets the preview activation state for Vibration volume (TRUE the | preview window is automatically displayed, FALSE the preview window is not | displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUVibrationVolPreview @dmu_vibration_vol_preview.setter def dmu_vibration_vol_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUVibrationVolPreview = value @property def dmu_wrapping_preview(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property DMUWrappingPreview() As boolean | | Returns or sets the preview activation state for Wrapping (TRUE the preview | window is automatically displayed, FALSE the preview window is not displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.DMUWrappingPreview @dmu_wrapping_preview.setter def dmu_wrapping_preview(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.DMUWrappingPreview = value @property def force_clearance_voxel(self) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property ForceClearanceVoxel() As boolean | | Returns or sets the activation state for the use of the clearance value | (TRUE the clearance value is used, FALSE the clearance value is not used); :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.ForceClearanceVoxel @force_clearance_voxel.setter def force_clearance_voxel(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.ForceClearanceVoxel = value @property def force_voxel(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property ForceVoxel() As boolean | | Returns or sets the activation state for the use of the Released accuracy | value (TRUE the released accuracy value is used, FALSE the released accuracy | value is not used); :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.ForceVoxel @force_voxel.setter def force_voxel(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.ForceVoxel = value @property def insert_level(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property InsertLevel() As boolean | | Returns or sets the level for the Import Applicative Data command | (True : at highest review level, False : in current review). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.InsertLevel @insert_level.setter def insert_level(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.InsertLevel = value @property def insert_mode(self) -> int: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property InsertMode() As CatSacSettingsEnum | | Returns or sets the mode for the Import Applicative Data command | (CatSacSettingsEnumNoInsert no import of applicative data, | CatSacSettingsEnumAutomatic the import of applicative is automatic, | CatSacSettingsEnumUserPrompt the user can select the applicative data to | import). :return: int :rtype: int """ return self.n_4d_navigator_setting_att.InsertMode @insert_mode.setter def insert_mode(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.InsertMode = value @property def marker_2d_auto_naming(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property Marker2DAutoNaming() As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_Marker2DAutoNaming This method will be replaced by | MarkerSettingAtt.put_Marker2DAutoNaming Returns or sets the activation state | for 2D annotations automatic naming (TRUE naming is automatic, FALSE the naming | is not automatic). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.Marker2DAutoNaming @marker_2d_auto_naming.setter def marker_2d_auto_naming(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.Marker2DAutoNaming = value @property def marker_3d_auto_naming(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property Marker3DAutoNaming() As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_Marker3DAutoNaming This method will be replaced by | MarkerSettingAtt.put_Marker3DAutoNaming Returns or sets the activation state | for 3D annotations automatic naming (TRUE naming is automatic, FALSE the naming | is not automatic). 
:return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.Marker3DAutoNaming @marker_3d_auto_naming.setter def marker_3d_auto_naming(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.Marker3DAutoNaming = value @property def marker_auto_update(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property MarkerAutoUpdate() As boolean | | Returns or sets the activation of the automatic update on product structure | modification (TRUE update is done automatically, FALSE update is done | manually). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.MarkerAutoUpdate @marker_auto_update.setter def marker_auto_update(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.MarkerAutoUpdate = value @property def marker_defaults_dashed(self) -> int: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property MarkerDefaultsDashed() As long | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_MarkerDefaultsDashed This method will be replaced by | MarkerSettingAtt.put_MarkerDefaultsDashed Returns or sets the default dashed | value of an annotation (oValue the dashed value). :return: int :rtype: int """ return self.n_4d_navigator_setting_att.MarkerDefaultsDashed @marker_defaults_dashed.setter def marker_defaults_dashed(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.MarkerDefaultsDashed = value @property def marker_defaults_font(self) -> str: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property MarkerDefaultsFont() As CATBSTR | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_MarkerTextDefaultsFont2D This method will be replaced by | MarkerSettingAtt.put_MarkerTextDefaultsFont2D Returns or sets the default font | of an annotation (oValue the font name). 
:return: str :rtype: str """ return self.n_4d_navigator_setting_att.MarkerDefaultsFont @marker_defaults_font.setter def marker_defaults_font(self, value: str): """ :param str value: """ self.n_4d_navigator_setting_att.MarkerDefaultsFont = value @property def marker_defaults_size(self) -> int: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property MarkerDefaultsSize() As long | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_MarkerTextDefaultsSize2D This method will be replaced by | MarkerSettingAtt.put_MarkerTextDefaultsSize2D Returns or sets the default size | value of an annotation (oValue the size value).. :return: int :rtype: int """ return self.n_4d_navigator_setting_att.MarkerDefaultsSize @marker_defaults_size.setter def marker_defaults_size(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.MarkerDefaultsSize = value @property def marker_defaults_weight(self) -> int: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property MarkerDefaultsWeight() As long | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_MarkerDefaultsWeight This method will be replaced by | MarkerSettingAtt.put_MarkerDefaultsWeight Returns or sets the default weight | value of an annotation (oValue the weight value). :return: int :rtype: int """ return self.n_4d_navigator_setting_att.MarkerDefaultsWeight @marker_defaults_weight.setter def marker_defaults_weight(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.MarkerDefaultsWeight = value @property def marker_text_dashed(self) -> int: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property MarkerTextDashed() As long | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_MarkerTextDashed2D This method will be replaced by | MarkerSettingAtt.put_MarkerTextDashed2D Returns or sets the default dashed | value of a text annotation (oValue the dashed value). :return: int :rtype: int """ return self.n_4d_navigator_setting_att.MarkerTextDashed @marker_text_dashed.setter def marker_text_dashed(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.MarkerTextDashed = value @property def marker_text_weight(self) -> int: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property MarkerTextWeight() As long | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.get_MarkerTextWeight2D This method will be replaced by | MarkerSettingAtt.put_MarkerTextWeight2D Returns or sets the default weight | value of a text annotation (oValue the weight value). :return: int :rtype: int """ return self.n_4d_navigator_setting_att.MarkerTextWeight @marker_text_weight.setter def marker_text_weight(self, value: int): """ :param int value: """ self.n_4d_navigator_setting_att.MarkerTextWeight = value @property def num_url_name(self) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property NumUrlName() As boolean | | Returns or sets the name activation state for Hyperlink (TRUE the hyperlink | name is displayed, FALSE the hyperlink name is not displayed). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.NumUrlName @num_url_name.setter def num_url_name(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.NumUrlName = value @property def publish_auto_launch_browser(self) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384) | o Property PublishAutoLaunchBrowser() As boolean | | Returns or sets the activation state of the automatic launching of the | publish browser (TRUE the publish browser is automatically opened, FALSE the | publish browser is not automatically opened). :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.PublishAutoLaunchBrowser @publish_auto_launch_browser.setter def publish_auto_launch_browser(self, value: bool): """ :param bool value: """ self.n_4d_navigator_setting_att.PublishAutoLaunchBrowser = value def get_clearance_voxel_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetClearanceVoxelInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the ClearanceVoxel | parameter. | Role:Retrieves the state of the ClearanceVoxel parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetClearanceVoxelInfo(io_admin_level, io_locked) def get_dmu_clash_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUClashPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUClashPreview | parameter. 
| Role:Retrieves the state of the DMUClashPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUClashPreviewInfo(io_admin_level, io_locked) def get_dmu_cut_3d_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUCut3DPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUCut3DPreview | parameter. | Role:Retrieves the state of the DMUCut3DPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUCut3DPreviewInfo(io_admin_level, io_locked) def get_dmu_distance_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUDistancePreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUDistancePreview | parameter. | Role:Retrieves the state of the DMUDistancePreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUDistancePreviewInfo(io_admin_level, io_locked) def get_dmu_free_space_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUFreeSpacePreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUFreeSpacePreview | parameter. | Role:Retrieves the state of the DMUFreeSpacePreview parameter in the | current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. 
:param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUFreeSpacePreviewInfo(io_admin_level, io_locked) def get_dmu_group_preview_hidden_objects_color(self, o_red: int, o_green: int, o_blue: int) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub GetDMUGroupPreviewHiddenObjectsColor(long oRed, | long oGreen, | long oBlue) | | Returns the color for the display of hidden objects in DMU Group Preview. :param int o_red: :param int o_green: :param int o_blue: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.GetDMUGroupPreviewHiddenObjectsColor(o_red, o_green, o_blue) def get_dmu_group_preview_hidden_objects_color_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUGroupPreviewHiddenObjectsColorInfo(CATBSTR | ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the | DMUGroupPreviewHiddenObjectsColor parameter. | Role:Retrieves the state of the DMUGroupPreviewHiddenObjectsColor parameter | in the current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUGroupPreviewHiddenObjectsColorInfo(io_admin_level, io_locked) def get_dmu_group_preview_hidden_objects_display_mode_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUGroupPreviewHiddenObjectsDisplayModeInfo(CATBSTR | ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the | DMUGroupPreviewHiddenObjectsDisplayMode parameter. | Role:Retrieves the state of the DMUGroupPreviewHiddenObjectsDisplayMode | parameter in the current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUGroupPreviewHiddenObjectsDisplayModeInfo(io_admin_level, io_locked) def get_dmu_group_preview_hidden_objects_low_int_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUGroupPreviewHiddenObjectsLowIntInfo(CATBSTR | ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the | DMUGroupPreviewHiddenObjectsLowInt parameter. | Role:Retrieves the state of the DMUGroupPreviewHiddenObjectsLowInt | parameter in the current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. 
| | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUGroupPreviewHiddenObjectsLowIntInfo(io_admin_level, io_locked) def get_dmu_group_preview_hidden_objects_opacity_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUGroupPreviewHiddenObjectsOpacityInfo(CATBSTR | ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the | DMUGroupPreviewHiddenObjectsOpacity parameter. | Role:Retrieves the state of the DMUGroupPreviewHiddenObjectsOpacity | parameter in the current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUGroupPreviewHiddenObjectsOpacityInfo(io_admin_level, io_locked) def get_dmu_group_preview_hidden_objects_pick_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUGroupPreviewHiddenObjectsPickInfo(CATBSTR | ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUGroupPreviewHiddenObjectsPick | parameter. | Role:Retrieves the state of the DMUGroupPreviewHiddenObjectsPick parameter | in the current environment. 
| | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUGroupPreviewHiddenObjectsPickInfo(io_admin_level, io_locked) def get_dmu_group_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUGroupPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUGroupPreview | parameter. | Role:Retrieves the state of the DMUGroupPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUGroupPreviewInfo(io_admin_level, io_locked) def get_dmu_merger_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUMergerPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUMergerPreview | parameter. | Role:Retrieves the state of the DMUMergerPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUMergerPreviewInfo(io_admin_level, io_locked) def get_dmu_offset_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUOffsetPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUOffsetPreview | parameter. | Role:Retrieves the state of the DMUOffsetPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. 
:param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUOffsetPreviewInfo(io_admin_level, io_locked) def get_dmu_review_name_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUReviewNameInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUReviewName | parameter. | Role:Retrieves the state of the DMUReviewName parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUReviewNameInfo(io_admin_level, io_locked) def get_dmu_section_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUSectionPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUSectionPreview | parameter. | Role:Retrieves the state of the DMUSectionPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. 
| | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUSectionPreviewInfo(io_admin_level, io_locked) def get_dmu_shuttle_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUShuttlePreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUShuttlePreview | parameter. | Role:Retrieves the state of the DMUShuttlePreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUShuttlePreviewInfo(io_admin_level, io_locked) def get_dmu_silhouette_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUSilhouettePreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUSilhouettePreview | parameter. | Role:Retrieves the state of the DMUSilhouettePreview parameter in the | current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. 
| If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUSilhouettePreviewInfo(io_admin_level, io_locked) def get_dmu_simplif_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUSimplifPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUSimplifPreview | parameter. | Role:Retrieves the state of the DMUSimplifPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUSimplifPreviewInfo(io_admin_level, io_locked) def get_dmu_swept_vol_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUSweptVolPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUSweptVolPreview | parameter. | Role:Retrieves the state of the DMUSweptVolPreview parameter in the current | environment. 
| | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUSweptVolPreviewInfo(io_admin_level, io_locked) def get_dmu_thickness_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUThicknessPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUThicknessPreview | parameter. | Role:Retrieves the state of the DMUThicknessPreview parameter in the | current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUThicknessPreviewInfo(io_admin_level, io_locked) def get_dmu_vibration_vol_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUVibrationVolPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUVibrationVolPreview | parameter. | Role:Retrieves the state of the DMUVibrationVolPreview parameter in the | current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUVibrationVolPreviewInfo(io_admin_level, io_locked) def get_dmu_wrapping_preview_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetDMUWrappingPreviewInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the DMUWrappingPreview | parameter. | Role:Retrieves the state of the DMUWrappingPreview parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. 
:param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetDMUWrappingPreviewInfo(io_admin_level, io_locked) def get_force_clearance_voxel_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetForceClearanceVoxelInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the ForceClearanceVoxel | parameter. | Role:Retrieves the state of the ForceClearanceVoxel parameter in the | current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetForceClearanceVoxelInfo(io_admin_level, io_locked) def get_force_voxel_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetForceVoxelInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the ForceVoxel | parameter. | Role:Retrieves the state of the ForceVoxel parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. 
| | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetForceVoxelInfo(io_admin_level, io_locked) def get_insert_level_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetInsertLevelInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the InsertLevel | parameter. | Role:Retrieves the state of the InsertLevel parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetInsertLevelInfo(io_admin_level, io_locked) def get_insert_mode_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetInsertModeInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the InsertMode | parameter. | Role:Retrieves the state of the InsertMode parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. 
| | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetInsertModeInfo(io_admin_level, io_locked) def get_marker_2d_auto_naming_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarker2DAutoNamingInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarker2DAutoNamingInfo Retrieves environment informations | for the Marker2DAutoNaming parameter. | Role:Retrieves the state of the Marker2DAutoNaming parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarker2DAutoNamingInfo(io_admin_level, io_locked) def get_marker_3d_auto_naming_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarker3DAutoNamingInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarker3DAutoNamingInfo Retrieves environment informations | for the Marker3DAutoNaming parameter. 
| Role:Retrieves the state of the Marker3DAutoNaming parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarker3DAutoNamingInfo(io_admin_level, io_locked) def get_marker_auto_update_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerAutoUpdateInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the MarkerAutoUpdate | parameter. | Role:Retrieves the state of the MarkerAutoUpdate parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerAutoUpdateInfo(io_admin_level, io_locked) def get_marker_defaults_color(self, o_red: int, o_green: int, o_blue: int) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub GetMarkerDefaultsColor(long oRed, | long oGreen, | long oBlue) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerDefaultsColor Returns the default color of an | annotation (oRed, oGreen, oBlue: RGB values of the color). :param int o_red: :param int o_green: :param int o_blue: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.GetMarkerDefaultsColor(o_red, o_green, o_blue) def get_marker_defaults_color_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerDefaultsColorInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerDefaultsColorInfo Retrieves environment informations | for the MarkerDefaultsColor parameter. | Role:Retrieves the state of the MarkerDefaultsColor parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerDefaultsColorInfo(io_admin_level, io_locked) def get_marker_defaults_dashed_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerDefaultsDashedInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerDefaultsDashedInfo Retrieves environment informations | for the MarkerDefaultsDashed parameter. | Role:Retrieves the state of the MarkerDefaultsDashed parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerDefaultsDashedInfo(io_admin_level, io_locked) def get_marker_defaults_font_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerDefaultsFontInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerDefaultsFont2DInfo Retrieves environment informations | for the MarkerDefaultsFont parameter. | Role:Retrieves the state of the MarkerDefaultsFont parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. 
| | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerDefaultsFontInfo(io_admin_level, io_locked) def get_marker_defaults_size_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerDefaultsSizeInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by CATIAmarkerSettingAtt Retrieves | environment informations for the MarkerDefaultsSize | parameter. | Role:Retrieves the state of the MarkerDefaultsSize parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerDefaultsSizeInfo(io_admin_level, io_locked) def get_marker_defaults_weight_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerDefaultsWeightInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerDefaultsWeightInfo Retrieves environment informations | for the MarkerDefaultsWeight parameter. | Role:Retrieves the state of the MarkerDefaultsWeight parameter in the | current environment. 
| Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerDefaultsWeightInfo(io_admin_level, io_locked) def get_marker_text_color(self, o_red: int, o_green: int, o_blue: int) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub GetMarkerTextColor(long oRed, | long oGreen, | long oBlue) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerTextColor2DInfo Returns the default color of a text | annotation (oRed, oGreen, oBlue: RGB values of the color). :param int o_red: :param int o_green: :param int o_blue: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.GetMarkerTextColor(o_red, o_green, o_blue) def get_marker_text_color_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerTextColorInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerTextColor2DInfo Retrieves environment informations | for the MarkerTextColor parameter. | Role:Retrieves the state of the MarkerTextColor parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. 
| If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerTextColorInfo(io_admin_level, io_locked) def get_marker_text_dashed_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerTextDashedInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerTextDashed2DInfo Retrieves environment informations | for the MarkerTextDashed parameter. | Role:Retrieves the state of the MarkerTextDashed parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerTextDashedInfo(io_admin_level, io_locked) def get_marker_text_weight_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetMarkerTextWeightInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.GetMarkerTextWeight2DInfo Retrieves environment informations | for the MarkerTextWeight parameter. | Role:Retrieves the state of the MarkerTextWeight parameter in the | current environment. | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetMarkerTextWeightInfo(io_admin_level, io_locked) def get_num_url_name_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetNumUrlNameInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the NumUrlName | parameter. | Role:Retrieves the state of the NumUrlName parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. 
:param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetNumUrlNameInfo(io_admin_level, io_locked) def get_publish_auto_launch_browser_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetPublishAutoLaunchBrowserInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the PublishAutoLaunchBrowser | parameter. | Role:Retrieves the state of the PublishAutoLaunchBrowser parameter in the | current environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetPublishAutoLaunchBrowserInfo(io_admin_level, io_locked) def get_scene_defaults_color(self, o_r: int, o_g: int, o_b: int) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub GetSceneDefaultsColor(long oR, | long oG, | long oB) | | Returns the scene background color (oRed, oGreen, oBlue: RGB values of the | color). :param int o_r: :param int o_g: :param int o_b: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.GetSceneDefaultsColor(o_r, o_g, o_b) def get_scene_defaults_color_info(self, io_admin_level: str, io_locked: str) -> bool: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Func GetSceneDefaultsColorInfo(CATBSTR ioAdminLevel, | CATBSTR ioLocked) As boolean | | Retrieves environment informations for the SceneDefaultsColor | parameter. | Role:Retrieves the state of the SceneDefaultsColor parameter in the current | environment. | | Parameters: | | ioAdminLevel | | If the parameter is locked, AdminLevel gives the administration | level that imposes the value of the parameter. | If the parameter is not locked, AdminLevel gives the administration | level that will give the value of the parameter after a reset. | | ioLocked | Indicates if the parameter has been locked. | | Returns: | Indicates if the parameter has been explicitly modified or remain to | the administrated value. :param str io_admin_level: :param str io_locked: :return: bool :rtype: bool """ return self.n_4d_navigator_setting_att.GetSceneDefaultsColorInfo(io_admin_level, io_locked) def set_clearance_voxel_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetClearanceVoxelLock(boolean iLocked) | | Locks or unlocks the ClearanceVoxel parameter. | Role:Locks or unlocks the ClearanceVoxel parameter if it is possible in the | current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetClearanceVoxelLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_clearance_voxel_lock' # # vba_code = """ # # Public Function set_clearance_voxel_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetClearanceVoxelLock iLocked # # set_clearance_voxel_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_clash_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUClashPreviewLock(boolean iLocked) | | Locks or unlocks the DMUClashPreview parameter. | Role:Locks or unlocks the DMUClashPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUClashPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_clash_preview_lock' # # vba_code = """ # # Public Function set_dmu_clash_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUClashPreviewLock iLocked # # set_dmu_clash_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_cut_3d_preview_lock(self, i_locked: bool) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUCut3DPreviewLock(boolean iLocked) | | Locks or unlocks the DMUCut3DPreview parameter. | Role:Locks or unlocks the DMUCut3DPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUCut3DPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_cut_3d_preview_lock' # # vba_code = """ # # Public Function set_dmu_cut_3d_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUCut3DPreviewLock iLocked # # set_dmu_cut_3d_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_distance_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUDistancePreviewLock(boolean iLocked) | | Locks or unlocks the DMUDistancePreview parameter. | Role:Locks or unlocks the DMUDistancePreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. 
:param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUDistancePreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_distance_preview_lock' # # vba_code = """ # # Public Function set_dmu_distance_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUDistancePreviewLock iLocked # # set_dmu_distance_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_free_space_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUFreeSpacePreviewLock(boolean iLocked) | | Locks or unlocks the DMUFreeSpacePreview parameter. | Role:Locks or unlocks the DMUFreeSpacePreview parameter if it is possible | in the current administrative context. In user mode this method will always | return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUFreeSpacePreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_free_space_preview_lock' # # vba_code = """ # # Public Function set_dmu_free_space_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUFreeSpacePreviewLock iLocked # # set_dmu_free_space_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_group_preview_hidden_objects_color(self, i_red: int, i_green: int, i_blue: int) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUGroupPreviewHiddenObjectsColor(long iRed, | long iGreen, | long iBlue) | | Sets the color for the display of hidden objects in DMU Group Preview. :param int i_red: :param int i_green: :param int i_blue: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsColor(i_red, i_green, i_blue) def set_dmu_group_preview_hidden_objects_color_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUGroupPreviewHiddenObjectsColorLock(boolean | iLocked) | | Locks or unlocks the DMUGroupPreviewHiddenObjectsColor | parameter. | Role:Locks or unlocks the DMUGroupPreviewHiddenObjectsColor parameter if it | is possible in the current administrative context. In user mode this method | will always return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsColorLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. 
In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_group_preview_hidden_objects_color_lock' # # vba_code = """ # # Public Function set_dmu_group_preview_hidden_objects_color_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsColorLock iLocked # # set_dmu_group_preview_hidden_objects_color_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_group_preview_hidden_objects_display_mode_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUGroupPreviewHiddenObjectsDisplayModeLock(boolean | iLocked) | | Locks or unlocks the DMUGroupPreviewHiddenObjectsDisplayMode | parameter. | Role:Locks or unlocks the DMUGroupPreviewHiddenObjectsDisplayMode parameter | if it is possible in the current administrative context. In user mode this | method will always return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsDisplayModeLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_group_preview_hidden_objects_display_mode_lock' # # vba_code = """ # # Public Function set_dmu_group_preview_hidden_objects_display_mode_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsDisplayModeLock iLocked # # set_dmu_group_preview_hidden_objects_display_mode_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_group_preview_hidden_objects_low_int_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUGroupPreviewHiddenObjectsLowIntLock(boolean | iLocked) | | Locks or unlocks the DMUGroupPreviewHiddenObjectsLowInt | parameter. | Role:Locks or unlocks the DMUGroupPreviewHiddenObjectsLowInt parameter if | it is possible in the current administrative context. In user mode this method | will always return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsLowIntLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_group_preview_hidden_objects_low_int_lock' # # vba_code = """ # # Public Function set_dmu_group_preview_hidden_objects_low_int_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsLowIntLock iLocked # # set_dmu_group_preview_hidden_objects_low_int_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_group_preview_hidden_objects_opacity_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUGroupPreviewHiddenObjectsOpacityLock(boolean | iLocked) | | Locks or unlocks the DMUGroupPreviewHiddenObjectsOpacity | parameter. | Role:Locks or unlocks the DMUGroupPreviewHiddenObjectsOpacity parameter if | it is possible in the current administrative context. In user mode this method | will always return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsOpacityLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_group_preview_hidden_objects_opacity_lock' # # vba_code = """ # # Public Function set_dmu_group_preview_hidden_objects_opacity_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsOpacityLock iLocked # # set_dmu_group_preview_hidden_objects_opacity_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_group_preview_hidden_objects_pick_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUGroupPreviewHiddenObjectsPickLock(boolean | iLocked) | | Locks or unlocks the DMUGroupPreviewHiddenObjectsPick | parameter. | Role:Locks or unlocks the DMUGroupPreviewHiddenObjectsPick parameter if it | is possible in the current administrative context. In user mode this method | will always return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsPickLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_group_preview_hidden_objects_pick_lock' # # vba_code = """ # # Public Function set_dmu_group_preview_hidden_objects_pick_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUGroupPreviewHiddenObjectsPickLock iLocked # # set_dmu_group_preview_hidden_objects_pick_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_group_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUGroupPreviewLock(boolean iLocked) | | Locks or unlocks the DMUGroupPreview parameter. | Role:Locks or unlocks the DMUGroupPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUGroupPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_group_preview_lock' # # vba_code = """ # # Public Function set_dmu_group_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUGroupPreviewLock iLocked # # set_dmu_group_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_merger_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUMergerPreviewLock(boolean iLocked) | | Locks or unlocks the DMUMergerPreview parameter. | Role:Locks or unlocks the DMUMergerPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUMergerPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_merger_preview_lock' # # vba_code = """ # # Public Function set_dmu_merger_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUMergerPreviewLock iLocked # # set_dmu_merger_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_offset_preview_lock(self, i_locked: bool) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUOffsetPreviewLock(boolean iLocked) | | Locks or unlocks the DMUOffsetPreview parameter. | Role:Locks or unlocks the DMUOffsetPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUOffsetPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_offset_preview_lock' # # vba_code = """ # # Public Function set_dmu_offset_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUOffsetPreviewLock iLocked # # set_dmu_offset_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_review_name_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUReviewNameLock(boolean iLocked) | | Locks or unlocks the DMUReviewName parameter. | Role:Locks or unlocks the DMUReviewName parameter if it is possible in the | current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. 
:param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUReviewNameLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_review_name_lock' # # vba_code = """ # # Public Function set_dmu_review_name_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUReviewNameLock iLocked # # set_dmu_review_name_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_section_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUSectionPreviewLock(boolean iLocked) | | Locks or unlocks the DMUSectionPreview parameter. | Role:Locks or unlocks the DMUSectionPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUSectionPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_section_preview_lock' # # vba_code = """ # # Public Function set_dmu_section_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUSectionPreviewLock iLocked # # set_dmu_section_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_shuttle_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUShuttlePreviewLock(boolean iLocked) | | Locks or unlocks the DMUShuttlePreview parameter. | Role:Locks or unlocks the DMUShuttlePreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUShuttlePreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_shuttle_preview_lock' # # vba_code = """ # # Public Function set_dmu_shuttle_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUShuttlePreviewLock iLocked # # set_dmu_shuttle_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_silhouette_preview_lock(self, i_locked: bool) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUSilhouettePreviewLock(boolean iLocked) | | Locks or unlocks the DMUSilhouettePreview parameter. | Role:Locks or unlocks the DMUSilhouettePreview parameter if it is possible | in the current administrative context. In user mode this method will always | return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUSilhouettePreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_silhouette_preview_lock' # # vba_code = """ # # Public Function set_dmu_silhouette_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUSilhouettePreviewLock iLocked # # set_dmu_silhouette_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_simplif_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUSimplifPreviewLock(boolean iLocked) | | Locks or unlocks the DMUSimplifPreview parameter. | Role:Locks or unlocks the DMUSimplifPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. 
:param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUSimplifPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_simplif_preview_lock' # # vba_code = """ # # Public Function set_dmu_simplif_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUSimplifPreviewLock iLocked # # set_dmu_simplif_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_swept_vol_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUSweptVolPreviewLock(boolean iLocked) | | Locks or unlocks the DMUSweptVolPreview parameter. | Role:Locks or unlocks the DMUSweptVolPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUSweptVolPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_dmu_swept_vol_preview_lock' # # vba_code = """ # # Public Function set_dmu_swept_vol_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUSweptVolPreviewLock iLocked # # set_dmu_swept_vol_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_thickness_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUThicknessPreviewLock(boolean iLocked) | | Locks or unlocks the DMUThicknessPreview parameter. | Role:Locks or unlocks the DMUThicknessPreview parameter if it is possible | in the current administrative context. In user mode this method will always | return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUThicknessPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_thickness_preview_lock' # # vba_code = """ # # Public Function set_dmu_thickness_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUThicknessPreviewLock iLocked # # set_dmu_thickness_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_vibration_vol_preview_lock(self, i_locked: bool) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUVibrationVolPreviewLock(boolean iLocked) | | Locks or unlocks the DMUVibrationVolPreview parameter. | Role:Locks or unlocks the DMUVibrationVolPreview parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUVibrationVolPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_vibration_vol_preview_lock' # # vba_code = """ # # Public Function set_dmu_vibration_vol_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUVibrationVolPreviewLock iLocked # # set_dmu_vibration_vol_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_dmu_wrapping_preview_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetDMUWrappingPreviewLock(boolean iLocked) | | Locks or unlocks the DMUWrappingPreview parameter. | Role:Locks or unlocks the DMUWrappingPreview parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. 
:param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetDMUWrappingPreviewLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_dmu_wrapping_preview_lock' # # vba_code = """ # # Public Function set_dmu_wrapping_preview_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetDMUWrappingPreviewLock iLocked # # set_dmu_wrapping_preview_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_force_clearance_voxel_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetForceClearanceVoxelLock(boolean iLocked) | | Locks or unlocks the ForceClearanceVoxel parameter. | Role:Locks or unlocks the ForceClearanceVoxel parameter if it is possible | in the current administrative context. In user mode this method will always | return E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetForceClearanceVoxelLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_force_clearance_voxel_lock' # # vba_code = """ # # Public Function set_force_clearance_voxel_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetForceClearanceVoxelLock iLocked # # set_force_clearance_voxel_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_force_voxel_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetForceVoxelLock(boolean iLocked) | | Locks or unlocks the ForceVoxel parameter. | Role:Locks or unlocks the ForceVoxel parameter if it is possible in the | current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetForceVoxelLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_force_voxel_lock' # # vba_code = """ # # Public Function set_force_voxel_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetForceVoxelLock iLocked # # set_force_voxel_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_insert_level_lock(self, i_locked: bool) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetInsertLevelLock(boolean iLocked) | | Locks or unlocks the InsertMode parameter. | Role:Locks or unlocks the InsertMode parameter if it is possible in the | current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetInsertLevelLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_insert_level_lock' # # vba_code = """ # # Public Function set_insert_level_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetInsertLevelLock iLocked # # set_insert_level_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_insert_mode_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetInsertModeLock(boolean iLocked) | | Locks or unlocks the InsertMode parameter. | Role:Locks or unlocks the InsertMode parameter if it is possible in the | current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. 
:param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetInsertModeLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_insert_mode_lock' # # vba_code = """ # # Public Function set_insert_mode_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetInsertModeLock iLocked # # set_insert_mode_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_2d_auto_naming_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarker2DAutoNamingLock(boolean iLocked) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarker2DAutoNamingLock Locks or unlocks the | Marker2DAutoNaming parameter. | Role:Locks or unlocks the Marker2DAutoNaming parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarker2DAutoNamingLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_marker_2d_auto_naming_lock' # # vba_code = """ # # Public Function set_marker_2d_auto_naming_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetMarker2DAutoNamingLock iLocked # # set_marker_2d_auto_naming_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_3d_auto_naming_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarker3DAutoNamingLock(boolean iLocked) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarker3DAutoNamingLock Locks or unlocks the | Marker3DAutoNaming parameter. | Role:Locks or unlocks the Marker3DAutoNaming parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarker3DAutoNamingLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_marker_3d_auto_naming_lock' # # vba_code = """ # # Public Function set_marker_3d_auto_naming_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetMarker3DAutoNamingLock iLocked # # set_marker_3d_auto_naming_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_auto_update_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarkerAutoUpdateLock(boolean iLocked) | | Locks or unlocks the MarkerAutoUpdate parameter. | Role:Locks or unlocks the MarkerAutoUpdate parameter if it is possible in | the current administrative context. In user mode this method will always return | E_FAIL. | | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarkerAutoUpdateLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'set_marker_auto_update_lock' # # vba_code = """ # # Public Function set_marker_auto_update_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetMarkerAutoUpdateLock iLocked # # set_marker_auto_update_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_defaults_color(self, i_red: int, i_green: int, i_blue: int) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarkerDefaultsColor(long iRed, | long iGreen, | long iBlue) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarkerDefaultsColor Sets the default color of an annotation | (iRed, iGreen, iBlue: RGB values for the desired color) :param int i_red: :param int i_green: :param int i_blue: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarkerDefaultsColor(i_red, i_green, i_blue) def set_marker_defaults_color_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarkerDefaultsColorLock(boolean iLocked) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarkerDefaultsColorLock Locks or unlocks the | MarkerDefaultsColor parameter. | Role:Locks or unlocks the MarkerDefaultsColor parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarkerDefaultsColorLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_marker_defaults_color_lock' # # vba_code = """ # # Public Function set_marker_defaults_color_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetMarkerDefaultsColorLock iLocked # # set_marker_defaults_color_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_defaults_dashed_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarkerDefaultsDashedLock(boolean iLocked) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarkerDefaultsDashedLock Locks or unlocks the | MarkerDefaultsDashed parameter. | Role:Locks or unlocks the MarkerDefaultsDashed parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarkerDefaultsDashedLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_marker_defaults_dashed_lock' # # vba_code = """ # # Public Function set_marker_defaults_dashed_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetMarkerDefaultsDashedLock iLocked # # set_marker_defaults_dashed_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_defaults_font_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarkerDefaultsFontLock(boolean iLocked) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarkerDefaultsFont2DLock Locks or unlocks the | MarkerDefaultsFont parameter. | Role:Locks or unlocks the MarkerDefaultsSize parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarkerDefaultsFontLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_marker_defaults_font_lock' # # vba_code = """ # # Public Function set_marker_defaults_font_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetMarkerDefaultsFontLock iLocked # # set_marker_defaults_font_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_defaults_size_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarkerDefaultsSizeLock(boolean iLocked) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarkerTextDefaultsSize2DLock Locks or unlocks the | MarkerDefaultsSize parameter. | Role:Locks or unlocks the MarkerDefaultsSize parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarkerDefaultsSizeLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'set_marker_defaults_size_lock' # # vba_code = """ # # Public Function set_marker_defaults_size_lock(n_4d_navigator_setting_att) # # Dim iLocked (2) # # n_4d_navigator_setting_att.SetMarkerDefaultsSizeLock iLocked # # set_marker_defaults_size_lock = iLocked # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def set_marker_defaults_weight_lock(self, i_locked: bool) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub SetMarkerDefaultsWeightLock(boolean iLocked) | | Deprecated: | R17 This method will be replaced by | MarkerSettingAtt.SetMarkerDefaultsWeightLock Locks or unlocks the | MarkerDefaultsWeight parameter. | Role:Locks or unlocks the MarkerDefaultsColor parameter if it is | possible in the current administrative context. In user mode this method will | always return E_FAIL. | Parameters: | | iLocked | the locking operation to be performed Legal | values: | TRUE : to lock the parameter. | FALSE: to unlock the parameter. :param bool i_locked: :return: None :rtype: None """ return self.n_4d_navigator_setting_att.SetMarkerDefaultsWeightLock(i_locked) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
def set_marker_text_color(self, i_red: int, i_green: int, i_blue: int) -> None:
    """
    Set the default color of a text annotation from RGB components.

    Thin wrapper around the native ``SetMarkerTextColor`` call (CAA V5:
    deprecated in R17 in favour of ``MarkerSettingAtt.SetMarkerTextColor2D``).

    :param int i_red: red component of the desired color.
    :param int i_green: green component of the desired color.
    :param int i_blue: blue component of the desired color.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetMarkerTextColor(i_red, i_green, i_blue)

def set_marker_text_color_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``MarkerTextColor`` setting parameter.

    Thin wrapper around the native ``SetMarkerTextColorLock`` call (CAA V5:
    deprecated in R17 in favour of
    ``MarkerSettingAtt.SetMarkerTextColor2DLock``).  Locking only takes
    effect in an administrative context; per the CAA documentation, in user
    mode the native call always returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetMarkerTextColorLock(i_locked)

def set_marker_text_dashed_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``MarkerTextDashed`` setting parameter.

    Thin wrapper around the native ``SetMarkerTextDashedLock`` call (CAA V5:
    deprecated in R17 in favour of
    ``MarkerSettingAtt.SetMarkerTextDashed2DLock``).  Locking only takes
    effect in an administrative context; per the CAA documentation, in user
    mode the native call always returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetMarkerTextDashedLock(i_locked)
def set_marker_text_weight_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``MarkerTextWeight`` setting parameter.

    Thin wrapper around the native ``SetMarkerTextWeightLock`` call (CAA V5:
    deprecated in R17 in favour of
    ``MarkerSettingAtt.SetMarkerTextWeight2DLock``).  Locking only takes
    effect in an administrative context; per the CAA documentation, in user
    mode the native call always returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetMarkerTextWeightLock(i_locked)
def set_num_url_name_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``NumUrlName`` setting parameter.

    Thin wrapper around the native ``SetNumUrlNameLock`` call.  Locking only
    takes effect in an administrative context; per the CAA documentation, in
    user mode the native call always returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetNumUrlNameLock(i_locked)

def set_publish_auto_launch_browser_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``PublishAutoLaunchBrowser`` setting parameter.

    Thin wrapper around the native ``SetPublishAutoLaunchBrowserLock`` call.
    Locking only takes effect in an administrative context; per the CAA
    documentation, in user mode the native call always returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetPublishAutoLaunchBrowserLock(i_locked)

def set_scene_defaults_color(self, i_r: int, i_g: int, i_b: int) -> None:
    """
    Set the scene background color from RGB components.

    Thin wrapper around the native ``SetSceneDefaultsColor`` call.

    :param int i_r: red component of the desired color.
    :param int i_g: green component of the desired color.
    :param int i_b: blue component of the desired color.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetSceneDefaultsColor(i_r, i_g, i_b)

def set_scene_defaults_color_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``SceneDefaultsColor`` setting parameter.

    Thin wrapper around the native ``SetSceneDefaultsColorLock`` call.
    Locking only takes effect in an administrative context; per the CAA
    documentation, in user mode the native call always returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    setting_att = self.n_4d_navigator_setting_att
    return setting_att.SetSceneDefaultsColorLock(i_locked)

def __repr__(self):
    """Return a debug representation showing the setting controller's name."""
    return 'N4DNavigatorSettingAtt(name="{}")'.format(self.name)
42.481874
120
0.544358
18,946
192,188
5.353795
0.025916
0.037858
0.028511
0.045143
0.885836
0.865034
0.800489
0.76133
0.73411
0.725336
0
0.027072
0.397262
192,188
4,523
121
42.491267
0.848566
0.699778
0
0.090226
1
0
0.001296
0.001296
0
0
0
0
0
1
0.406015
false
0
0.002506
0.002506
0.724311
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
9
7efabbacddeaa069ab8f2dbc8d21860c2b7636fd
21,926
py
Python
google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/pagers.py
LaudateCorpus1/python-websecurityscanner
7f1c666ea384c81ad3fe50a0c5926c2f1ec8e9bf
[ "Apache-2.0" ]
14
2020-09-19T18:32:07.000Z
2022-03-31T04:19:12.000Z
google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/pagers.py
LaudateCorpus1/python-websecurityscanner
7f1c666ea384c81ad3fe50a0c5926c2f1ec8e9bf
[ "Apache-2.0" ]
45
2020-01-30T22:23:03.000Z
2022-03-31T22:49:19.000Z
google/cloud/websecurityscanner_v1alpha/services/web_security_scanner/pagers.py
LaudateCorpus1/python-websecurityscanner
7f1c666ea384c81ad3fe50a0c5926c2f1ec8e9bf
[ "Apache-2.0" ]
6
2020-01-30T20:44:21.000Z
2022-01-29T08:15:07.000Z
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
    Any,
    AsyncIterator,
    Awaitable,
    Callable,
    Sequence,
    Tuple,
    Optional,
    Iterator,
)

from google.cloud.websecurityscanner_v1alpha.types import crawled_url
from google.cloud.websecurityscanner_v1alpha.types import finding
from google.cloud.websecurityscanner_v1alpha.types import scan_config
from google.cloud.websecurityscanner_v1alpha.types import scan_run
from google.cloud.websecurityscanner_v1alpha.types import web_security_scanner


class ListScanConfigsPager:
    """Synchronous pager over ``list_scan_configs`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListScanConfigsResponse`
    and iterates its ``scan_configs`` items, transparently issuing further
    ``ListScanConfigs`` requests while a ``next_page_token`` is present.
    Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., web_security_scanner.ListScanConfigsResponse],
        request: web_security_scanner.ListScanConfigsRequest,
        response: web_security_scanner.ListScanConfigsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListScanConfigsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListScanConfigsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListScanConfigsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[web_security_scanner.ListScanConfigsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[scan_config.ScanConfig]:
        return (item for page in self.pages for item in page.scan_configs)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListScanConfigsAsyncPager:
    """Asynchronous pager over ``list_scan_configs`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListScanConfigsResponse`
    and asynchronously iterates its ``scan_configs`` items, transparently
    awaiting further ``ListScanConfigs`` requests while a ``next_page_token``
    is present.  Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[web_security_scanner.ListScanConfigsResponse]],
        request: web_security_scanner.ListScanConfigsRequest,
        response: web_security_scanner.ListScanConfigsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListScanConfigsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListScanConfigsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListScanConfigsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[web_security_scanner.ListScanConfigsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[scan_config.ScanConfig]:
        async def _flatten():
            async for page in self.pages:
                for item in page.scan_configs:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListScanRunsPager:
    """Synchronous pager over ``list_scan_runs`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListScanRunsResponse`
    and iterates its ``scan_runs`` items, transparently issuing further
    ``ListScanRuns`` requests while a ``next_page_token`` is present.
    Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., web_security_scanner.ListScanRunsResponse],
        request: web_security_scanner.ListScanRunsRequest,
        response: web_security_scanner.ListScanRunsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListScanRunsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListScanRunsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListScanRunsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[web_security_scanner.ListScanRunsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[scan_run.ScanRun]:
        return (item for page in self.pages for item in page.scan_runs)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListScanRunsAsyncPager:
    """Asynchronous pager over ``list_scan_runs`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListScanRunsResponse`
    and asynchronously iterates its ``scan_runs`` items, transparently
    awaiting further ``ListScanRuns`` requests while a ``next_page_token`` is
    present.  Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[web_security_scanner.ListScanRunsResponse]],
        request: web_security_scanner.ListScanRunsRequest,
        response: web_security_scanner.ListScanRunsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListScanRunsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListScanRunsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListScanRunsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[web_security_scanner.ListScanRunsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[scan_run.ScanRun]:
        async def _flatten():
            async for page in self.pages:
                for item in page.scan_runs:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListCrawledUrlsPager:
    """Synchronous pager over ``list_crawled_urls`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListCrawledUrlsResponse`
    and iterates its ``crawled_urls`` items, transparently issuing further
    ``ListCrawledUrls`` requests while a ``next_page_token`` is present.
    Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., web_security_scanner.ListCrawledUrlsResponse],
        request: web_security_scanner.ListCrawledUrlsRequest,
        response: web_security_scanner.ListCrawledUrlsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListCrawledUrlsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListCrawledUrlsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListCrawledUrlsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[web_security_scanner.ListCrawledUrlsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[crawled_url.CrawledUrl]:
        return (item for page in self.pages for item in page.crawled_urls)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListCrawledUrlsAsyncPager:
    """Asynchronous pager over ``list_crawled_urls`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListCrawledUrlsResponse`
    and asynchronously iterates its ``crawled_urls`` items, transparently
    awaiting further ``ListCrawledUrls`` requests while a ``next_page_token``
    is present.  Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[web_security_scanner.ListCrawledUrlsResponse]],
        request: web_security_scanner.ListCrawledUrlsRequest,
        response: web_security_scanner.ListCrawledUrlsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListCrawledUrlsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListCrawledUrlsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListCrawledUrlsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[web_security_scanner.ListCrawledUrlsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[crawled_url.CrawledUrl]:
        async def _flatten():
            async for page in self.pages:
                for item in page.crawled_urls:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListFindingsPager:
    """Synchronous pager over ``list_findings`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListFindingsResponse`
    and iterates its ``findings`` items, transparently issuing further
    ``ListFindings`` requests while a ``next_page_token`` is present.
    Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., web_security_scanner.ListFindingsResponse],
        request: web_security_scanner.ListFindingsRequest,
        response: web_security_scanner.ListFindingsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListFindingsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListFindingsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListFindingsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[web_security_scanner.ListFindingsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[finding.Finding]:
        return (item for page in self.pages for item in page.findings)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListFindingsAsyncPager:
    """Asynchronous pager over ``list_findings`` results.

    Wraps an initial
    :class:`google.cloud.websecurityscanner_v1alpha.types.ListFindingsResponse`
    and asynchronously iterates its ``findings`` items, transparently
    awaiting further ``ListFindings`` requests while a ``next_page_token`` is
    present.  Attribute lookups fall through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[web_security_scanner.ListFindingsResponse]],
        request: web_security_scanner.ListFindingsRequest,
        response: web_security_scanner.ListFindingsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The list method that created this pager.
            request (google.cloud.websecurityscanner_v1alpha.types.ListFindingsRequest):
                The initial request object.
            response (google.cloud.websecurityscanner_v1alpha.types.ListFindingsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent with each
                request as metadata.
        """
        self._method = method
        self._request = web_security_scanner.ListFindingsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[web_security_scanner.ListFindingsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[finding.Finding]:
        async def _flatten():
            async for page in self.pages:
                for item in page.findings:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
40.010949
96
0.683618
2,379
21,926
6.055065
0.081967
0.053315
0.051232
0.092468
0.935925
0.935925
0.935925
0.925304
0.913363
0.908504
0
0.003703
0.236386
21,926
547
97
40.084095
0.856657
0.457904
0
0.777328
0
0
0.007441
0
0
0
0
0
0
1
0.145749
false
0
0.024292
0.064777
0.283401
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7efc0083e1d5535d6d28883c7890c23a8cb0a1dd
7,466
py
Python
oldfiles/oldcode/tools/assembler/instructions.py
MoebiuZ/OpcodeOne
213594bd5504dc95e9e026d09d4d15dbfffdd40c
[ "Apache-2.0" ]
5
2017-04-22T17:13:50.000Z
2021-01-19T18:27:58.000Z
oldfiles/oldcode/tools/assembler/instructions.py
MoebiuZ/OpcodeOne
213594bd5504dc95e9e026d09d4d15dbfffdd40c
[ "Apache-2.0" ]
3
2017-04-17T13:24:57.000Z
2017-06-05T15:25:14.000Z
oldfiles/oldcode/tools/assembler/instructions.py
MoebiuZ/OpcodeOne
213594bd5504dc95e9e026d09d4d15dbfffdd40c
[ "Apache-2.0" ]
2
2017-04-18T19:26:52.000Z
2017-04-22T17:14:21.000Z
import re class Assembler: def inst_ARITH(self, func, reg1 = "%A", reg2 = "%A", reg3 = "%A"): self.checkInCode() opcode = self.OPCODES['ARITH'] self.push_code24((opcode << 16) | ( (self.ARITH_FUNCS[func] << 4) | self.REGISTERS[reg1] ) << 8 | (self.REGISTERS[reg2] << 4 | self.REGISTERS[reg3])) def inst_CALL(self, address): self.checkInCode() opcode = self.OPCODES['CALL'] if re.match(self.HEX, address): addr = address elif re.match(self.ALPHANUMERIC, address): self.enqueuelabel(address, self.current_code_addr+1) addr = "0xdeadbe" self.push_code24(opcode << 16) self.push_code24(int(addr, 0)) def inst_LD(self, dest, src): self.checkInCode() opcode = self.OPCODES['LD'] mode = '' r_1 = '%A' r_2 = '%A' r_3 = '%A' value = '' r_1 = dest.upper() if re.match(self.REG, dest): mode = "REGISTER" r_src = src.upper() elif re.match(self.HEX + "|" + self.INT, src): mode = "VALUE" value = src elif re.match(self.ALPHANUMERIC, src): mode = "VALUE" self.enqueuelabel(src, self.current_code_addr+1) value = "0xdeadbe" self.push_code24((opcode << 16) | (((self.MODES[mode] << 4) | (self.REGISTERS[r_1])) << 8) | (self.REGISTERS[r_2] << 4)) if value != '': self.push_code24(int(value, 0)) def inst_DBG(self, src): self.checkInCode() opcode = self.OPCODES['DBG'] r_1 = "%A" addr = "" value = src.strip("()") if re.match("\(" + self.REG + "\)", src): mode = "INDIRECT" r_1 = value elif re.match(self.REG, value): mode = "REGISTER" r_1 = value elif re.match(self.HEX, value): mode = "ABSOLUTE" addr = value elif re.match(self.ALPHANUMERIC, value): mode = "ABSOLUTE" self.enqueuelabel(value, self.current_code_addr+1) addr = "0xdeadbe" self.push_code24((opcode << 16) | (((self.MODES[mode] << 4) | (self.REGISTERS[r_1])) << 8) | 0x00) if addr != '': self.push_code24(int(addr, 0)) def inst_HALT(self): self.checkInCode() opcode = self.OPCODES['HALT'] self.push_code24(opcode << 16) def inst_MR(self, dest, src, sign, ofst): self.checkInCode() opcode = self.OPCODES['MR'] mode = '' r_1 = '%A' r_2 = '%A' r_3 = '%A' 
addr = '' offset = '' near = False r_1 = dest.upper() if re.match(self.REG, src, re.IGNORECASE): mode += "INDIRECT" r_2 = src.upper() elif re.match(self.HEX, src): mode += "ABSOLUTE" addr = src elif re.match(self.ALPHANUMERIC, src): mode = "ABSOLUTE" self.enqueuelabel(src, self.current_code_addr+1) offset = "0xdeadbe" if (sign != None): # There is offset mode += sign if re.match(self.REG, ofst, re.IGNORECASE): mode += "REG" r_3 = ofst.upper() elif re.match(self.INT, ofst): if int(ofst, 0) <= 15: mode += "NEAR" near = True r_3 = ofst elif int(ofst, 0) > self.MAX_INT: self.printerror("Max. offset 16777215") else: mode += "FAR" offset = ofst else: mode += "FAR" offset = ofst self.push_code24((opcode << 16) | (((self.MODES[mode] << 4) | self.REGISTERS[r_1]) << 8) | ((self.REGISTERS[r_2] << 4) | (int(r_3) if near else self.REGISTERS[r_3]))) if addr != '': self.push_code24(int(addr, 0)) if offset != '': self.push_code24(int(offset, 0)) def inst_MW(self, dest, sign, ofst, src): self.checkInCode() opcode = self.OPCODES['MW'] mode = '' r_1 = '%A' r_2 = '%A' r_3 = '%A' addr = '' offset = '' near = False r_2 = src.upper() if re.match(self.REG, dest, re.IGNORECASE): mode += "INDIRECT" r_1 = dest.upper() elif re.match(self.HEX, dest): mode += "ABSOLUTE" addr = dest elif re.match(self.ALPHANUMERIC, dest): mode = "ABSOLUTE" self.enqueuelabel(dest, self.current_code_addr+1) offset = "0xdeadbe" if (sign != None): # There is offset mode += sign if re.match(self.REG, ofst, re.IGNORECASE): mode += "REG" r_3 = ofst.upper() elif re.match(self.INT, ofst): if int(ofst, 0) <= 15: mode += "NEAR" near = True r_3 = ofst elif int(ofst, 0) > self.MAX_INT: self.printerror("Max. 
offset 16777215") else: mode += "FAR" offset = ofst else: mode += "FAR" offset = ofst self.push_code24((opcode << 16) | (((self.MODES[mode] << 4) | self.REGISTERS[r_1]) << 8) | ((self.REGISTERS[r_2] << 4) | (int(r_3) if near else self.REGISTERS[r_3]))) if addr != '': self.push_code24(int(addr, 0)) if offset != '': self.push_code24(int(offset, 0)) def inst_NOP(self): self.checkInCode() opcode = self.OPCODES['NOP'] self.push_code24(opcode << 16) def inst_POP(self, register): self.checkInCode() opcode = self.OPCODES['POP'] self.push_code24((opcode << 16) | (self.REGISTERS[register] << 8)) def inst_PUSH(self, register): self.checkInCode() opcode = self.OPCODES['PUSH'] self.push_code24((opcode << 16) | (self.REGISTERS[register] << 8)) def inst_RET(self): self.checkInCode() opcode = self.OPCODES['RET'] self.push_code24(opcode << 16) def inst_VR(self, dest, src, sign, ofst): self.checkInCode() opcode = self.OPCODES['VR'] mode = '' r_1 = '%A' r_2 = '%A' r_3 = '%A' addr = '' offset = '' near = False r_1 = dest.upper() if re.match(self.REG, src, re.IGNORECASE): mode += "INDIRECT" r_2 = src.upper() elif re.match(self.HEX, src): mode += "ABSOLUTE" addr = src elif re.match(self.ALPHANUMERIC, src): mode = "ABSOLUTE" self.enqueuelabel(src, self.current_code_addr+1) offset = "0xdeadbe" if (sign != None): # There is offset mode += sign if re.match(self.REG, ofst, re.IGNORECASE): mode += "REG" r_3 = ofst.upper() elif re.match(self.INT, ofst): if int(ofst, 0) <= 15: mode += "NEAR" near = True r_3 = ofst elif int(ofst, 0) > self.MAX_INT: self.printerror("Max. 
offset 16777215") else: mode += "FAR" offset = ofst else: mode += "FAR" offset = ofst self.push_code24((opcode << 16) | (((self.MODES[mode] << 4) | self.REGISTERS[r_1]) << 8) | ((self.REGISTERS[r_2] << 4) | (int(r_3) if near else self.REGISTERS[r_3]))) if addr != '': self.push_code24(int(addr, 0)) if offset != '': self.push_code24(int(offset, 0)) def inst_VW(self, dest, sign, ofst, src): self.checkInCode() opcode = self.OPCODES['VW'] mode = '' r_1 = '%A' r_2 = '%A' r_3 = '%A' addr = '' offset = '' near = False r_2 = src.upper() if re.match(self.REG, dest, re.IGNORECASE): mode += "INDIRECT" r_1 = dest.upper() elif re.match(self.HEX, dest): mode += "ABSOLUTE" addr = dest elif re.match(self.ALPHANUMERIC, dest): mode = "ABSOLUTE" self.enqueuelabel(dest, self.current_code_addr+1) offset = "0xdeadbe" if (sign != None): # There is offset mode += sign if re.match(self.REG, ofst, re.IGNORECASE): mode += "REG" r_3 = ofst.upper() elif re.match(self.INT, ofst): if int(ofst, 0) <= 15: mode += "NEAR" near = True r_3 = ofst elif int(ofst, 0) > self.MAX_INT: self.printerror("Max. offset 16777215") else: mode += "FAR" offset = ofst else: mode += "FAR" offset = ofst self.push_code24((opcode << 16) | (((self.MODES[mode] << 4) | self.REGISTERS[r_1]) << 8) | ((self.REGISTERS[r_2] << 4) | (int(r_3) if near else self.REGISTERS[r_3]))) if addr != '': self.push_code24(int(addr, 0)) if offset != '': self.push_code24(int(offset, 0))
23.55205
168
0.592419
1,084
7,466
3.97786
0.072878
0.047078
0.07398
0.062616
0.884276
0.855056
0.80334
0.745594
0.714054
0.707792
0
0.040236
0.227699
7,466
316
169
23.626582
0.707596
0.008438
0
0.792969
0
0
0.056502
0
0
0
0.00811
0
0
1
0.050781
false
0
0.003906
0
0.058594
0.015625
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7d4aad044e265874045457fe8813057bfeca447a
10,379
py
Python
Server-GPT2/GPT2Inference.py
hritik5102/Fake-news-classification-model
52b23e02a2fa01b2bfb32ef5240ef557d2c09460
[ "MIT" ]
8
2021-05-18T13:57:57.000Z
2022-03-22T12:35:20.000Z
Server-GPT2/GPT2Inference.py
hritik5102/Fake-news-classification-model
52b23e02a2fa01b2bfb32ef5240ef557d2c09460
[ "MIT" ]
1
2021-05-09T12:54:49.000Z
2021-06-05T10:36:14.000Z
Server-GPT2/GPT2Inference.py
hritik5102/Fake-news-classification-model
52b23e02a2fa01b2bfb32ef5240ef557d2c09460
[ "MIT" ]
1
2021-09-04T11:16:19.000Z
2021-09-04T11:16:19.000Z
import os # For Removing this warning : "Could not load dynamic library 'cudart64_110.dll'; dlerror: cudart64_110.dll not found" os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' import logging logging.getLogger('tensorflow').disabled = True import numpy as np import torch from transformers import (set_seed, TrainingArguments, Trainer, GPT2Config, GPT2Tokenizer, AdamW, get_linear_schedule_with_warmup, GPT2ForSequenceClassification) class GPT2: def __init__(self): # Look for gpu to use. Will use `cpu` by default if no gpu found. self.device = torch.device( 'cuda' if torch.cuda.is_available() else 'cpu') # device = torch.device('cpu') print("Device: ", self.device) _ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) model_path = os.path.join(_ROOT, "Models","GPT2_Model","model") self.tokenizer = GPT2Tokenizer.from_pretrained( pretrained_model_name_or_path=model_path) self.gpt_model = GPT2ForSequenceClassification.from_pretrained( pretrained_model_name_or_path=model_path) self.gpt_model.eval() def predict(self, text): self.gpt_model.to(self.device) text = text[:1024] inputs = self.tokenizer(text, return_tensors="pt").to(self.device) # print(inputs) # inputs = inputs[:1024] outputs = self.gpt_model(**inputs) logits = outputs[0] predict_label = logits.argmax(axis=-1).flatten().tolist() return predict_label[0] # The following code runs only while testing. if __name__ == "__main__": sample = """Is it possible that our ancient ancestors knew the secrets of levitation? Technology that has since been lost in time and space? Is it possible that great ancient civilizations like the ancient Egyptians, Olmec, Pre-Inca and Inca deciphered the secrets of levitation and other technologies that have been labeled by today’s society as impossible, mythological? And if they did, is it possible that they used these ‘forgotten technologies’ to erect some of the most incredible ancient constructions on our planet? 
There are dozens of amazing megalithic sites on our planet that defy our modern-day capabilities: Tiahuanaco, The Pyramids of the Giza plateau, Puma Punku, and Stonehenge among others. All of these sites were built using incredible blocks of stone that weight up to hundreds of tons, blocks of stone that our modern-day technologies have a hard time dealing with. So why did the ancient use such megalithic blocks of stone when they could have used smaller blocks and achieve a similar if not identical result? Is it possible that ancient man possessed technologies that are lost today? Is it possible they had knowledge that surpasses our very own understanding? According to some researchers, it is possible that ancient man mastered the ‘art of levitation’ which allowed them to defy known physics and move and manipulate massive objects with extreme ease. Tiahuanaco: defying modern-technology 13.000 feet above sea level stand the incredible ancient ruins of Tiahuanaco and its incredible ‘Sun Gate’. “La Puerta del Sol” or Sun Gate is an elaborately carved structure that is composed of stone blocks that weigh over ten tons. It is still a mystery how ancient managed to cut, transport and place these blocks of stone. Temple of Jupiter Baalbek The Temple of Jupiter located in Baalbek, Lebanon is another masterpiece of ancient engineering where huge blocks of stone were put together to form one of the greatest ancient sites on Earth. The foundation of the Temple of Jupiter contains three of the most massive stones ever quarried by mankind. The three foundation blocks together weigh 3.000 tons. If you ask yourself what type of vehicle would be used to transport them, the answer is NONE. Somehow, ancient man was able to quarry, transport and put them into place with such precision that not a single sheet of paper could fit in-between them. At Baalbek, we have the ‘stone of the pregnant women’ which is one of the largest stones ever cut my mankind, with a weight of 1,200 tons. 
Egyptian Pyramids: A mystery to mainstream science The Egyptian pyramids are one of the ‘mission impossible’ constructions that have caused amazement among everyone who has had the opportunity to visit them. Even today, no one knows for a fact how ancient man was able to erect such marvelous structures. Mainstream science has proposed that it took a workforce of around 5000 men, working for twenty years to build them using ropes, ramps, and brute force. Abul Hasan Ali Al-Masudi, known as the Herodotus of the Arabs wrote about how the ancient Egyptians built the pyramids in the distant past. Al-Mas’udi was an Arab historian and geographer and was one of the first to combine history and scientific geography in a large-scale work. Al-Masudi wrote about how ancient Egyptians transported the huge blocks of stone used to build the pyramids. According to him, a ‘magic papyrus’ was placed under each of the blocks of stone which allowed them to be transported. After placing the magical papyrus beneath the blocks, the stone was struck with a ‘metal rod’ that made the blocks of stone levitate and move along the path paved with stones and fenced on either side by metal poles. This allowed the stones to move for around 50-meters after which the process had to be repeated in order to get the blocks of stone to where they needed to be. Was Al-Masudi objective when he wrote about the pyramids? Or is it possible that just like many others, he was simply amazed by their magnificence, concluded that the ancient Egyptians must have used extraordinary means to construct the pyramids? What if, levitation technology was present on Earth in the distant past and ancient civilizations like the Egyptians, Inca or Pre-Inca knew the secrets of levitation? What if Levitation was possible in the past… but even today? Is it possible that our ancient ancestors knew the secrets of levitation? Technology that has since been lost in time and space? 
Is it possible that great ancient civilizations like the ancient Egyptians, Olmec, Pre-Inca and Inca deciphered the secrets of levitation and other technologies that have been labeled by today’s society as impossible, mythological? And if they did, is it possible that they used these ‘forgotten technologies’ to erect some of the most incredible ancient constructions on our planet? There are dozens of amazing megalithic sites on our planet that defy our modern-day capabilities: Tiahuanaco, The Pyramids of the Giza plateau, Puma Punku, and Stonehenge among others. All of these sites were built using incredible blocks of stone that weight up to hundreds of tons, blocks of stone that our modern-day technologies have a hard time dealing with. So why did the ancient use such megalithic blocks of stone when they could have used smaller blocks and achieve a similar if not identical result? Is it possible that ancient man possessed technologies that are lost today? Is it possible they had knowledge that surpasses our very own understanding? According to some researchers, it is possible that ancient man mastered the ‘art of levitation’ which allowed them to defy known physics and move and manipulate massive objects with extreme ease. Tiahuanaco: defying modern-technology 13.000 feet above sea level stand the incredible ancient ruins of Tiahuanaco and its incredible ‘Sun Gate’. “La Puerta del Sol” or Sun Gate is an elaborately carved structure that is composed of stone blocks that weigh over ten tons. It is still a mystery how ancient managed to cut, transport and place these blocks of stone. Temple of Jupiter Baalbek The Temple of Jupiter located in Baalbek, Lebanon is another masterpiece of ancient engineering where huge blocks of stone were put together to form one of the greatest ancient sites on Earth. The foundation of the Temple of Jupiter contains three of the most massive stones ever quarried by mankind. The three foundation blocks together weigh 3.000 tons. 
If you ask yourself what type of vehicle would be used to transport them, the answer is NONE. Somehow, ancient man was able to quarry, transport and put them into place with such precision that not a single sheet of paper could fit in-between them. At Baalbek, we have the ‘stone of the pregnant women’ which is one of the largest stones ever cut my mankind, with a weight of 1,200 tons. Egyptian Pyramids: A mystery to mainstream science The Egyptian pyramids are one of the ‘mission impossible’ constructions that have caused amazement among everyone who has had the opportunity to visit them. Even today, no one knows for a fact how ancient man was able to erect such marvelous structures. Mainstream science has proposed that it took a workforce of around 5000 men, working for twenty years to build them using ropes, ramps, and brute force. Abul Hasan Ali Al-Masudi, known as the Herodotus of the Arabs wrote about how the ancient Egyptians built the pyramids in the distant past. Al-Mas’udi was an Arab historian and geographer and was one of the first to combine history and scientific geography in a large-scale work. Al-Masudi wrote about how ancient Egyptians transported the huge blocks of stone used to build the pyramids. According to him, a ‘magic papyrus’ was placed under each of the blocks of stone which allowed them to be transported. After placing the magical papyrus beneath the blocks, the stone was struck with a ‘metal rod’ that made the blocks of stone levitate and move along the path paved with stones and fenced on either side by metal poles. This allowed the stones to move for around 50-meters after which the process had to be repeated in order to get the blocks of stone to where they needed to be. Was Al-Masudi objective when he wrote about the pyramids? Or is it possible that just like many others, he was simply amazed by their magnificence, concluded that the ancient Egyptians must have used extraordinary means to construct the pyramids? 
What if, levitation technology was present on Earth in the distant past and ancient civilizations like the Egyptians, Inca or Pre-Inca knew the secrets of levitation? What if Levitation was possible in the past… but even today?""" gpt = GPT2() print(gpt.predict(sample))
97
1,131
0.785336
1,687
10,379
4.804979
0.235329
0.01357
0.028868
0.019738
0.885517
0.881569
0.881569
0.881569
0.881569
0.881569
0
0.007968
0.177763
10,379
106
1,132
97.915094
0.941176
0.028037
0
0.459016
0
0.360656
0.868565
0
0
0
0
0
0
1
0.032787
false
0.032787
0.081967
0
0.147541
0.032787
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
adfa12a853b24e5ddc35c7afd0512504f2574009
109
py
Python
AlgorithmTest/BOJ_STEP_PYTHON/Step1/BOJ10171.py
bluesky0960/AlgorithmTest
35e6c01b1c25bf13d4c034c047f3dd3b67f1578e
[ "MIT" ]
null
null
null
AlgorithmTest/BOJ_STEP_PYTHON/Step1/BOJ10171.py
bluesky0960/AlgorithmTest
35e6c01b1c25bf13d4c034c047f3dd3b67f1578e
[ "MIT" ]
null
null
null
AlgorithmTest/BOJ_STEP_PYTHON/Step1/BOJ10171.py
bluesky0960/AlgorithmTest
35e6c01b1c25bf13d4c034c047f3dd3b67f1578e
[ "MIT" ]
null
null
null
#https://www.acmicpc.net/problem/10171 print("\ /\\") print(" ) ( ')") print("( / )") print(" \(__)|")
18.166667
38
0.477064
10
109
5
0.7
0.6
0.6
0
0
0
0
0
0
0
0
0.054945
0.165138
109
6
39
18.166667
0.494505
0.33945
0
0
0
0
0.416667
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
7
adfb0e8f59f1b85134b1b22d1d7ffabfb1d7e41d
1,688
py
Python
Code/analysis/job_array_rq3/infervecs/r100_split.py
lhmtriet/PUMiner_MSR
cbaa126bc56b0968f4b709374c59b87f16b6811d
[ "MIT" ]
2
2020-06-26T07:00:44.000Z
2022-03-18T02:34:19.000Z
Code/analysis/job_array_rq3/infervecs/r100_split.py
lhmtriet/PUMiner_MSR
cbaa126bc56b0968f4b709374c59b87f16b6811d
[ "MIT" ]
null
null
null
Code/analysis/job_array_rq3/infervecs/r100_split.py
lhmtriet/PUMiner_MSR
cbaa126bc56b0968f4b709374c59b87f16b6811d
[ "MIT" ]
null
null
null
import pandas as pd import numpy as np from progressbar import progressbar import sanalytics.algorithms.utils as sau import sanalytics.estimators.pu_estimators as pu import sanalytics.evaluation.utils as seu X_train = pd.read_parquet("datasets/rq3_data/sec1.0R100_train.parquet") X_train = np.array_split(X_train, 300) for num, i in progressbar(enumerate(X_train)): i.to_parquet("datasets/rq3_dataR100/sec1.0R100_train.parquet.{}".format(num), compression=None, index=False) test = pd.read_parquet("datasets/rq3_dataR100/sec1.0R100_train.parquet.{}".format(num)) X_train = pd.read_parquet("datasets/rq3_data/sec1.0R100_test.parquet") X_train = np.array_split(X_train, 50) for num, i in progressbar(enumerate(X_train)): i.to_parquet("datasets/rq3_dataR100/sec1.0R100_test.parquet.{}".format(num), compression=None, index=False) test = pd.read_parquet("datasets/rq3_dataR100/sec1.0R100_test.parquet.{}".format(num)) X_train = pd.read_parquet("datasets/rq3_data/sec1.0R100_all_train.parquet") X_train = np.array_split(X_train, 300) for num, i in progressbar(enumerate(X_train)): i.to_parquet("datasets/rq3_dataR100/sec1.0R100_all_train.parquet.{}".format(num), compression=None, index=False) test = pd.read_parquet("datasets/rq3_dataR100/sec1.0R100_all_train.parquet.{}".format(num)) X_train = pd.read_parquet("datasets/rq3_data/sec1.0R100_all_test_edge.parquet") X_train.to_parquet("datasets/rq3_dataR100/sec1.0R100_all_test_edge.parquet.0", compression=None, index=False) X_train = pd.read_parquet("datasets/rq3_data/sec1.0R100_all_test_easy.parquet") X_train.to_parquet("datasets/rq3_dataR100/sec1.0R100_all_test_easy.parquet.0", compression=None, index=False)
58.206897
116
0.799171
269
1,688
4.754647
0.171004
0.075059
0.182955
0.131353
0.856919
0.856919
0.792807
0.792807
0.768569
0.768569
0
0.071292
0.069313
1,688
29
117
58.206897
0.742839
0
0
0.2
0
0
0.379515
0.379515
0
0
0
0
0
1
0
false
0
0.24
0
0.24
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
70d8c622cb21f361a95f52f22be91f4116be6412
56,991
py
Python
infoblox_netmri/api/broker/v3_8_0/switch_fwd_neighbor_broker.py
infobloxopen/infoblox_netmri
aa1c744df7e439dbe163bb9edd165e4e85a9771b
[ "Apache-2.0" ]
12
2016-02-19T12:37:54.000Z
2022-03-04T20:11:08.000Z
infoblox_netmri/api/broker/v3_8_0/switch_fwd_neighbor_broker.py
azinfoblox/infoblox-netmri
02372c5231e2677ab6299cb659a73c9a41b4b0f4
[ "Apache-2.0" ]
18
2015-11-12T18:37:00.000Z
2021-05-19T07:59:55.000Z
infoblox_netmri/api/broker/v3_8_0/switch_fwd_neighbor_broker.py
azinfoblox/infoblox-netmri
02372c5231e2677ab6299cb659a73c9a41b4b0f4
[ "Apache-2.0" ]
18
2016-01-07T12:04:34.000Z
2022-03-31T11:05:41.000Z
from ..broker import Broker class SwitchFwdNeighborBroker(Broker): controller = "switch_fwd_neighbors" def show(self, **kwargs): """Shows the details for the specified switch fwd neighbor. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param NeighborID: The internal NetMRI identifier for the neighbor relationship. This can be used to look up the Neighbor relationship which contains the source and destination device information. :type NeighborID: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of switch fwd neighbor methods. The listed methods will be called on each switch fwd neighbor returned and included in the output. Available methods are: network_name. :type methods: Array of String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return switch_fwd_neighbor: The switch fwd neighbor identified by the specified NeighborID. :rtype switch_fwd_neighbor: SwitchFwdNeighbor """ return self.api_request(self._get_method_fullname("show"), kwargs) def index(self, **kwargs): """Lists the available switch fwd neighbors. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient. **Inputs** | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param NeighborID: The internal NetMRI identifier for the neighbor relationship. This can be used to look up the Neighbor relationship which contains the source and destination device information. :type NeighborID: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param NeighborID: The internal NetMRI identifier for the neighbor relationship. 
This can be used to look up the Neighbor relationship which contains the source and destination device information. :type NeighborID: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborMAC: The Media Access Controller (MAC) address of the destination neighbor in this neighbor relationship. :type SwitchFwdNeighborMAC: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborMAC: The Media Access Controller (MAC) address of the destination neighbor in this neighbor relationship. :type SwitchFwdNeighborMAC: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results. :type DeviceGroupID: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param timestamp: The data returned will represent the switch fwd neighbors as of this date and time. If omitted, the result will indicate the most recently collected data. :type timestamp: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of switch fwd neighbor methods. The listed methods will be called on each switch fwd neighbor returned and included in the output. Available methods are: network_name. :type methods: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. 
:type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` NeighborID :param sort: The data field(s) to use for sorting the output. Default is NeighborID. Valid values are DataSourceID, NeighborID, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborStartTime, SwitchFwdNeighborEndTime, SwitchFwdNeighborChangedCols, SwitchFwdNeighborTimestamp, SwitchFwdNeighborMapSource, SwitchFwdNeighborType, SwitchFwdNeighborMAC, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each SwitchFwdNeighbor. Valid values are DataSourceID, NeighborID, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborStartTime, SwitchFwdNeighborEndTime, SwitchFwdNeighborChangedCols, SwitchFwdNeighborTimestamp, SwitchFwdNeighborMapSource, SwitchFwdNeighborType, SwitchFwdNeighborMAC, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. If empty or omitted, all attributes will be returned. 
:type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. :type goto_value: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return switch_fwd_neighbors: An array of the SwitchFwdNeighbor objects that match the specified input criteria. :rtype switch_fwd_neighbors: Array of SwitchFwdNeighbor """ return self.api_list_request(self._get_method_fullname("index"), kwargs) def search(self, **kwargs): """Lists the available switch fwd neighbors matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below. **Inputs** | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. :type DataSourceID: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. :type DataSourceID: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param NeighborID: The internal NetMRI identifier for the neighbor relationship. 
This can be used to look up the Neighbor relationship which contains the source and destination device information. :type NeighborID: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param NeighborID: The internal NetMRI identifier for the neighbor relationship. This can be used to look up the Neighbor relationship which contains the source and destination device information. :type NeighborID: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborChangedCols: The fields that changed between this revision of the record and the previous revision. :type SwitchFwdNeighborChangedCols: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborChangedCols: The fields that changed between this revision of the record and the previous revision. :type SwitchFwdNeighborChangedCols: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborEndTime: The ending effective time of this revision of this record, or empty if still in effect. :type SwitchFwdNeighborEndTime: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborEndTime: The ending effective time of this revision of this record, or empty if still in effect. :type SwitchFwdNeighborEndTime: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborFirstSeenTime: The date and time this switch forwarding neighbor was first seen on the network, and since which it has been continuously present. 
:type SwitchFwdNeighborFirstSeenTime: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborFirstSeenTime: The date and time this switch forwarding neighbor was first seen on the network, and since which it has been continuously present. :type SwitchFwdNeighborFirstSeenTime: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborIPDotted: The IP address corresponding to the MAC found in the switch forwarding table, in dotted (or colon-delimited for IPv6) format. :type SwitchFwdNeighborIPDotted: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborIPDotted: The IP address corresponding to the MAC found in the switch forwarding table, in dotted (or colon-delimited for IPv6) format. :type SwitchFwdNeighborIPDotted: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborIPNumeric: The numerical value of the IP address corresponding to the MAC found in the switch forwarding table. :type SwitchFwdNeighborIPNumeric: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborIPNumeric: The numerical value of the IP address corresponding to the MAC found in the switch forwarding table. :type SwitchFwdNeighborIPNumeric: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborMAC: The Media Access Controller (MAC) address of the destination neighbor in this neighbor relationship. 
:type SwitchFwdNeighborMAC: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborMAC: The Media Access Controller (MAC) address of the destination neighbor in this neighbor relationship. :type SwitchFwdNeighborMAC: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborMapSource: Internal tracking information for NetMRI algorithms. :type SwitchFwdNeighborMapSource: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborMapSource: Internal tracking information for NetMRI algorithms. :type SwitchFwdNeighborMapSource: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborStartTime: The starting effective time of this revision of the record. :type SwitchFwdNeighborStartTime: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborStartTime: The starting effective time of this revision of the record. :type SwitchFwdNeighborStartTime: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborTimestamp: The date and time this record was collected or calculated. :type SwitchFwdNeighborTimestamp: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborTimestamp: The date and time this record was collected or calculated. :type SwitchFwdNeighborTimestamp: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborType: Internal tracking information for NetMRI algorithms. 
:type SwitchFwdNeighborType: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborType: Internal tracking information for NetMRI algorithms. :type SwitchFwdNeighborType: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborVlanID: The internal NetMRI identifier for the VLAN of this neighbor. :type SwitchFwdNeighborVlanID: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborVlanID: The internal NetMRI identifier for the VLAN of this neighbor. :type SwitchFwdNeighborVlanID: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param SwitchFwdNeighborVlanIndex: The numerical VLAN number (VLAN ID) of this neighbor. :type SwitchFwdNeighborVlanIndex: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SwitchFwdNeighborVlanIndex: The numerical VLAN number (VLAN ID) of this neighbor. :type SwitchFwdNeighborVlanIndex: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results. :type DeviceGroupID: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param timestamp: The data returned will represent the switch fwd neighbors as of this date and time. If omitted, the result will indicate the most recently collected data. :type timestamp: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of switch fwd neighbor methods. The listed methods will be called on each switch fwd neighbor returned and included in the output. 
Available methods are: network_name. :type methods: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` NeighborID :param sort: The data field(s) to use for sorting the output. Default is NeighborID. Valid values are DataSourceID, NeighborID, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborStartTime, SwitchFwdNeighborEndTime, SwitchFwdNeighborChangedCols, SwitchFwdNeighborTimestamp, SwitchFwdNeighborMapSource, SwitchFwdNeighborType, SwitchFwdNeighborMAC, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each SwitchFwdNeighbor. 
Valid values are DataSourceID, NeighborID, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborStartTime, SwitchFwdNeighborEndTime, SwitchFwdNeighborChangedCols, SwitchFwdNeighborTimestamp, SwitchFwdNeighborMapSource, SwitchFwdNeighborType, SwitchFwdNeighborMAC, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. :type goto_value: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param query: This value will be matched against switch fwd neighbors, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, NeighborID, SwitchFwdNeighborChangedCols, SwitchFwdNeighborEndTime, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborMAC, SwitchFwdNeighborMapSource, SwitchFwdNeighborStartTime, SwitchFwdNeighborTimestamp, SwitchFwdNeighborType, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. :type query: String | ``api version min:`` 2.3 | ``api version max:`` None | ``required:`` False | ``default:`` None :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. 
The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering. :type xml_filter: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return switch_fwd_neighbors: An array of the SwitchFwdNeighbor objects that match the specified input criteria. :rtype switch_fwd_neighbors: Array of SwitchFwdNeighbor """ return self.api_list_request(self._get_method_fullname("search"), kwargs) def find(self, **kwargs): """Lists the available switch fwd neighbors matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, NeighborID, SwitchFwdNeighborChangedCols, SwitchFwdNeighborEndTime, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborMAC, SwitchFwdNeighborMapSource, SwitchFwdNeighborStartTime, SwitchFwdNeighborTimestamp, SwitchFwdNeighborType, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_DataSourceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified. :type val_f_DataSourceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified. :type val_c_DataSourceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_NeighborID: The operator to apply to the field NeighborID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NeighborID: The internal NetMRI identifier for the neighbor relationship. This can be used to look up the Neighbor relationship which contains the source and destination device information. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_NeighborID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_NeighborID: If op_NeighborID is specified, the field named in this input will be compared to the value in NeighborID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. 
Either this field or val_c_NeighborID must be specified if op_NeighborID is specified. :type val_f_NeighborID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_NeighborID: If op_NeighborID is specified, this value will be compared to the value in NeighborID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NeighborID must be specified if op_NeighborID is specified. :type val_c_NeighborID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborChangedCols: The operator to apply to the field SwitchFwdNeighborChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborChangedCols: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborChangedCols: If op_SwitchFwdNeighborChangedCols is specified, the field named in this input will be compared to the value in SwitchFwdNeighborChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborChangedCols must be specified if op_SwitchFwdNeighborChangedCols is specified. 
:type val_f_SwitchFwdNeighborChangedCols: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborChangedCols: If op_SwitchFwdNeighborChangedCols is specified, this value will be compared to the value in SwitchFwdNeighborChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborChangedCols must be specified if op_SwitchFwdNeighborChangedCols is specified. :type val_c_SwitchFwdNeighborChangedCols: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborEndTime: The operator to apply to the field SwitchFwdNeighborEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborEndTime: The ending effective time of this revision of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborEndTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborEndTime: If op_SwitchFwdNeighborEndTime is specified, the field named in this input will be compared to the value in SwitchFwdNeighborEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborEndTime must be specified if op_SwitchFwdNeighborEndTime is specified. 
:type val_f_SwitchFwdNeighborEndTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborEndTime: If op_SwitchFwdNeighborEndTime is specified, this value will be compared to the value in SwitchFwdNeighborEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborEndTime must be specified if op_SwitchFwdNeighborEndTime is specified. :type val_c_SwitchFwdNeighborEndTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborFirstSeenTime: The operator to apply to the field SwitchFwdNeighborFirstSeenTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborFirstSeenTime: The date and time this switch forwarding neighbor was first seen on the network, and since which it has been continuously present. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborFirstSeenTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborFirstSeenTime: If op_SwitchFwdNeighborFirstSeenTime is specified, the field named in this input will be compared to the value in SwitchFwdNeighborFirstSeenTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborFirstSeenTime must be specified if op_SwitchFwdNeighborFirstSeenTime is specified. 
:type val_f_SwitchFwdNeighborFirstSeenTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborFirstSeenTime: If op_SwitchFwdNeighborFirstSeenTime is specified, this value will be compared to the value in SwitchFwdNeighborFirstSeenTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborFirstSeenTime must be specified if op_SwitchFwdNeighborFirstSeenTime is specified. :type val_c_SwitchFwdNeighborFirstSeenTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborIPDotted: The operator to apply to the field SwitchFwdNeighborIPDotted. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborIPDotted: The IP address corresponding to the MAC found in the switch forwarding table, in dotted (or colon-delimited for IPv6) format. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborIPDotted: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborIPDotted: If op_SwitchFwdNeighborIPDotted is specified, the field named in this input will be compared to the value in SwitchFwdNeighborIPDotted using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborIPDotted must be specified if op_SwitchFwdNeighborIPDotted is specified. 
:type val_f_SwitchFwdNeighborIPDotted: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborIPDotted: If op_SwitchFwdNeighborIPDotted is specified, this value will be compared to the value in SwitchFwdNeighborIPDotted using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborIPDotted must be specified if op_SwitchFwdNeighborIPDotted is specified. :type val_c_SwitchFwdNeighborIPDotted: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborIPNumeric: The operator to apply to the field SwitchFwdNeighborIPNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborIPNumeric: The numerical value of the IP address corresponding to the MAC found in the switch forwarding table. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborIPNumeric: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborIPNumeric: If op_SwitchFwdNeighborIPNumeric is specified, the field named in this input will be compared to the value in SwitchFwdNeighborIPNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborIPNumeric must be specified if op_SwitchFwdNeighborIPNumeric is specified. 
:type val_f_SwitchFwdNeighborIPNumeric: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborIPNumeric: If op_SwitchFwdNeighborIPNumeric is specified, this value will be compared to the value in SwitchFwdNeighborIPNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborIPNumeric must be specified if op_SwitchFwdNeighborIPNumeric is specified. :type val_c_SwitchFwdNeighborIPNumeric: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborMAC: The operator to apply to the field SwitchFwdNeighborMAC. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborMAC: The Media Access Controller (MAC) address of the destination neighbor in this neighbor relationship. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborMAC: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborMAC: If op_SwitchFwdNeighborMAC is specified, the field named in this input will be compared to the value in SwitchFwdNeighborMAC using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborMAC must be specified if op_SwitchFwdNeighborMAC is specified. :type val_f_SwitchFwdNeighborMAC: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborMAC: If op_SwitchFwdNeighborMAC is specified, this value will be compared to the value in SwitchFwdNeighborMAC using the specified operator. 
The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborMAC must be specified if op_SwitchFwdNeighborMAC is specified. :type val_c_SwitchFwdNeighborMAC: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborMapSource: The operator to apply to the field SwitchFwdNeighborMapSource. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborMapSource: Internal tracking information for NetMRI algorithms. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborMapSource: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborMapSource: If op_SwitchFwdNeighborMapSource is specified, the field named in this input will be compared to the value in SwitchFwdNeighborMapSource using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborMapSource must be specified if op_SwitchFwdNeighborMapSource is specified. :type val_f_SwitchFwdNeighborMapSource: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborMapSource: If op_SwitchFwdNeighborMapSource is specified, this value will be compared to the value in SwitchFwdNeighborMapSource using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborMapSource must be specified if op_SwitchFwdNeighborMapSource is specified. 
:type val_c_SwitchFwdNeighborMapSource: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborStartTime: The operator to apply to the field SwitchFwdNeighborStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborStartTime: The starting effective time of this revision of the record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborStartTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborStartTime: If op_SwitchFwdNeighborStartTime is specified, the field named in this input will be compared to the value in SwitchFwdNeighborStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborStartTime must be specified if op_SwitchFwdNeighborStartTime is specified. :type val_f_SwitchFwdNeighborStartTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborStartTime: If op_SwitchFwdNeighborStartTime is specified, this value will be compared to the value in SwitchFwdNeighborStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborStartTime must be specified if op_SwitchFwdNeighborStartTime is specified. :type val_c_SwitchFwdNeighborStartTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborTimestamp: The operator to apply to the field SwitchFwdNeighborTimestamp. 
Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborTimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborTimestamp: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborTimestamp: If op_SwitchFwdNeighborTimestamp is specified, the field named in this input will be compared to the value in SwitchFwdNeighborTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborTimestamp must be specified if op_SwitchFwdNeighborTimestamp is specified. :type val_f_SwitchFwdNeighborTimestamp: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborTimestamp: If op_SwitchFwdNeighborTimestamp is specified, this value will be compared to the value in SwitchFwdNeighborTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborTimestamp must be specified if op_SwitchFwdNeighborTimestamp is specified. :type val_c_SwitchFwdNeighborTimestamp: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborType: The operator to apply to the field SwitchFwdNeighborType. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborType: Internal tracking information for NetMRI algorithms. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_SwitchFwdNeighborType: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborType: If op_SwitchFwdNeighborType is specified, the field named in this input will be compared to the value in SwitchFwdNeighborType using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborType must be specified if op_SwitchFwdNeighborType is specified. :type val_f_SwitchFwdNeighborType: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborType: If op_SwitchFwdNeighborType is specified, this value will be compared to the value in SwitchFwdNeighborType using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborType must be specified if op_SwitchFwdNeighborType is specified. :type val_c_SwitchFwdNeighborType: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborVlanID: The operator to apply to the field SwitchFwdNeighborVlanID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborVlanID: The internal NetMRI identifier for the VLAN of this neighbor. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborVlanID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborVlanID: If op_SwitchFwdNeighborVlanID is specified, the field named in this input will be compared to the value in SwitchFwdNeighborVlanID using the specified operator. 
That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborVlanID must be specified if op_SwitchFwdNeighborVlanID is specified. :type val_f_SwitchFwdNeighborVlanID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborVlanID: If op_SwitchFwdNeighborVlanID is specified, this value will be compared to the value in SwitchFwdNeighborVlanID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborVlanID must be specified if op_SwitchFwdNeighborVlanID is specified. :type val_c_SwitchFwdNeighborVlanID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SwitchFwdNeighborVlanIndex: The operator to apply to the field SwitchFwdNeighborVlanIndex. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SwitchFwdNeighborVlanIndex: The numerical VLAN number (VLAN ID) of this neighbor. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SwitchFwdNeighborVlanIndex: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SwitchFwdNeighborVlanIndex: If op_SwitchFwdNeighborVlanIndex is specified, the field named in this input will be compared to the value in SwitchFwdNeighborVlanIndex using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SwitchFwdNeighborVlanIndex must be specified if op_SwitchFwdNeighborVlanIndex is specified. 
:type val_f_SwitchFwdNeighborVlanIndex: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SwitchFwdNeighborVlanIndex: If op_SwitchFwdNeighborVlanIndex is specified, this value will be compared to the value in SwitchFwdNeighborVlanIndex using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SwitchFwdNeighborVlanIndex must be specified if op_SwitchFwdNeighborVlanIndex is specified. :type val_c_SwitchFwdNeighborVlanIndex: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results. :type DeviceGroupID: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param timestamp: The data returned will represent the switch fwd neighbors as of this date and time. If omitted, the result will indicate the most recently collected data. :type timestamp: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of switch fwd neighbor methods. The listed methods will be called on each switch fwd neighbor returned and included in the output. Available methods are: network_name. :type methods: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. 
The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` NeighborID :param sort: The data field(s) to use for sorting the output. Default is NeighborID. Valid values are DataSourceID, NeighborID, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborStartTime, SwitchFwdNeighborEndTime, SwitchFwdNeighborChangedCols, SwitchFwdNeighborTimestamp, SwitchFwdNeighborMapSource, SwitchFwdNeighborType, SwitchFwdNeighborMAC, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each SwitchFwdNeighbor. Valid values are DataSourceID, NeighborID, SwitchFwdNeighborFirstSeenTime, SwitchFwdNeighborStartTime, SwitchFwdNeighborEndTime, SwitchFwdNeighborChangedCols, SwitchFwdNeighborTimestamp, SwitchFwdNeighborMapSource, SwitchFwdNeighborType, SwitchFwdNeighborMAC, SwitchFwdNeighborIPDotted, SwitchFwdNeighborIPNumeric, SwitchFwdNeighborVlanID, SwitchFwdNeighborVlanIndex. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. 
:type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. :type goto_value: String | ``api version min:`` 2.3 | ``api version max:`` None | ``required:`` False | ``default:`` None :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering. :type xml_filter: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return switch_fwd_neighbors: An array of the SwitchFwdNeighbor objects that match the specified input criteria. :rtype switch_fwd_neighbors: Array of SwitchFwdNeighbor """ return self.api_list_request(self._get_method_fullname("find"), kwargs)
59.738994
768
0.64705
6,439
56,991
5.678522
0.047834
0.061809
0.040176
0.065638
0.94604
0.944508
0.90611
0.894103
0.892873
0.892873
0
0.004329
0.278553
56,991
953
769
59.801679
0.88496
0.843782
0
0
0
0
0.065657
0
0
0
0
0
0
1
0.363636
false
0
0.090909
0
1
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
9
cb0d6ae0a73c7d387c3958c5c2db1b1fe4f96e39
8,904
py
Python
tests/test_get_doc_params_numpystyle.py
janfreyberg/docargs
435c31a8a9af5ea897af740f0d7dfde703b43ff9
[ "MIT" ]
4
2018-11-26T11:54:05.000Z
2019-10-14T10:08:07.000Z
tests/test_get_doc_params_numpystyle.py
janfreyberg/docargs
435c31a8a9af5ea897af740f0d7dfde703b43ff9
[ "MIT" ]
2
2018-11-21T16:15:31.000Z
2018-11-25T19:17:51.000Z
tests/test_get_doc_params_numpystyle.py
janfreyberg/docargs
435c31a8a9af5ea897af740f0d7dfde703b43ff9
[ "MIT" ]
null
null
null
import ast from docargs.check import get_doc_params, get_signature_params, check SAMPLE_DOCCED_FN = ast.parse(""" def function_with_types_in_docstring(param1, param2): \"\"\"Example function with types documented in the docstring. `PEP 484`_ type annotations are supported. If attribute, parameter, and return types are annotated according to `PEP 484`_, they do not need to be included in the docstring: Parameters ---------- param1 : int The first parameter. param2 : str The second parameter. Returns ------- bool True if successful, False otherwise. .. _PEP 484: https://www.python.org/dev/peps/pep-0484/ \"\"\" """).body[0] def test_correctly_docced_fn(): _, over, under = next(check(SAMPLE_DOCCED_FN)) assert over == under == [] assert (get_doc_params(SAMPLE_DOCCED_FN) == get_signature_params(SAMPLE_DOCCED_FN)[0] == {"param1", "param2"}) SAMPLE_UNDOCCED_FN = ast.parse(""" def function_with_types_in_docstring(param1, param2): \"\"\"Example function with types documented in the docstring. `PEP 484`_ type annotations are supported. If attribute, parameter, and return types are annotated according to `PEP 484`_, they do not need to be included in the docstring: Returns ------- bool True if successful, False otherwise. .. _PEP 484: https://www.python.org/dev/peps/pep-0484/ \"\"\" """).body[0] def test_incorrectly_docced_fn(): _, under, over = next(check(SAMPLE_UNDOCCED_FN)) assert over == [] assert set(under) == {"param1", "param2"} assert get_doc_params(SAMPLE_UNDOCCED_FN) == set() assert ( get_signature_params(SAMPLE_UNDOCCED_FN)[0] == {"param1", "param2"} ) assert ( get_doc_params(SAMPLE_UNDOCCED_FN) != get_signature_params(SAMPLE_UNDOCCED_FN)[0] ) SAMPLE_OVERDOCCED_FN = ast.parse(""" def function_with_types_in_docstring(param1, param2): \"\"\"Example function with types documented in the docstring. `PEP 484`_ type annotations are supported. 
If attribute, parameter, and return types are annotated according to `PEP 484`_, they do not need to be included in the docstring: Parameters ---------- param1 : int The first parameter. param2 : str The second parameter. param3 : None An extra param Returns ------- bool True if successful, False otherwise. .. _PEP 484: https://www.python.org/dev/peps/pep-0484/ \"\"\" """).body[0] def test_over_docced_fn(): _, under, over = next(check(SAMPLE_OVERDOCCED_FN)) assert under == [] assert set(over) == {"param3"} DOCCED_CLASS = ast.parse(""" class ExampleClass(object): \"\"\"The summary line for a class docstring should fit on one line. If the class has public attributes, they may be documented here in an ``Attributes`` section and follow the same formatting as a function's ``Args`` section. Alternatively, attributes may be documented inline with the attribute's declaration (see __init__ method below). Properties created with the ``@property`` decorator should be documented in the property's getter method. Attributes ---------- attr1 : str Description of `attr1`. attr2 : :obj:`int`, optional Description of `attr2`. \"\"\" def __init__(self, param1, param2, param3): \"\"\"Example of docstring on the __init__ method. The __init__ method may be documented in either the class level docstring, or as a docstring on the __init__ method itself. Either form is acceptable, but the two should not be mixed. Choose one convention to document the __init__ method and be consistent with it. Parameters ---------- param1 : str Description of `param1`. param2 : :obj:`list` of :obj:`str` Description of `param2`. Multiple lines are supported. param3 : :obj:`int`, optional Description of `param3`. \"\"\" """).body[0] def test_cls_with_param_docs_in_init_docstring(): _, over, under = next(check(DOCCED_CLASS)) assert over == under == [] ALT_DOCCED_CLASS = ast.parse(""" class ExampleClass(object): \"\"\"The summary line for a class docstring should fit on one line. 
If the class has public attributes, they may be documented here in an ``Attributes`` section and follow the same formatting as a function's ``Args`` section. Alternatively, attributes may be documented inline with the attribute's declaration (see __init__ method below). Properties created with the ``@property`` decorator should be documented in the property's getter method. Parameters ---------- param1 : str Description of `param1`. param2 : :obj:`list` of :obj:`str` Description of `param2`. Multiple lines are supported. param3 : :obj:`int`, optional Description of `param3`. Attributes ---------- attr1 : str Description of `attr1`. attr2 : :obj:`int`, optional Description of `attr2`. \"\"\" def __init__(self, param1, param2, param3): \"\"\"Example of docstring on the __init__ method. The __init__ method may be documented in either the class level docstring, or as a docstring on the __init__ method itself. Either form is acceptable, but the two should not be mixed. Choose one convention to document the __init__ method and be consistent with it. \"\"\" """).body[0] def test_cls_with_param_docs_in_class_docstring(): _, over, under = next(check(ALT_DOCCED_CLASS)) assert over == under == [] UN_DOCCED_CLASS = ast.parse(""" class ExampleClass(object): \"\"\"The summary line for a class docstring should fit on one line. If the class has public attributes, they may be documented here in an ``Attributes`` section and follow the same formatting as a function's ``Args`` section. Alternatively, attributes may be documented inline with the attribute's declaration (see __init__ method below). Properties created with the ``@property`` decorator should be documented in the property's getter method. Parameters ---------- param1 : str Description of `param1`. param2 : :obj:`list` of :obj:`str` Description of `param2`. Multiple lines are supported. Attributes ---------- attr1 : str Description of `attr1`. attr2 : :obj:`int`, optional Description of `attr2`. 
\"\"\" def __init__(self, param1, param2, param3): \"\"\"Example of docstring on the __init__ method. The __init__ method may be documented in either the class level docstring, or as a docstring on the __init__ method itself. Either form is acceptable, but the two should not be mixed. Choose one convention to document the __init__ method and be consistent with it. \"\"\" """).body[0] def test_cls_one_param_missing(): _, under, over = next(check(UN_DOCCED_CLASS)) assert set(under) == {"param3"} assert set(over) == set() OVER_DOCCED_CLASS = ast.parse(""" class ExampleClass(object): \"\"\"The summary line for a class docstring should fit on one line. If the class has public attributes, they may be documented here in an ``Attributes`` section and follow the same formatting as a function's ``Args`` section. Alternatively, attributes may be documented inline with the attribute's declaration (see __init__ method below). Properties created with the ``@property`` decorator should be documented in the property's getter method. Parameters ---------- param1 : str Description of `param1`. param2 : :obj:`list` of :obj:`str` Description of `param2`. Multiple lines are supported. param3 : :obj:`int`, optional Description of `param3`. param4 : :obj:`int`, optional Description of `param3`. Attributes ---------- attr1 : str Description of `attr1`. attr2 : :obj:`int`, optional Description of `attr2`. \"\"\" def __init__(self, param1, param2, param3): \"\"\"Example of docstring on the __init__ method. The __init__ method may be documented in either the class level docstring, or as a docstring on the __init__ method itself. Either form is acceptable, but the two should not be mixed. Choose one convention to document the __init__ method and be consistent with it. \"\"\" """).body[0] def test_cls_one_param_extra(): _, under, over = next(check(UN_DOCCED_CLASS)) assert set(under) == {"param3"} assert set(over) == set()
29.193443
78
0.647462
1,122
8,904
4.951872
0.129234
0.035997
0.037437
0.035997
0.935205
0.907127
0.907127
0.883009
0.883009
0.858351
0
0.018025
0.246069
8,904
304
79
29.289474
0.809623
0
0
0.832599
0
0
0.803122
0.022911
0
0
0
0
0.066079
1
0.030837
false
0
0.008811
0
0.052863
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
cb928924135ce48ff53a6335a420c7e30d832fc6
189
py
Python
pydoge/__init__.py
ezeportela/pydoge
89ba8c4537cf22470caa51a698b9462f5522c079
[ "MIT" ]
null
null
null
pydoge/__init__.py
ezeportela/pydoge
89ba8c4537cf22470caa51a698b9462f5522c079
[ "MIT" ]
null
null
null
pydoge/__init__.py
ezeportela/pydoge
89ba8c4537cf22470caa51a698b9462f5522c079
[ "MIT" ]
null
null
null
from pydoge.config import * from pydoge.connection_dbcursor import * from pydoge.http_service import * from pydoge.logger import * from pydoge.timestamp import * from pydoge.utils import *
27
40
0.809524
26
189
5.807692
0.423077
0.397351
0.529801
0
0
0
0
0
0
0
0
0
0.126984
189
6
41
31.5
0.915152
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
cba7ab00a093ab6153a27510d8bf3abe2e8067b1
8,732
py
Python
third_party/libSBML-5.9.0-Source/src/bindings/python/test/annotation/TestAnnotationCopyAndClone.py
0u812/roadrunner
f464c2649e388fa1f5a015592b0b29b65cc84b4b
[ "Apache-2.0" ]
5
2015-04-16T14:27:38.000Z
2021-11-30T14:54:39.000Z
third_party/libSBML-5.9.0-Source/src/bindings/python/test/annotation/TestAnnotationCopyAndClone.py
0u812/roadrunner
f464c2649e388fa1f5a015592b0b29b65cc84b4b
[ "Apache-2.0" ]
95
2015-03-06T12:14:06.000Z
2015-03-20T11:15:54.000Z
third_party/libSBML-5.9.0-Source/src/bindings/python/test/annotation/TestAnnotationCopyAndClone.py
0u812/roadrunner
f464c2649e388fa1f5a015592b0b29b65cc84b4b
[ "Apache-2.0" ]
7
2016-05-29T08:12:59.000Z
2019-05-02T13:39:25.000Z
# # @file TestAnnotationCopyAndClone.py # @brief Test the copy and clone methods for annotation classes # # @author Akiya Jouraku (Python conversion) # @author Sarah Keating # # ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ====== # # DO NOT EDIT THIS FILE. # # This file was generated automatically by converting the file located at # src/annotation/test/TestCopyAndClone.cpp # using the conversion program dev/utilities/translateTests/translateTests.pl. # Any changes made here will be lost the next time the file is regenerated. # # ----------------------------------------------------------------------------- # This file is part of libSBML. Please visit http://sbml.org for more # information about SBML, and the latest version of libSBML. # # Copyright 2005-2010 California Institute of Technology. # Copyright 2002-2005 California Institute of Technology and # Japan Science and Technology Corporation. # # This library is free software; you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation. 
A copy of the license agreement is provided # in the file named "LICENSE.txt" included with this software distribution # and also available online as http://sbml.org/software/libsbml/license.html # ----------------------------------------------------------------------------- import sys import unittest import libsbml class TestAnnotationCopyAndClone(unittest.TestCase): def test_CVTerm_assignmentOperator(self): CVTerm1 = libsbml.CVTerm(libsbml.BIOLOGICAL_QUALIFIER) CVTerm1.addResource("http://www.geneontology.org/#GO:0005892") self.assert_( CVTerm1.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER ) self.assert_( CVTerm1.getResources().getLength() == 1 ) self.assert_( CVTerm1.getResources().getValue(0) == "http://www.geneontology.org/#GO:0005892" ) CVTerm2 = libsbml.CVTerm() CVTerm2 = CVTerm1 self.assert_( CVTerm2.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER ) self.assert_( CVTerm2.getResources().getLength() == 1 ) self.assert_( CVTerm2.getResources().getValue(0) == "http://www.geneontology.org/#GO:0005892" ) CVTerm2 = None CVTerm1 = None pass def test_CVTerm_clone(self): CVTerm1 = libsbml.CVTerm(libsbml.BIOLOGICAL_QUALIFIER) CVTerm1.addResource("http://www.geneontology.org/#GO:0005892") self.assert_( CVTerm1.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER ) self.assert_( CVTerm1.getResources().getLength() == 1 ) self.assert_( CVTerm1.getResources().getValue(0) == "http://www.geneontology.org/#GO:0005892" ) CVTerm2 = CVTerm1.clone() self.assert_( CVTerm2.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER ) self.assert_( CVTerm2.getResources().getLength() == 1 ) self.assert_( CVTerm2.getResources().getValue(0) == "http://www.geneontology.org/#GO:0005892" ) CVTerm2 = None CVTerm1 = None pass def test_CVTerm_copyConstructor(self): CVTerm1 = libsbml.CVTerm(libsbml.BIOLOGICAL_QUALIFIER) CVTerm1.addResource("http://www.geneontology.org/#GO:0005892") self.assert_( CVTerm1.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER ) self.assert_( 
CVTerm1.getResources().getLength() == 1 ) self.assert_( CVTerm1.getResources().getValue(0) == "http://www.geneontology.org/#GO:0005892" ) CVTerm2 = libsbml.CVTerm(CVTerm1) self.assert_( CVTerm2.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER ) self.assert_( CVTerm2.getResources().getLength() == 1 ) self.assert_( CVTerm2.getResources().getValue(0) == "http://www.geneontology.org/#GO:0005892" ) CVTerm2 = None CVTerm1 = None pass def test_Date_assignmentOperator(self): date = libsbml.Date(2005,12,30,12,15,45,1,2,0) self.assert_( date.getMonth() == 12 ) self.assert_( date.getSecond() == 45 ) date2 = libsbml.Date() date2 = date self.assert_( date2.getMonth() == 12 ) self.assert_( date2.getSecond() == 45 ) date2 = None date = None pass def test_Date_clone(self): date = libsbml.Date(2005,12,30,12,15,45,1,2,0) self.assert_( date.getMonth() == 12 ) self.assert_( date.getSecond() == 45 ) date2 = date.clone() self.assert_( date2.getMonth() == 12 ) self.assert_( date2.getSecond() == 45 ) date2 = None date = None pass def test_Date_copyConstructor(self): date = libsbml.Date(2005,12,30,12,15,45,1,2,0) self.assert_( date.getMonth() == 12 ) self.assert_( date.getSecond() == 45 ) date2 = libsbml.Date(date) self.assert_( date2.getMonth() == 12 ) self.assert_( date2.getSecond() == 45 ) date2 = None date = None pass def test_ModelCreator_assignmentOperator(self): mc = libsbml.ModelCreator() mc.setFamilyName("Keating") mc.setEmail("sbml-team@caltech.edu") self.assert_( mc.getFamilyName() == "Keating" ) self.assert_( mc.getEmail() == "sbml-team@caltech.edu" ) mc2 = libsbml.ModelCreator() mc2 = mc self.assert_( mc2.getFamilyName() == "Keating" ) self.assert_( mc2.getEmail() == "sbml-team@caltech.edu" ) mc2 = None mc = None pass def test_ModelCreator_clone(self): mc = libsbml.ModelCreator() mc.setFamilyName("Keating") mc.setEmail("sbml-team@caltech.edu") self.assert_( mc.getFamilyName() == "Keating" ) self.assert_( mc.getEmail() == "sbml-team@caltech.edu" ) mc2 = mc.clone() 
self.assert_( mc2.getFamilyName() == "Keating" ) self.assert_( mc2.getEmail() == "sbml-team@caltech.edu" ) mc2 = None mc = None pass def test_ModelCreator_copyConstructor(self): mc = libsbml.ModelCreator() mc.setFamilyName("Keating") mc.setEmail("sbml-team@caltech.edu") self.assert_( mc.getFamilyName() == "Keating" ) self.assert_( mc.getEmail() == "sbml-team@caltech.edu" ) mc2 = libsbml.ModelCreator(mc) self.assert_( mc2.getFamilyName() == "Keating" ) self.assert_( mc2.getEmail() == "sbml-team@caltech.edu" ) mc2 = None mc = None pass def test_ModelHistory_assignmentOperator(self): mh = libsbml.ModelHistory() mc = libsbml.ModelCreator() mc.setGivenName("Sarah") mc.setFamilyName("Keating") mc.setEmail("sbml-team@caltech.edu") mh.addCreator(mc) mc = None date = libsbml.Date(2005,12,30,12,15,45,1,2,0) mh.setCreatedDate(date) date = None self.assert_( mh.getCreatedDate().getMonth() == 12 ) self.assert_( mh.getCreatedDate().getSecond() == 45 ) self.assert_( mh.getCreator(0).getFamilyName() == "Keating" ) mh2 = libsbml.ModelHistory() mh2 = mh self.assert_( mh2.getCreatedDate().getMonth() == 12 ) self.assert_( mh2.getCreatedDate().getSecond() == 45 ) self.assert_( mh2.getCreator(0).getFamilyName() == "Keating" ) mh2 = None mh = None pass def test_ModelHistory_clone(self): mh = libsbml.ModelHistory() mc = libsbml.ModelCreator() mc.setFamilyName("Keating") mc.setGivenName("Sarah") mc.setEmail("sbml-team@caltech.edu") mh.addCreator(mc) mc = None date = libsbml.Date(2005,12,30,12,15,45,1,2,0) mh.setCreatedDate(date) date = None self.assert_( mh.getCreatedDate().getMonth() == 12 ) self.assert_( mh.getCreatedDate().getSecond() == 45 ) self.assert_( mh.getCreator(0).getFamilyName() == "Keating" ) mh2 = mh.clone() self.assert_( mh2.getCreatedDate().getMonth() == 12 ) self.assert_( mh2.getCreatedDate().getSecond() == 45 ) self.assert_( mh2.getCreator(0).getFamilyName() == "Keating" ) mh2 = None mh = None pass def test_ModelHistory_copyConstructor(self): mh = libsbml.ModelHistory() 
mc = libsbml.ModelCreator() mc.setFamilyName("Keating") mc.setGivenName("Sarah") mc.setEmail("sbml-team@caltech.edu") mh.addCreator(mc) mc = None date = libsbml.Date(2005,12,30,12,15,45,1,2,0) mh.setCreatedDate(date) date = None self.assert_( mh.getCreatedDate().getMonth() == 12 ) self.assert_( mh.getCreatedDate().getSecond() == 45 ) self.assert_( mh.getCreator(0).getFamilyName() == "Keating" ) mh2 = libsbml.ModelHistory(mh) self.assert_( mh2.getCreatedDate().getMonth() == 12 ) self.assert_( mh2.getCreatedDate().getSecond() == 45 ) self.assert_( mh2.getCreator(0).getFamilyName() == "Keating" ) mh2 = None mh = None pass def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(TestAnnotationCopyAndClone)) return suite if __name__ == "__main__": if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() : sys.exit(0) else: sys.exit(1)
37
100
0.664224
1,017
8,732
5.603736
0.173058
0.105282
0.023162
0.042113
0.747149
0.742235
0.742235
0.742235
0.733813
0.729953
0
0.045809
0.177508
8,732
235
101
37.157447
0.747703
0.156322
0
0.778378
0
0
0.102522
0.034356
0
0
0
0
0.324324
1
0.07027
false
0.064865
0.016216
0
0.097297
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
1dab9d711970f28b020a1201db765c1c0de5acdf
500
py
Python
BOOK/PRACTICAL-BOOK/chapter-1/07-hollow-diamond-pattern.py
kabirsrivastava3/python-practice
f56a4a0764031d3723b0ba4cd1418a1a83b1e4f5
[ "MIT" ]
null
null
null
BOOK/PRACTICAL-BOOK/chapter-1/07-hollow-diamond-pattern.py
kabirsrivastava3/python-practice
f56a4a0764031d3723b0ba4cd1418a1a83b1e4f5
[ "MIT" ]
null
null
null
BOOK/PRACTICAL-BOOK/chapter-1/07-hollow-diamond-pattern.py
kabirsrivastava3/python-practice
f56a4a0764031d3723b0ba4cd1418a1a83b1e4f5
[ "MIT" ]
null
null
null
n = int(input("Enter the limit: ")) for row in range(n,0,-1): for column in range(row,0,-1): print("*",end=" ") for column in range(2*(n - row)): print(" ",end=" ") for column in range(row,0,-1): print("*",end=" ") print() for row in range(n): for column in range(row + 1): print("*",end=" ") for column in range(2*(n - row - 1)): print(" ",end=" ") for column in range(row + 1): print("*",end=" ") print()
21.73913
41
0.47
73
500
3.219178
0.219178
0.238298
0.280851
0.408511
0.855319
0.73617
0.73617
0.702128
0.46383
0.255319
0
0.03207
0.314
500
22
42
22.727273
0.653061
0
0
0.705882
0
0
0.058233
0
0
0
0
0
0
1
0
false
0
0
0
0
0.470588
0
0
0
null
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
9
38345493854bfd886edcb380eb3535ba6766f373
3,685
py
Python
SC101Lecture_code/SC101_week3/command_line.py
Jewel-Hong/SC-projects
9502b3f0c789a931226d4ce0200ccec56e47bc14
[ "MIT" ]
null
null
null
SC101Lecture_code/SC101_week3/command_line.py
Jewel-Hong/SC-projects
9502b3f0c789a931226d4ce0200ccec56e47bc14
[ "MIT" ]
null
null
null
SC101Lecture_code/SC101_week3/command_line.py
Jewel-Hong/SC-projects
9502b3f0c789a931226d4ce0200ccec56e47bc14
[ "MIT" ]
null
null
null
""" File: command_line.py Name: -------------------------- This file shows how to use command line to launch different functions. We will be introducing sys module and use it to do the job """ import sys def main(): if len(sys.argv) == 2: # 如果用run的需要加在parameter if sys.argv[1] == 'charmander': draw_charmander() else: print('Please enter a pokemon name.') def draw_charmander(): print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") print(" N777777777NO") print(" N7777777777777N") print(" $N877777777D77777M") print(" N M77777777ONND777M") print(" MN777777777NN D777") print(" N7ZN777777777NN ~M7778") print(" N777777777777MMNN88777N") print(" N7OONND7777777D77777N") print(" 8$M++++?N???$77777$") print(" DNNM$$$$777777N") print(" N$N:=N$777N7777M ") print(" 77Z::::N777777777 ") print(" 77N::::::N77777777M DZ ") print(" $777:::::::77777777MN DNZ ") print(" 7777M:::::M7777777$777M DZZZ") print(" M777$:::::N777777$M7777M NNZZZ$") print(" NN$::::::7777$$M777777N M8ZZZ") print(" N::::::N:7$O:77777777 $++IZZ") print(" INN===::::::=77777777777N I777N") print(" ?777N========N7777777777787M N777") print(" M$$$$$$$$M M$$$$$$$$N=N$$$$7777777$$$ND") print(" O77Z$$$$$$$ M$$$$$$$$MNI==$DNNNNM=~N") print(" 7 :N MNN$$$$M$ $$$777$8 8D8I") print(" NMM.:7O 777777778") print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") def draw_bulbasaur(): print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") print(" `;,;.;,;.;.'") print(" ..:;:;::;: ") print(" ..--''' '' ' ' '''--. ") print(" /' . .' '. .`\\") print(" | / / \\ '.|") print(" | | : : :|") print(" .'| | : : :|") print(" ,: /\\ \\.._\\ __..===..__/_../ /`.") print("|'' | :.| `' `' |.' ::.") print("| | ''| :''; | , `''\\") print("|.: \\/ | /'-.`' ':'.-'\\ | \\, |") print("| ' / / | / |... | \\ | | |';'|") print(" \\ _ |:.| |_\\_|`.' |_/_| |.:| _ |") print("/,.,.|' \\__ . . __/ '|.,.,\\") print(" | ':`.`----._____.---'.' 
|") print("l42 \\ `:\"\"\"-------'\"\"' | |") print(" ',-,-', .'-=,=,") print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") def draw_squirtle(): print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~') print(' ___ ') print(" _.-~~ ~~~-. ") print(" / _ ~. ") print(" |#` /#` \\ ") print(" |-'| |-'| | ") print(" /-- -- |-. ") print(" \\__ . . / /\\_ ") print(" \\ ~~--___---~~/\\| | ~-. ") print(".---`~~--____---_) \\ \\-__ \\ ") print(") < |__ __\\_ \\ \\ | ") print("~-.__ / ~~~~ \\ \\ \\ | ") print(" ~-. | .~-.-' | | ") print(" | \\___|___/ / / |") print(" | / | \\ | | / |") print(" \\ | ~-___ \\ \\/ / ") print(" /\\__ / `._ / ~-\\ \\_/ ") print(" / \\_____| |`~ ") print(" | | | | ") print(" \\ | | | ") print(" >______) /_/\\/\\_\\ ") print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~') if __name__ == '__main__': main()
36.85
68
0.303935
210
3,685
4.928571
0.480952
0.376812
0.507246
0.637681
0.188406
0.188406
0.188406
0.188406
0.188406
0.188406
0
0.11234
0.36228
3,685
99
69
37.222222
0.328085
0.056174
0
0.075949
0
0
0.719308
0.159366
0
0
0
0
0
1
0.050633
true
0
0.012658
0
0.063291
0.860759
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
7
38395a1883e581d1422f548a4cc7e8ab675158ee
40,230
py
Python
tests/st/ops/ascend/dynamic_shape/test_dynamic_all.py
tianjiashuo/akg
a9cbf642063fb1086a93e8bc6be6feb145689817
[ "Apache-2.0" ]
286
2020-06-23T06:40:44.000Z
2022-03-30T01:27:49.000Z
tests/st/ops/ascend/dynamic_shape/test_dynamic_all.py
tianjiashuo/akg
a9cbf642063fb1086a93e8bc6be6feb145689817
[ "Apache-2.0" ]
10
2020-07-31T03:26:59.000Z
2021-12-27T15:00:54.000Z
tests/st/ops/ascend/dynamic_shape/test_dynamic_all.py
tianjiashuo/akg
a9cbf642063fb1086a93e8bc6be6feb145689817
[ "Apache-2.0" ]
30
2020-07-17T01:04:14.000Z
2021-12-27T14:05:19.000Z
import sys from collections.abc import Iterable from tests.common import boot from tests.common.test_run.ascend.batchmatmul_run import batchmatmul_execute from tests.common.test_run import reduce_sum_run def run_case(*args, **kwargs): if len(sys.argv) >= 3: case_name = sys.argv[2] if len(args) < 2: return if not (isinstance(args[0], Iterable) and case_name in args[0]): if not (isinstance(args[1], Iterable) and case_name in args[1]): return boot.run(*args, **kwargs) def run_conv_case(*args, **kwargs): if len(sys.argv) >= 3: case_name = sys.argv[2] if len(args) < 2: return if not (isinstance(args[0], Iterable) and case_name in args[0]): if not (isinstance(args[1], Iterable) and case_name in args[1]): return boot.run_conv(*args, **kwargs) def test_dynamic_manual(): run_case("002_cast_test_case_1_23_dim_2", "cast_run", ((1, 23), "float16", "float32"), (((1, 0), (23, 0)),"dynamic")) run_case("relu_001_gx", "relu_run", ((1, 128), "float16", 1e-5), ((16, 0), (1, 0)),"dynamic") run_case("test_squeeze_16_1__1", "squeeze_run", [(16, 1), 1, "int32", "squeeze"], [(16, 16), (1, 1)],"dynamic") run_case("argmax_001", "argmax_run", ((3, 1020), "float32", 1),"dynamic") run_case("reshape_010", "reshape_run", [(32, 2048, 1, 1), (32, 2048), "float16"],"dynamic") run_case("001_equal_count", "equal_count_run", (((32,), (32,)), "int32", "equal_count"),"dynamic") run_case("softmax_01", "softmax_run", ((16, 1024), "float16", -1, "cce_softmax_fp16"),"dynamic") run_case("bias_add_fp16_002", "bias_add_run", ([32, 1001], "DefaultFormat", "float16"), [(1, 1), (1, 1)],"dynamic") run_case("001_sum", reduce_sum_run, ((2, 3, 5), (0, 1), False, "float32"),"dynamic") run_case("mean_01", "mean_run", ((8,), "float16", (0,), False, "cce_mean_1_64_fp16"),"dynamic") run_case("five2four_009", "five2four_run", ([32, 2048, 1, 1], "float16", 'NCHW', "float16"),"dynamic") run_case("four2five_016", "four2five_run", ([1, 1024, 14, 14], "float16", 'NCHW', 'float16'),"dynamic") run_case("resnet50_maxpool_fp16_c", 
"maxpool_with_argmax_run", ((32, 4, 112, 112, 16), (3, 3), (2, 2), (0, 1, 0, 1), True, "float16"),"dynamic") run_case("resnet50_Bn5dFp16Ref01", "fused_batch_norm_run", ((32, 4, 112, 112, 16), "float32", 0.99, 1e-5, True, "NC1HWC0", None, "resnet50_Bn5dFp16Ref01"),"dynamic") def test_dynamic_auto(): run_case("cast_test_case", "cast_run", ((1, 8192), "float16", "float32"),"dynamic") run_case("test_resnet50_relu_002", "relu_run", ((32, 128, 7, 7, 16), "float32", 1e-5), "dynamic") run_case("test_squeeze_16_1__1", "squeeze_run", [(16, 1), 1, "int32", "squeeze"],"dynamic") run_case("argmax_001", "argmax_run", ((3, 1020), "float32", 1),"dynamic") run_case("reshape_010", "reshape_run", [(32, 2048, 1, 1), (32, 2048), "float16"],"dynamic") run_case("001_equal_count", "equal_count_run", (((32,), (32,)), "int32", "equal_count"),"dynamic") run_case("softmax_01", "softmax_run", ((16, 1024), "float16", -1, "cce_softmax_fp16"),"dynamic") run_case("bias_add_fp16_002", "bias_add_run", ([32, 1001], "DefaultFormat", "float16"),"dynamic") run_case("001_sum", reduce_sum_run, ((2, 3, 5), (0, 1), False, "float32"),"dynamic") run_case("mean_01", "mean_run", ((8,), "float16", (0,), False, "cce_mean_1_64_fp16"),"dynamic") run_case("five2four_009", "five2four_run", ([32, 2048, 1, 1], "float16", 'NCHW', "float16"),"dynamic") run_case("four2five_016", "four2five_run", ([1, 1024, 14, 14], "float16", 'NCHW', 'float16'),"dynamic") run_case("resnet50_maxpool_fp16_c", "maxpool_run", ((32, 4, 112, 112, 16), (3, 3), (2, 2), (1, 1, 1, 1), True, "float16"),"dynamic") run_case("resnet50_Bn5dFp16Ref01", "fused_batch_norm_run", ((32, 4, 112, 112, 16), "float32", 0.99, 1e-5, True, "NC1HWC0", None, "resnet50_Bn5dFp16Ref01"),"dynamic") def test_static_shape(): run_case("002_cast_test_case_1_23_dim_2", "cast_run", ((1, 23), "float16", "float32")) run_case("relu_001_gx", "relu_run", ((1, 128), "float16", 1e-5)) run_case("test_squeeze_16_1__1", "squeeze_run", [(16, 1), 1, "int32", "squeeze"]) 
run_case("argmax_001", "argmax_run", ((3, 1020), "float32", 1),) run_case("reshape_010", "reshape_run", [(32, 2048, 1, 1), (32, 2048), "float16"],) run_case("001_equal_count", "equal_count_run", (((32,), (32,)), "int32", "equal_count")) run_case("softmax_01", "softmax_run", ((16, 1024), "float16", -1, "cce_softmax_fp16")) run_case("bias_add_fp16_002", "bias_add_run", ([32, 1001], "DefaultFormat", "float16")) run_case("001_sum", reduce_sum_run, ((2, 3, 5), (0, 1), False, "float32")) run_case("mean_01", "mean_run", ((8,), "float16", (0,), False, "cce_mean_1_64_fp16")) run_case("five2four_009", "five2four_run", ([32, 2048, 1, 1], "float16", 'NCHW', "float16")) run_case("four2five_016", "four2five_run", ([1, 1024, 14, 14], "float16", 'NCHW', 'float16')) run_case("resnet50_maxpool_fp16_c", "maxpool_run", ((32, 4, 112, 112, 16), (3, 3), (2, 2), (1, 1, 1, 1), True, "float16")) run_case("resnet50_Bn5dFp16Ref01", "fused_batch_norm_run", ((32, 4, 112, 112, 16), "float32", 0.99, 1e-5, True, "NC1HWC0", None, "resnet50_Bn5dFp16Ref01")) run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)) def test_all_dynamic_conv(): run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False) , [1, 16, 16, 16, 16, 7, 128], "dynamic") def test_partial_dynamic_conv(): run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False) , [1, 16, 16, 16, 16, 7, 128], "partial_dynamic") def test_partial_dynamic_conv_perf(): run_conv_case("conv_run001", "conv_run", ((1, 1024, 14, 14), (2048, 1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [14, 2048, 64, 128, 128, 14, 64], "partial_dynamic", "bypassL1") run_conv_case("conv_run002", "conv_run", ((1, 1024, 14, 14), (256, 1024, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [14, 256, 208, 64, 128, 14, 64], "partial_dynamic") run_conv_case("conv_run003", "conv_run", ((1, 1024, 14, 14), (512, 
1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [14, 512, 64, 32, 512, 14, 64], "partial_dynamic", "bypassL1") run_conv_case("conv_run004", "conv_run", ((1, 128, 28, 28), (128, 128, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [30, 128, 112, 32, 128, 30, 8], "partial_dynamic") run_conv_case("conv_run005", "conv_run", ((1, 128, 28, 28), (512, 128, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [28, 512, 784, 16, 32, 28, 8], "partial_dynamic") run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [7, 512, 64, 32, 512, 7, 128], "partial_dynamic", "bypassL1") run_conv_case("conv_run007", "conv_run", ((1, 256, 14, 14), (1024, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [7, 1024, 112, 32, 256, 14, 16], "partial_dynamic") run_conv_case("conv_run008", "conv_run", ((1, 256, 14, 14), (256, 256, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [16, 256, 208, 64, 128, 16, 16], "partial_dynamic", "bypassL1") run_conv_case("conv_run009", "conv_run", ((1, 256, 56, 56), (128, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [7, 128, 252, 64, 128, 56, 16], "partial_dynamic") run_conv_case("conv_run010", "conv_run", ((1, 256, 56, 56), (64, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [8, 64, 224, 16, 64, 56, 16], "partial_dynamic") run_conv_case("conv_run011", "conv_run", ((1, 3, 224, 224), (64, 3, 7, 7), (2, 3, 2, 3), (2, 2), (1, 1), False), [61, 64, 448, 16, 64, 230, 1], "partial_dynamic", "bypassL1"), run_conv_case("conv_run012", "conv_run", ((1, 512, 28, 28), (128, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [14, 128, 448, 16, 64, 28, 32], "partial_dynamic") run_conv_case("conv_run013", "conv_run", ((1, 512, 28, 28), (256, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [13, 256, 112, 64, 256, 28, 32], "partial_dynamic") run_conv_case("conv_run014", "conv_run", ((1, 512, 7, 7), (2048, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [7, 2048, 64, 16, 512, 7, 32], "partial_dynamic", "bypassL1") 
run_conv_case("conv_run015", "conv_run", ((1, 512, 7, 7), (512, 512, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [9, 512, 49, 32, 512, 9, 32], "partial_dynamic", "bypassL1") run_conv_case("conv_run016", "conv_run", ((1, 64, 56, 56), (256, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [56, 256, 784, 16, 32, 56, 4], "partial_dynamic", "bypassL1") run_conv_case("conv_run017", "conv_run", ((1, 64, 56, 56), (64, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [56, 64, 784, 16, 32, 56, 4], "partial_dynamic") run_conv_case("conv_run018", "conv_run", ((1, 64, 56, 56), (64, 64, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [58, 64, 448, 16, 64, 58, 4], "partial_dynamic") run_conv_case("conv_run019", "conv_run", ((1, 256, 56, 56), (512, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [7, 512, 196, 64, 256, 56, 16], "partial_dynamic") run_conv_case("conv_run020", "conv_run", ((1, 512, 28, 28), (1024, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [13, 1024, 112, 32, 256, 28, 32], "partial_dynamic", "bypassL1") def test_partial_dynamic_conv_autotiling(): run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False) ,"partial_dynamic") def test_static_conv(): run_conv_case("conv_run001", "conv_run", ((1, 1024, 14, 14), (2048, 1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False)), # run_conv_case("conv_run002", "conv_run", ((1, 1024, 14, 14), (256, 1024, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run003", "conv_run", ((1, 1024, 14, 14), (512, 1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False)), # run_conv_case("conv_run004", "conv_run", ((1, 128, 28, 28), (128, 128, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False)), # run_conv_case("conv_run005", "conv_run", ((1, 128, 28, 28), (512, 128, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run007", "conv_run", ((1, 256, 
14, 14), (1024, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run008", "conv_run", ((1, 256, 14, 14), (256, 256, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False)), # run_conv_case("conv_run009", "conv_run", ((1, 256, 56, 56), (128, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False)), # run_conv_case("conv_run010", "conv_run", ((1, 256, 56, 56), (64, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run011", "conv_run", ((1, 3, 224, 224), (64, 3, 7, 7), (2, 3, 2, 3), (2, 2), (1, 1), False)), # run_conv_case("conv_run012", "conv_run", ((1, 512, 28, 28), (128, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run013", "conv_run", ((1, 512, 28, 28), (256, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False)), # run_conv_case("conv_run014", "conv_run", ((1, 512, 7, 7), (2048, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run015", "conv_run", ((1, 512, 7, 7), (512, 512, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False)), # run_conv_case("conv_run016", "conv_run", ((1, 64, 56, 56), (256, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run017", "conv_run", ((1, 64, 56, 56), (64, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)), # run_conv_case("conv_run018", "conv_run", ((1, 64, 56, 56), (64, 64, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False)), # run_conv_case("conv_run019", "conv_run", ((1, 256, 56, 56), (512, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False)), # run_conv_case("conv_run020", "conv_run", ((1, 512, 28, 28), (1024, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False)), # run_conv_case("conv_run021", "conv_run", ((1, 256, 56, 56), ( 128, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)),#1.5 # run_conv_case("conv_run022", "conv_run", ((1, 512, 28, 28), ( 256, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)),#1.5 # run_conv_case("conv_run023", "conv_run", ((1,1024, 14, 14), ( 512, 1024, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)),#1.5 # 
run_conv_case("conv_run024", "conv_run", ((1, 128, 56, 56), ( 128, 128, 3, 3), (0, 1, 0, 1), (2, 2), (1, 1), False)),#1.5 # run_conv_case("conv_run025", "conv_run", ((1, 256, 28, 28), ( 256, 256, 3, 3), (0, 1, 0, 1), (2, 2), (1, 1), False)),#1.5 # run_conv_case("conv_run026", "conv_run", ((1, 512, 14, 14), ( 512, 512, 3, 3), (0, 1, 0, 1), (2, 2), (1, 1), False)),#1.5 def mini_ci_conv(): run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False)) # resnet50 conv layer run_conv_case("conv_run001", "conv_run", ((1, 1024, 14, 14), (2048, 1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 14, 64], "partial_dynamic") run_conv_case("conv_run002", "conv_run", ((1, 1024, 14, 14), (256, 1024, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 14, 64], "partial_dynamic") run_conv_case("conv_run003", "conv_run", ((1, 1024, 14, 14), (512, 1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 14, 64], "partial_dynamic") run_conv_case("conv_run004", "conv_run", ((1, 128, 28, 28), (128, 128, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 30, 8], "partial_dynamic"), run_conv_case("conv_run005", "conv_run", ((1, 128, 28, 28), (512, 128, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 28, 8], "partial_dynamic") run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 7, 128], "partial_dynamic") run_conv_case("conv_run007", "conv_run", ((1, 256, 14, 14), (1024, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 14, 16], "partial_dynamic") run_conv_case("conv_run008", "conv_run", ((1, 256, 14, 14), (256, 256, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 16, 16], "partial_dynamic"), run_conv_case("conv_run009", "conv_run", ((1, 256, 56, 56), (128, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 56, 16], 
"partial_dynamic") run_conv_case("conv_run010", "conv_run", ((1, 256, 56, 56), (64, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 56, 16], "partial_dynamic") run_conv_case("conv_run011", "conv_run", ((1, 3, 224, 224), (64, 3, 7, 7), (2, 3, 2, 3), (2, 2), (1, 1), False), [13, 16, 16, 16, 16, 230, 1], "partial_dynamic"), run_conv_case("conv_run012", "conv_run", ((1, 512, 28, 28), (128, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 28, 32], "partial_dynamic") run_conv_case("conv_run013", "conv_run", ((1, 512, 28, 28), (256, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 28, 32], "partial_dynamic") run_conv_case("conv_run014", "conv_run", ((1, 512, 7, 7), (2048, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 7, 32], "partial_dynamic") run_conv_case("conv_run015", "conv_run", ((1, 512, 7, 7), (512, 512, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 9, 32], "partial_dynamic"), run_conv_case("conv_run016", "conv_run", ((1, 64, 56, 56), (256, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 56, 4], "partial_dynamic") run_conv_case("conv_run017", "conv_run", ((1, 64, 56, 56), (64, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 56, 4], "partial_dynamic") run_conv_case("conv_run018", "conv_run", ((1, 64, 56, 56), (64, 64, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 58, 4], "partial_dynamic"), run_conv_case("conv_run019", "conv_run", ((1, 256, 56, 56), (512, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 56, 16], "partial_dynamic") run_conv_case("conv_run020", "conv_run", ((1, 512, 28, 28), (1024, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 28, 32], "partial_dynamic") run_conv_case("conv_run001", "conv_run", ((1, 1024, 14, 14), (2048, 1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 14, 64], "dynamic") run_conv_case("conv_run002", 
"conv_run", ((1, 1024, 14, 14), (256, 1024, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 14, 64], "dynamic") run_conv_case("conv_run003", "conv_run", ((1, 1024, 14, 14), (512, 1024, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 14, 64], "dynamic") run_conv_case("conv_run004", "conv_run", ((1, 128, 28, 28), (128, 128, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 30, 8], "dynamic"), run_conv_case("conv_run005", "conv_run", ((1, 128, 28, 28), (512, 128, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 28, 8], "dynamic") run_conv_case("conv_run006", "conv_run", ((1, 2048, 7, 7), (512, 2048, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 7, 128], "dynamic") run_conv_case("conv_run007", "conv_run", ((1, 256, 14, 14), (1024, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 14, 16], "dynamic") run_conv_case("conv_run008", "conv_run", ((1, 256, 14, 14), (256, 256, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 16, 16], "dynamic"), run_conv_case("conv_run009", "conv_run", ((1, 256, 56, 56), (128, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 56, 16], "dynamic") run_conv_case("conv_run010", "conv_run", ((1, 256, 56, 56), (64, 256, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 56, 16], "dynamic") run_conv_case("conv_run011", "conv_run", ((1, 3, 224, 224), (64, 3, 7, 7), (2, 3, 2, 3), (2, 2), (1, 1), False), [13, 16, 16, 16, 16, 230, 1], "dynamic"), run_conv_case("conv_run012", "conv_run", ((1, 512, 28, 28), (128, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 28, 32], "dynamic") run_conv_case("conv_run013", "conv_run", ((1, 512, 28, 28), (256, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 28, 32], "dynamic") run_conv_case("conv_run014", "conv_run", ((1, 512, 7, 7), (2048, 512, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 7, 32], "dynamic") 
run_conv_case("conv_run015", "conv_run", ((1, 512, 7, 7), (512, 512, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 9, 32], "dynamic"), run_conv_case("conv_run016", "conv_run", ((1, 64, 56, 56), (256, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 56, 4], "dynamic") run_conv_case("conv_run017", "conv_run", ((1, 64, 56, 56), (64, 64, 1, 1), (0, 0, 0, 0), (1, 1), (1, 1), False), [1, 16, 16, 16, 16, 56, 4], "dynamic") run_conv_case("conv_run018", "conv_run", ((1, 64, 56, 56), (64, 64, 3, 3), (1, 1, 1, 1), (1, 1), (1, 1), False), [3, 16, 16, 16, 16, 58, 4], "dynamic"), run_conv_case("conv_run019", "conv_run", ((1, 256, 56, 56), (512, 256, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 56, 16], "dynamic") run_conv_case("conv_run020", "conv_run", ((1, 512, 28, 28), (1024, 512, 1, 1), (0, 0, 0, 0), (2, 2), (1, 1), False), [1, 16, 16, 16, 16, 28, 32], "dynamic") def mini_ci(): # all these cases are passed in earlier version, please make sure they are still passed after your commit # DYNAMIC SHAPE # run_case("002_cast_test_case_1_23_dim_2", "cast_run", ((1, 23), "float16", "float32"),"dynamic") run_case("resnet50_maxpool_fp16_c", "maxpool_with_argmax_run", ((2, 2, 15, 15, 16), (3, 3), (2, 2), (0, 0, 0, 0), True, "float16"),"dynamic") run_case("relu_001_gx", "relu_run", ((1, 128), "float16", 1e-5),"dynamic") run_case("test_squeeze_16_1__1", "squeeze_run", [(16, 1), 1, "int32", "squeeze"],"dynamic") run_case("reshape_010", "reshape_run", [(32, 2048, 1, 1), (32, 2048), "float16"],"dynamic") mini_ci_conv() # # STATIC SHAPE # run_case("002_cast_test_case_1_23_dim_2", "cast_run", ((1, 23), "float16", "float32")) run_case("001_equal_count", "equal_count_run", (((32,), (32,)), "int32", "equal_count")) run_case("mean_01", "mean_run", ((8,), "float16", (0,), False, "cce_mean_1_64_fp16")) run_case("relu_001_gx", "relu_run", ((1, 128), "float16", 1e-5)) run_case("test_squeeze_16_1__1", "squeeze_run", [(16, 1), 1, "int32", 
"squeeze"]) run_case("reshape_010", "reshape_run", [(32, 2048, 1, 1), (32, 2048), "float16"],) run_case("001_equal_count", "equal_count_run", (((32,), (32,)), "int32", "equal_count")) run_case("softmax_01", "softmax_run", ((16, 1024), "float16", -1, "cce_softmax_fp16")) run_case("bias_add_fp16_002", "bias_add_run", ([32, 1001], "DefaultFormat", "float16")) # run_case("001_sum", reduce_sum_run, ((2, 3, 5), (0, 1), False, "float32")) # output nan run_case("mean_01", "mean_run", ((8,), "float16", (0,), False, "cce_mean_1_64_fp16")) run_case("five2four_009", "five2four_run", ([32, 2048, 1, 1], "float16", 'NCHW', "float16")) def test_dynamic_bn(): run_case("test_resnet50_bn_5D_reference000", "fused_batch_norm_run", ((32, 128, 7, 7, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference001", "fused_batch_norm_run", ((32, 16, 14, 14, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference002", "fused_batch_norm_run", ((32, 16, 56, 56, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference003", "fused_batch_norm_run", ((32, 32, 28, 28, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference004", "fused_batch_norm_run", ((32, 32, 7, 7, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference005", "fused_batch_norm_run", ((32, 4, 112, 112, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference006", "fused_batch_norm_run", ((32, 4, 56, 56, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference007", "fused_batch_norm_run", ((32, 64, 14, 14, 16), 
"float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference008", "fused_batch_norm_run", ((32, 8, 28, 28, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference01"), "dynamic") run_case("test_resnet50_bn_5D_reference009", "fused_batch_norm_run", ((32, 8, 56, 56, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference010"), "dynamic") run_case("test_resnet50_bn_5D_reference010", "fused_batch_norm_run", ((32, 16, 28, 28, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference011"), "dynamic") run_case("test_resnet50_bn_5D_reference011", "fused_batch_norm_run", ((32, 32, 14, 14, 16), "float32", 0.1, 1e-4, False, "NC1HWC0", None, "resnet50_bn_5D_reference012"), "dynamic") def test_dynamic_matmul(): #run_case("test_resnet50_matmul_000", batchmatmul_execute, ((), 32, 10, 2048, (10,), "float32", False, True, "batchmatmul_output"), "dynamic") run_case("test_resnet50_matmul_001", batchmatmul_execute, ((), 2048, 10, 32, (), "float32", True, False, "batchmatmul_output"), "dynamic") run_case("test_resnet50_matmul_002", batchmatmul_execute, ((), 32, 2048, 10, (), "float32", False, False, "batchmatmul_output"), "dynamic") run_case("test_resnet50_matmul_003", batchmatmul_execute, ((), 2048, 1001, 32, (), "float32", True, False, "batchmatmul_output"), "dynamic") run_case("test_resnet50_matmul_004", batchmatmul_execute, ((), 32, 2048, 1001, (), "float32", False, False, "batchmatmul_output"), "dynamic") #run_case("test_resnet50_matmul_005", batchmatmul_execute, ((), 32, 1001, 2048, (1001,), "float32", False, True, "batchmatmul_output"), "dynamic") def test_dynamic_resnet50(): mini_ci_conv() test_dynamic_bn() test_dynamic_matmul() # mean run_case("test_resnet50_mean_000", "mean_run", ((32, 128, 7, 7, 16), "float32", (2, 3), True, "cce_mean"), "dynamic") run_case("test_resnet50_mean_001", "mean_run", ((32, 128, 7, 7, 16), "float16", (2, 3), True, 
"cce_mean"), "dynamic") # relu run_case("test_resnet50_relu_000", "relu_run", ((32, 128, 7, 7, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_001", "relu_run", ((32, 16, 14, 14, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_002", "relu_run", ((32, 16, 56, 56, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_003", "relu_run", ((32, 32, 28, 28, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_004", "relu_run", ((32, 32, 7, 7, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_005", "relu_run", ((32, 4, 112, 112, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_006", "relu_run", ((32, 4, 56, 56, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_007", "relu_run", ((32, 64, 14, 14, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_008", "relu_run", ((32, 8, 28, 28, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_009", "relu_run", ((32, 8, 56, 56, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_010", "relu_run", ((32, 16, 28, 28, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_011", "relu_run", ((32, 32, 14, 14, 16), "float32", 1e-5), "dynamic") run_case("test_resnet50_relu_012", "relu_run", ((32, 128, 7, 7, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_013", "relu_run", ((32, 16, 14, 14, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_014", "relu_run", ((32, 16, 56, 56, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_015", "relu_run", ((32, 32, 28, 28, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_016", "relu_run", ((32, 32, 7, 7, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_017", "relu_run", ((32, 4, 112, 112, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_018", "relu_run", ((32, 4, 56, 56, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_019", "relu_run", ((32, 64, 14, 14, 16), "float16", 1e-5), 
"dynamic") run_case("test_resnet50_relu_020", "relu_run", ((32, 8, 28, 28, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_021", "relu_run", ((32, 8, 56, 56, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_022", "relu_run", ((32, 16, 28, 28, 16), "float16", 1e-5), "dynamic") run_case("test_resnet50_relu_023", "relu_run", ((32, 32, 14, 14, 16), "float16", 1e-5), "dynamic") # Add run_case("test_resnet50_add_000", "add_run", ([32, 128, 7, 7, 16], [32, 128, 7, 7, 16], "float32", "cce_add_fp32"), "dynamic") run_case("test_resnet50_add_001", "add_run", ([32, 16, 56, 56, 16], [32, 16, 56, 56, 16], "float32", "cce_add_fp32"), "dynamic") run_case("test_resnet50_add_002", "add_run", ([32, 32, 28, 28, 16], [32, 32, 28, 28, 16], "float32", "cce_add_fp32"), "dynamic") run_case("test_resnet50_add_003", "add_run", ([32, 64, 14, 14, 16], [32, 64, 14, 14, 16], "float32", "cce_add_fp32"), "dynamic") run_case("test_resnet50_add_004", "add_run", ([32, 128, 7, 7, 16], [32, 128, 7, 7, 16], "float16", "cce_add_fp16"), "dynamic") run_case("test_resnet50_add_005", "add_run", ([32, 16, 56, 56, 16], [32, 16, 56, 56, 16], "float16", "cce_add_fp16"), "dynamic") run_case("test_resnet50_add_006", "add_run", ([32, 32, 28, 28, 16], [32, 32, 28, 28, 16], "float16", "cce_add_fp16"), "dynamic") run_case("test_resnet50_add_007", "add_run", ([32, 64, 14, 14, 16], [32, 64, 14, 14, 16], "float16", "cce_add_fp16"), "dynamic") # bias_add run_case("test_resnet50_bias_add_000", "bias_add_run", ([32, 10], "DefaultFormat", "float32"), "dynamic") run_case("test_resnet50_bias_add_001", "bias_add_run", ([32, 1001], "DefaultFormat", "float32"), "dynamic") run_case("test_resnet50_bias_add_002", "bias_add_run", ([32, 10], "DefaultFormat", "float16"), "dynamic") run_case("test_resnet50_bias_add_003", "bias_add_run", ([32, 1001], "DefaultFormat", "float16"), "dynamic") # reshape run_case("test_resnet50_reshape_000", "reshape_run", [(32, 2048, 1, 1), (32, 2048), "float32"], "dynamic") 
run_case("test_resnet50_reshape_001", "reshape_run", [(32, 2048), (32, 2048, 1, 1), "float32"], "dynamic") run_case("test_resnet50_reshape_002", "reshape_run", [(32, 2048, 1, 1), (32, 2048), "float16"], "dynamic") run_case("test_resnet50_reshape_003", "reshape_run", [(32, 2048), (32, 2048, 1, 1), "float16"], "dynamic") # cast run_case("test_resnet50_cast_000", "cast_run", ((64, 128, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_001", "cast_run", ((32, 64, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_002", "cast_run", ((16, 32, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_003", "cast_run", ((4, 16, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_004", "cast_run", ((49, 4, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_005", "cast_run", ((32, 4, 112, 112, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_006", "cast_run", ((32, 4, 56, 56, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_007", "cast_run", ((32, 16, 56, 56, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_008", "cast_run", ((36, 4, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_009", "cast_run", ((4, 4, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_010", "cast_run", ((32, 4, 56, 56, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_011", "cast_run", ((16, 4, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_012", "cast_run", ((32, 16, 56, 56, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_013", "cast_run", ((32, 32, 28, 28, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_014", "cast_run", ((8, 32, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_015", "cast_run", ((72, 8, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_016", "cast_run", ((16, 8, 
16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_017", "cast_run", ((32, 8, 56, 56, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_018", "cast_run", ((32, 8, 56, 56, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_019", "cast_run", ((32, 8, 28, 28, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_020", "cast_run", ((32, 8, 28, 28, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_021", "cast_run", ((32, 8, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_022", "cast_run", ((32, 32, 28, 28, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_023", "cast_run", ((32, 64, 14, 14, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_024", "cast_run", ((16, 64, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_025", "cast_run", ((144, 16, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_026", "cast_run", ((32, 16, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_027", "cast_run", ((32, 16, 28, 28, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_028", "cast_run", ((32, 16, 28, 28, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_029", "cast_run", ((32, 16, 14, 14, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_030", "cast_run", ((32, 16, 14, 14, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_031", "cast_run", ((64, 16, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_032", "cast_run", ((32, 64, 14, 14, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_033", "cast_run", ((32, 128, 7, 7, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_034", "cast_run", ((32, 128, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_035", "cast_run", ((288, 32, 16, 16), "float32", "float16"), "dynamic") 
run_case("test_resnet50_cast_036", "cast_run", ((64, 32, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_037", "cast_run", ((32, 32, 14, 14, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_038", "cast_run", ((32, 32, 14, 14, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_039", "cast_run", ((32, 32, 7, 7, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_040", "cast_run", ((32, 32, 7, 7, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_041", "cast_run", ((128, 32, 16, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_042", "cast_run", ((32, 128, 7, 7, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_043", "cast_run", ((32, 4, 112, 112, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_044", "cast_run", ((32, 128, 1, 1, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_045", "cast_run", ((32, 2048, 1, 1), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_048", "cast_run", ((64, 128, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_049", "cast_run", ((32, 64, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_050", "cast_run", ((16, 32, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_051", "cast_run", ((4, 16, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_052", "cast_run", ((49, 4, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_053", "cast_run", ((36, 4, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_054", "cast_run", ((4, 4, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_055", "cast_run", ((16, 4, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_056", "cast_run", ((8, 32, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_057", "cast_run", ((72, 8, 16, 16), "float16", 
"float32"), "dynamic") run_case("test_resnet50_cast_058", "cast_run", ((16, 8, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_059", "cast_run", ((32, 8, 56, 56, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_060", "cast_run", ((32, 8, 56, 56, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_061", "cast_run", ((32, 8, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_062", "cast_run", ((16, 64, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_063", "cast_run", ((144, 16, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_064", "cast_run", ((32, 16, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_065", "cast_run", ((32, 16, 28, 28, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_066", "cast_run", ((32, 16, 28, 28, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_067", "cast_run", ((64, 16, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_068", "cast_run", ((32, 128, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_069", "cast_run", ((288, 32, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_070", "cast_run", ((64, 32, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_071", "cast_run", ((32, 32, 14, 14, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_072", "cast_run", ((32, 32, 14, 14, 16), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_073", "cast_run", ((128, 32, 16, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_074", "cast_run", ((32, 2048, 1, 1), "float32", "float16"), "dynamic") run_case("test_resnet50_cast_075", "cast_run", ((32, 128, 1, 1, 16), "float16", "float32"), "dynamic") run_case("test_resnet50_cast_080", "cast_run", ((64, 128, 16, 16), "bool", "int32"), "dynamic") # four2five run_case("test_resnet50_four2five_000", 
"four2five_run", ([32, 3, 224, 224], "float32", "NCHW", "float16"), "dynamic") run_case("test_resnet50_four2five_001", "four2five_run", ([32, 2048, 7, 7], "float32", "NCHW", "float16"), "dynamic") run_case("test_resnet50_four2five_002", "four2five_run", ([32, 224, 224, 3], "float32", 'NHWC', "float16"), "dynamic") run_case("test_resnet50_four2five_003", "four2five_run", ([32, 3, 224, 224], "float16", "NCHW", "float16"), "dynamic") run_case("test_resnet50_four2five_004", "four2five_run", ([32, 2048, 7, 7], "float16", "NCHW", "float16"), "dynamic") run_case("test_resnet50_four2five_005", "four2five_run", ([32, 224, 224, 3], "float16", 'NHWC', "float16"), "dynamic") # five2four run_case("test_resnet50_five2four_000", "five2four_run", ([32, 2048, 1, 1], "float16", "NCHW", "float16"), "dynamic") run_case("test_resnet50_five2four_001", "five2four_run", ([32, 2048, 1, 1], "float32", "NCHW", "float16"), "dynamic") # softmax run_case("test_resnet50_softmax_001", "softmax_run", ((32, 10), "float16", -1, "softmax_16"), "dynamic") run_case("test_resnet50_softmax_002", "softmax_run", ((32, 10), "float32", -1, "softmax_32"), "dynamic") run_case("test_resnet50_softmax_003", "softmax_run", ((32, 1001), "float16", -1, "softmax_16"), "dynamic") run_case("test_resnet50_softmax_004", "softmax_run", ((32, 1001), "float32", -1, "softmax_32"), "dynamic") # argmax run_case("test_resnet50_argmax_001", "argmax_run", ((32, 10), "float16", -1), "dynamic") run_case("test_resnet50_argmax_002", "argmax_run", ((32, 10), "float32", -1), "dynamic") run_case("test_resnet50_argmax_003", "argmax_run", ((32, 1001), "float16", -1), "dynamic") run_case("test_resnet50_argmax_004", "argmax_run", ((32, 1001), "float32", -1), "dynamic") # EqualCount run_case("test_resnet50_equal_count_001", "equal_count_run", (((32,), (32,)), "int32", "equal_count"), "dynamic") def main(argv): if argv: if argv[0] == "m": test_dynamic_manual() elif argv[0] == "a": test_dynamic_auto() elif argv[0] == "s": test_static_shape() 
elif argv[0] == "ac": test_all_dynamic_conv() elif argv[0] == "c": test_partial_dynamic_conv() elif argv[0] == "cp": test_partial_dynamic_conv_perf() elif argv[0] == "ca": test_partial_dynamic_conv_autotiling() elif argv[0] == "sc": test_static_conv() elif argv[0] == "ci": mini_ci() elif argv[0] == "cic": mini_ci_conv() elif argv[0] == "r": test_dynamic_resnet50() elif argv[0] == "mat": test_dynamic_matmul() else: test_dynamic_manual() test_dynamic_auto() mini_ci_conv() if __name__ == "__main__": main(sys.argv[1:])
89.599109
187
0.596918
6,625
40,230
3.370566
0.041358
0.034214
0.025526
0.130184
0.902956
0.872459
0.826691
0.781236
0.763681
0.737125
0
0.210288
0.15921
40,230
448
188
89.799107
0.449874
0.088367
0
0.198324
0
0
0.356772
0.115729
0
0
0
0
0
1
0.044693
false
0.02514
0.013966
0
0.069832
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
69df9604b5be562e342715921a8d71bfad16be74
3,162
py
Python
questionnaires/migrations/0019_auto_20211004_1113.py
ChrisMarsh82/iogt
8141421a79b73bd038880a3be92fa6809adced13
[ "BSD-2-Clause" ]
20
2021-04-29T12:36:25.000Z
2022-03-27T12:17:41.000Z
questionnaires/migrations/0019_auto_20211004_1113.py
ChrisMarsh82/iogt
8141421a79b73bd038880a3be92fa6809adced13
[ "BSD-2-Clause" ]
892
2021-02-02T13:56:06.000Z
2022-03-31T11:25:44.000Z
questionnaires/migrations/0019_auto_20211004_1113.py
ChrisMarsh82/iogt
8141421a79b73bd038880a3be92fa6809adced13
[ "BSD-2-Clause" ]
28
2021-02-19T19:28:37.000Z
2022-03-11T11:46:00.000Z
# Generated by Django 3.1.13 on 2021-10-04 11:13 from django.db import migrations, models import wagtail.core.blocks import wagtail.core.fields class Migration(migrations.Migration): dependencies = [ ('questionnaires', '0018_auto_20210929_1126'), ] operations = [ migrations.AddField( model_name='poll', name='index_page_description', field=models.TextField(blank=True, null=True), ), migrations.AddField( model_name='poll', name='index_page_description_line_2', field=models.TextField(blank=True, null=True), ), migrations.AddField( model_name='poll', name='randomise_options', field=models.BooleanField(default=False, help_text='Randomising the options allows the options to be shown in a different order each time the page is displayed.'), ), migrations.AddField( model_name='poll', name='terms_and_conditions', field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('page_button', wagtail.core.blocks.StructBlock([('page', wagtail.core.blocks.PageChooserBlock()), ('text', wagtail.core.blocks.CharBlock(max_length=255, required=False))]))], blank=True, null=True), ), migrations.AddField( model_name='pollformfield', name='admin_label', field=models.CharField(help_text='Column header used during CSV export of poll responses.', max_length=256, null=True, verbose_name='admin_label'), ), migrations.AddField( model_name='quiz', name='index_page_description', field=models.TextField(blank=True, null=True), ), migrations.AddField( model_name='quiz', name='index_page_description_line_2', field=models.TextField(blank=True, null=True), ), migrations.AddField( model_name='quiz', name='terms_and_conditions', field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('page_button', wagtail.core.blocks.StructBlock([('page', wagtail.core.blocks.PageChooserBlock()), ('text', wagtail.core.blocks.CharBlock(max_length=255, required=False))]))], blank=True, null=True), ), migrations.AddField( model_name='survey', 
name='index_page_description', field=models.TextField(blank=True, null=True), ), migrations.AddField( model_name='survey', name='index_page_description_line_2', field=models.TextField(blank=True, null=True), ), migrations.AddField( model_name='survey', name='terms_and_conditions', field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('page_button', wagtail.core.blocks.StructBlock([('page', wagtail.core.blocks.PageChooserBlock()), ('text', wagtail.core.blocks.CharBlock(max_length=255, required=False))]))], blank=True, null=True), ), ]
44.535211
302
0.631246
336
3,162
5.78869
0.279762
0.096144
0.113625
0.152699
0.746015
0.746015
0.730077
0.730077
0.730077
0.698201
0
0.01947
0.236559
3,162
70
303
45.171429
0.786247
0.014548
0
0.78125
1
0
0.191073
0.056519
0
0
0
0
0
1
0
false
0
0.046875
0
0.09375
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
387e2fd5c795211513ed82458a36b5f922eed3c9
39,772
py
Python
risk_managed_api/tests/test_rest_filtering.py
jteppinette/water-dragon-api
f916e7ec4f342741b727de2b1b285fb28770226d
[ "BSD-2-Clause" ]
null
null
null
risk_managed_api/tests/test_rest_filtering.py
jteppinette/water-dragon-api
f916e7ec4f342741b727de2b1b285fb28770226d
[ "BSD-2-Clause" ]
8
2020-02-11T21:51:21.000Z
2021-06-09T17:20:09.000Z
risk_managed_api/tests/test_rest_filtering.py
jteppinette/risk-managed-api
f916e7ec4f342741b727de2b1b285fb28770226d
[ "BSD-2-Clause" ]
null
null
null
import json from django.contrib.auth import get_user_model from rest_framework import status from rest_framework.test import APITestCase from risk_managed_api.models import Administrator, Event, Host, Invitee, Nationals, Procedure def create_user(username, password): return get_user_model().objects.create_user( username=username, email=username + "@" + username + ".com", password=password ) class RestFilteringTests(APITestCase): def test_filter_organization(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Organizations` self.client.post("/organizations/", {"name": "Kappa Sigma"}) self.client.post("/organizations/", {"name": "Sigma Alpha Epsilon"}) self.client.post("/organizations/", {"name": "Sigma Nu"}) self.client.post("/organizations/", {"name": "Theta Xi"}) response = self.client.post("/organizations/", {"name": "Kappa Alpha"}) kappa_alpha = json.loads(response.content) # Check all objects created response = self.client.get("/organizations/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 5) # Assert icontains filtering response = self.client.get("/organizations/", {"name": "APP"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) # Assert exact filtering response = self.client.get("/organizations/", {"id": str(kappa_alpha["id"])}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_university(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Universities` self.client.post( "/universities/", { "name": "Southern Polytechnic State University", "acronym": "SPSU", "state": "Georgia", }, ) self.client.post( "/universities/", {"name": "Georgia Insitute of Technology", "acronym": "GT", "state": "Georgia"}, ) self.client.post( "/universities/", {"name": "University 
of Georgia", "acronym": "UGA", "state": "Georgia"}, ) self.client.post( "/universities/", {"name": "University of West Georgia", "acronym": "UWG", "state": "Georgia"}, ) self.client.post( "/universities/", {"name": "University of Alabama", "acronym": "UA", "state": "Alabama"} ) response = self.client.post( "/universities/", {"name": "University of North Georgia", "acronym": "UNG", "state": "Georgia"}, ) ung = json.loads(response.content) # Check all objects created response = self.client.get("/universities/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 6) # Assert icontains filtering response = self.client.get("/universities/", {"name": "georgia"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 4) # Assert iexact state filtering response = self.client.get("/universities/", {"state": "alabama"}) content = json.loads(response.content)["results"] self.assertEquals(content[0]["acronym"], "UA") # Assert iexact acronym filtering response = self.client.get("/universities/", {"acronym": "spsu"}) content = json.loads(response.content)["results"] self.assertEquals(content[0]["name"], "Southern Polytechnic State University") # Assert exact filtering response = self.client.get("/universities/", {"id": str(ung["id"])}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_host(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Organizations` response = self.client.post("/organizations/", {"name": "Kappa Sigma"}) ks = json.loads(response.content) response = self.client.post("/organizations/", {"name": "Sigma Alpha Epsilon"}) sae = json.loads(response.content) # Create `Universities` response = self.client.post( "/universities/", { "name": "Southern Polytechnic State University", "acronym": "SPSU", "state": "Georgia", }, ) spsu = json.loads(response.content) response = 
self.client.post( "/universities/", {"name": "Georgia Insitute of Technology", "acronym": "GT", "state": "Georgia"}, ) gt = json.loads(response.content) # Create `Administrators` response = self.client.post( "/administrators/", {"username": "spsu", "password": "spsu", "university": str(spsu["id"])}, ) spsu_admin = json.loads(response.content) response = self.client.post( "/administrators/", {"username": "gt", "password": "gt", "university": str(gt["id"])} ) gt_admin = json.loads(response.content) # Create `Nationals` response = self.client.post( "/nationals/", {"username": "ks", "password": "ks", "organization": str(ks["id"])} ) ks_nat = json.loads(response.content) response = self.client.post( "/nationals/", {"username": "sae", "password": "sae", "organization": str(sae["id"])} ) sae_nat = json.loads(response.content) # Create `Hosts` response = self.client.post( "/hosts/", { "username": "ksspsu", "password": "ksspsu", "organization": str(ks["id"]), "university": str(spsu["id"]), "administrator": str(spsu_admin["id"]), "nationals": str(ks_nat["id"]), }, ) json.loads(response.content) response = self.client.post( "/hosts/", { "username": "saespsu", "password": "saespsu", "organization": str(sae["id"]), "university": str(spsu["id"]), "administrator": str(spsu_admin["id"]), "nationals": str(sae_nat["id"]), }, ) json.loads(response.content) response = self.client.post( "/hosts/", { "username": "ksgt", "password": "ksgt", "organization": str(ks["id"]), "university": str(gt["id"]), "administrator": str(gt_admin["id"]), "nationals": str(ks_nat["id"]), }, ) json.loads(response.content) response = self.client.post( "/hosts/", { "username": "saegt", "password": "saegt", "organization": str(sae["id"]), "university": str(gt["id"]), "administrator": str(gt_admin["id"]), "nationals": str(sae_nat["id"]), }, ) json.loads(response.content) self.client.login(username="ks", password="ks") # Assert `Hosts` exist response = self.client.get("/hosts/") content = 
json.loads(response.content)["results"] self.assertEquals(len(content), 2) # Assert filter `University` by `id` response = self.client.get("/hosts/", {"university": str(spsu["id"])}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) self.client.login(username="spsu", password="spsu") # Assert `Hosts` exist response = self.client.get("/hosts/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) # Assert filter `Organization` by `id` response = self.client.get("/hosts/", {"organization": str(ks["id"])}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_addresses(self): # Create `User` create_user("ksspsu", "ksspsu") # Add `CarbonCopyAddresses` self.client.login(username="ksspsu", password="ksspsu") response = self.client.post("/carboncopyaddresses/", {"email": "josheppinette@gmail.com"}) email_one = json.loads(response.content) response = self.client.post("/carboncopyaddresses/", {"email": "bob@bob.com"}) json.loads(response.content) # Assert `CarbonCopyAddresses` exist response = self.client.get("/carboncopyaddresses/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) # Filter by `email` response = self.client.get("/carboncopyaddresses/", {"email": "bob@bob.com"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # Filter by `id` response = self.client.get("/carboncopyaddresses/", {"id": str(email_one["id"])}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_identities(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Organizations` response = self.client.post("/organizations/", {"name": "Kappa Sigma"}) ks = json.loads(response.content) response = self.client.post("/organizations/", {"name": "Sigma Alpha Epsilon"}) 
json.loads(response.content) # Create `Universities` response = self.client.post( "/universities/", { "name": "Southern Polytechnic State University", "acronym": "SPSU", "state": "Georgia", }, ) spsu = json.loads(response.content) response = self.client.post( "/universities/", {"name": "Georgia Insitute of Technology", "acronym": "GT", "state": "Georgia"}, ) json.loads(response.content) # Create `Hosts` response = self.client.post( "/hosts/", { "username": "ksspsu", "password": "ksspsu", "organization": str(ks["id"]), "university": str(spsu["id"]), }, ) ksspsu = json.loads(response.content) # Enable the `Host` Host.objects.filter(id=ksspsu["id"]).update(enabled=True) # Create `Identities` self.client.login(username="ksspsu", password="ksspsu") response = self.client.post( "/identities/", {"first_name": "Josh", "last_name": "Eppinette", "gender": "Male", "dob": "1994-05-12"}, ) json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) response = self.client.post( "/identities/", { "first_name": "Olivia", "last_name": "Eppinette", "gender": "Female", "dob": "1998-10-02", }, ) json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) # Test basic filtering response = self.client.get("/identities/", {"first_name": "os"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/identities/", {"last_name": "PP"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) response = self.client.get("/identities/", {"dob": "1994-05-12"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/identities/", {"gender": "Male"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/identities/", {"dob": "1994-05-12"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # Test 
custom filtering response = self.client.get("/identities/", {"search": "os"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/identities/", {"search": "PP Male"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) self.assertIn("Josh", str(content)) response = self.client.get("/identities/", {"search": "PP Female"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) self.assertIn("Olivia", str(content)) response = self.client.get("/identities/", {"search": "josh eppinette Male"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/identities/", {"search": "epp Male jo"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_flags(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Organizations` response = self.client.post("/organizations/", {"name": "Kappa Sigma"}) ks = json.loads(response.content) response = self.client.post("/organizations/", {"name": "Sigma Alpha Epsilon"}) json.loads(response.content) # Create `Universities` response = self.client.post( "/universities/", { "name": "Southern Polytechnic State University", "acronym": "SPSU", "state": "Georgia", }, ) spsu = json.loads(response.content) response = self.client.post( "/universities/", {"name": "Georgia Insitute of Technology", "acronym": "GT", "state": "Georgia"}, ) json.loads(response.content) # Create `Hosts` response = self.client.post( "/hosts/", { "username": "ksspsu", "password": "ksspsu", "organization": str(ks["id"]), "university": str(spsu["id"]), }, ) ksspsu = json.loads(response.content) # Create `Nationals` response = self.client.post( "/nationals/", {"username": "ks", "password": "ks", "organization": str(ks["id"])} ) ks_nat = json.loads(response.content) # 
Create `Administrator` response = self.client.post( "/administrators/", {"username": "spsu", "password": "spsu", "university": str(spsu["id"])}, ) spsu_admin = json.loads(response.content) # Enable the user profiles Host.objects.filter(id=ksspsu["id"]).update(enabled=True) Nationals.objects.all().update(enabled=True) Administrator.objects.all().update(enabled=True) # Create `Identities` self.client.login(username="ksspsu", password="ksspsu") response = self.client.post( "/identities/", {"first_name": "Josh", "last_name": "Eppinette", "gender": "Male", "dob": "1994-05-12"}, ) josh = json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) response = self.client.post( "/identities/", { "first_name": "Olivia", "last_name": "Eppinette", "gender": "Female", "dob": "1998-10-02", }, ) olivia = json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) response = self.client.post( "/identities/", {"first_name": "Bobby", "last_name": "Brown", "gender": "Male", "dob": "1993-10-02"}, ) bobby = json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) response = self.client.post( "/identities/", {"first_name": "joe", "last_name": "brooks", "gender": "Male", "dob": "1991-10-02"}, ) joe = json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) # Create `Flags` self.client.login(username="ksspsu", password="ksspsu") response = self.client.post("/flags/", {"identity": josh["id"], "violation": "Stealing"}) json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) response = self.client.post( "/flags/", {"identity": olivia["id"], "violation": "Underage Drinking"} ) json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) self.client.login(username="ks", password="ks") response = self.client.post("/flags/", {"identity": bobby["id"], "violation": "Stealing"}) 
json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) self.client.login(username="spsu", password="spsu") response = self.client.post("/flags/", {"identity": joe["id"], "violation": "Stealing"}) json.loads(response.content) self.assertEquals(response.status_code, status.HTTP_201_CREATED) # Filter `Flags` self.client.login(username="ksspsu", password="ksspsu") response = self.client.get("/flags/", {"first_name": "josh"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"gender": "Female"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"last_name": "epp"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) response = self.client.get("/flags/", {"dob": "1994-05-12"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"nationals": ks_nat["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"administrator": spsu_admin["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"host": ksspsu["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) # Complex Filtering response = self.client.get("/flags/", {"search": "Male josh pp"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"search": "pp Male"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"search": "Female olivia"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/flags/", {"search": "Female"}) content = 
json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get( "/flags/", {"search": "Male joe oo", "administrator": spsu_admin["id"]} ) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_events(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Organizations` response = self.client.post("/organizations/", {"name": "Kappa Sigma"}) ks = json.loads(response.content) response = self.client.post("/organizations/", {"name": "Sigma Alpha Epsilon"}) json.loads(response.content) # Create `Universities` response = self.client.post( "/universities/", { "name": "Southern Polytechnic State University", "acronym": "SPSU", "state": "Georgia", }, ) spsu = json.loads(response.content) response = self.client.post( "/universities/", {"name": "Georgia Insitute of Technology", "acronym": "GT", "state": "Georgia"}, ) json.loads(response.content) # Create `Hosts` response = self.client.post( "/hosts/", { "username": "ksspsu", "password": "ksspsu", "organization": str(ks["id"]), "university": str(spsu["id"]), }, ) ksspsu = json.loads(response.content) # Enable the `Host` Host.objects.filter(id=ksspsu["id"]).update(enabled=True) # `Event` default data data = { "name": "New Event", "description": "My event.", "date": "2014-07-21", "time": "00:00:00.000000", "location": "Chapter House", "planner_name": "Joshua Taylor Eppinette", "planner_mobile": "7704016678", "planner_email": "josh.eppinette@waterdragon.net", "president_email": "pres@pres.com", "sober_monitors": "Josh Eppinette", "expected_guest_count": 50, "exclusivity": "Invitation Only", "alcohol_distribution": "Mobile Cocktails", "entry": "Yes", "entry_description": "Front Door", "co_sponsored_description": "With Kappa Delta", } # Create `Events` self.client.login(username="ksspsu", password="ksspsu") response = self.client.post("/events/", data) 
self.assertEquals(response.status_code, status.HTTP_201_CREATED) event_one = json.loads(response.content) data["name"] = "New Event 2" response = self.client.post("/events/", data) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # Test `Filter` self.client.login(username="ks", password="ks") response = self.client.get("/events/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) response = self.client.get("/events/", {"name": "2"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get("/events/", {"id": event_one["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_procedures(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Organizations` response = self.client.post("/organizations/", {"name": "Kappa Sigma"}) ks = json.loads(response.content) response = self.client.post("/organizations/", {"name": "Sigma Alpha Epsilon"}) sae = json.loads(response.content) # Create `Universities` response = self.client.post( "/universities/", { "name": "Southern Polytechnic State University", "acronym": "SPSU", "state": "Georgia", }, ) spsu = json.loads(response.content) response = self.client.post( "/universities/", {"name": "Georgia Insitute of Technology", "acronym": "GT", "state": "Georgia"}, ) gt = json.loads(response.content) # Create `Nationals` response = self.client.post( "/nationals/", {"username": "ks", "password": "ks", "organization": str(ks["id"])} ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) response = self.client.post( "/nationals/", {"username": "sae", "password": "sae", "organization": str(sae["id"])} ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # Create `Administrators` response 
= self.client.post( "/administrators/", {"username": "spsu", "password": "spsu", "university": str(spsu["id"])}, ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) response = self.client.post( "/administrators/", {"username": "gt", "password": "gt", "university": str(gt["id"])} ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # Create `Hosts` response = self.client.post( "/hosts/", { "username": "ksspsu", "password": "ksspsu", "organization": str(ks["id"]), "university": str(spsu["id"]), }, ) ksspsu = json.loads(response.content) response = self.client.post( "/hosts/", { "username": "saespsu", "password": "saespsu", "organization": str(sae["id"]), "university": str(spsu["id"]), }, ) saespsu = json.loads(response.content) response = self.client.post( "/hosts/", { "username": "ksgt", "password": "ksgt", "organization": str(ks["id"]), "university": str(gt["id"]), }, ) ksgt = json.loads(response.content) # Enable the `Nationals`, `Administrators`, `Hosts` Nationals.objects.all().update(enabled=True) Administrator.objects.all().update(enabled=True) Host.objects.all().update(enabled=True) # `Event` default data data = { "name": "New Event", "description": "My event.", "date": "2014-07-21", "time": "00:00:00.000000", "location": "Chapter House", "planner_name": "Joshua Taylor Eppinette", "planner_mobile": "7704016678", "planner_email": "josh.eppinette@waterdragon.net", "president_email": "pres@pres.com", "sober_monitors": "Josh Eppinette", "expected_guest_count": 50, "exclusivity": "Invitation Only", "alcohol_distribution": "Mobile Cocktails", "entry": "Yes", "entry_description": "Front Door", "co_sponsored_description": "With Kappa Delta", } # Create `Events` # KSSPSU self.client.login(username="ksspsu", password="ksspsu") response = self.client.post("/events/", data) self.assertEquals(response.status_code, status.HTTP_201_CREATED) ksspsu_event = json.loads(response.content) # 
SAESPSU self.client.login(username="saespsu", password="saespsu") response = self.client.post("/events/", data) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # KSGT self.client.login(username="ksgt", password="ksgt") response = self.client.post("/events/", data) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # Test `Events` # `Nationals` self.client.login(username="ks", password="ks") response = self.client.get("/events/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) # `Administrator` self.client.login(username="gt", password="gt") response = self.client.get("/events/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # `Host` self.client.login(username="ksspsu", password="ksspsu") response = self.client.get("/events/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # Create `Procedures` event = Event.objects.get(host__id=ksspsu["id"]) Procedure.objects.create(description="Interior Sweep", event=event) event = Event.objects.get(host__id=saespsu["id"]) Procedure.objects.create(description="Interior Sweep", event=event) event = Event.objects.get(host__id=ksgt["id"]) Procedure.objects.create(description="Interior Sweep", event=event) # Test `Procedure` filtering # `Nationals` self.client.login(username="ks", password="ks") response = self.client.get("/procedures/", {"host": ksspsu["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # `Administrator` self.client.login(username="spsu", password="spsu") response = self.client.get("/procedures/", {"host": ksspsu["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # `Host` self.client.login(username="ksspsu", password="ksspsu") response = self.client.get("/procedures/", {"event": 10}) content = json.loads(response.content)["results"] 
self.assertEquals(len(content), 0) self.client.login(username="ksspsu", password="ksspsu") response = self.client.get("/procedures/", {"event": ksspsu_event["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) def test_filter_invitees(self): admin = create_user("josh", "josh") admin.is_superuser = True admin.save() self.client.login(username="josh", password="josh") # Create `Organizations` response = self.client.post("/organizations/", {"name": "Kappa Sigma"}) ks = json.loads(response.content) response = self.client.post("/organizations/", {"name": "Sigma Alpha Epsilon"}) sae = json.loads(response.content) # Create `Universities` response = self.client.post( "/universities/", { "name": "Southern Polytechnic State University", "acronym": "SPSU", "state": "Georgia", }, ) spsu = json.loads(response.content) response = self.client.post( "/universities/", {"name": "Georgia Insitute of Technology", "acronym": "GT", "state": "Georgia"}, ) gt = json.loads(response.content) # Create `Nationals` response = self.client.post( "/nationals/", {"username": "ks", "password": "ks", "organization": str(ks["id"])} ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) response = self.client.post( "/nationals/", {"username": "sae", "password": "sae", "organization": str(sae["id"])} ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # Create `Administrators` response = self.client.post( "/administrators/", {"username": "spsu", "password": "spsu", "university": str(spsu["id"])}, ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) response = self.client.post( "/administrators/", {"username": "gt", "password": "gt", "university": str(gt["id"])} ) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # Create `Hosts` response = self.client.post( "/hosts/", { "username": "ksspsu", 
"password": "ksspsu", "organization": str(ks["id"]), "university": str(spsu["id"]), }, ) ksspsu = json.loads(response.content) response = self.client.post( "/hosts/", { "username": "saespsu", "password": "saespsu", "organization": str(sae["id"]), "university": str(spsu["id"]), }, ) saespsu = json.loads(response.content) response = self.client.post( "/hosts/", { "username": "ksgt", "password": "ksgt", "organization": str(ks["id"]), "university": str(gt["id"]), }, ) ksgt = json.loads(response.content) # Enable the `Nationals`, `Administrators`, `Hosts` Nationals.objects.all().update(enabled=True) Administrator.objects.all().update(enabled=True) Host.objects.all().update(enabled=True) # `Event` default data data = { "name": "New Event", "description": "My event.", "date": "2014-07-21", "time": "00:00:00.000000", "location": "Chapter House", "planner_name": "Joshua Taylor Eppinette", "planner_mobile": "7704016678", "planner_email": "josh.eppinette@waterdragon.net", "president_email": "pres@pres.com", "sober_monitors": "Josh Eppinette", "expected_guest_count": 50, "exclusivity": "Invitation Only", "alcohol_distribution": "Mobile Cocktails", "entry": "Yes", "entry_description": "Front Door", "co_sponsored_description": "With Kappa Delta", } # Create `Events` # KSSPSU self.client.login(username="ksspsu", password="ksspsu") response = self.client.post("/events/", data) self.assertEquals(response.status_code, status.HTTP_201_CREATED) ksspsu_event = json.loads(response.content) # SAESPSU self.client.login(username="saespsu", password="saespsu") response = self.client.post("/events/", data) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # KSGT self.client.login(username="ksgt", password="ksgt") response = self.client.post("/events/", data) self.assertEquals(response.status_code, status.HTTP_201_CREATED) json.loads(response.content) # Test `Events` # `Nationals` self.client.login(username="ks", password="ks") response = 
self.client.get("/events/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) # `Administrator` self.client.login(username="gt", password="gt") response = self.client.get("/events/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # `Host` self.client.login(username="ksspsu", password="ksspsu") response = self.client.get("/events/") content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) # Create `Procedures` event = Event.objects.get(host__id=ksspsu["id"]) Invitee.objects.create(first_name="Josh", last_name="Eppinette", gender="Male", event=event) Invitee.objects.create(first_name="Bobby", last_name="Brown", gender="Male", event=event) Invitee.objects.create( first_name="Olivia", last_name="Eppinette", gender="Female", event=event ) event = Event.objects.get(host__id=saespsu["id"]) Invitee.objects.create(first_name="Josh", last_name="Smith", gender="Male", event=event) event = Event.objects.get(host__id=ksgt["id"]) Invitee.objects.create(first_name="Josh", last_name="Brown", gender="Male", event=event) # Test `Invitee` basic filtering # `Nationals` self.client.login(username="ks", password="ks") response = self.client.get("/invitees/", {"host": ksspsu["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 3) self.assertIn("Eppinette", str(content)) # `Administrator` self.client.login(username="spsu", password="spsu") response = self.client.get("/invitees/", {"host": ksspsu["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 3) self.assertIn("Eppinette", str(content)) # `Host` self.client.login(username="ksspsu", password="ksspsu") response = self.client.get("/invitees/", {"event": 10}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 0) self.client.login(username="ksspsu", password="ksspsu") response = self.client.get("/invitees/", {"event": 
ksspsu_event["id"]}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 3) self.assertIn("Eppinette", str(content)) # `Invitee` complex filtering # `Host` self.client.login(username="ksspsu", password="ksspsu") response = self.client.get( "/invitees/", {"event": ksspsu_event["id"], "first_name": "Josh"} ) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get( "/invitees/", {"event": ksspsu_event["id"], "last_name": "eppinette"} ) content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) response = self.client.get("/invitees/", {"event": ksspsu_event["id"], "gender": "Male"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) # Test `name` custom filter response = self.client.get("/invitees/", {"event": ksspsu_event["id"], "name": "epp"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 2) response = self.client.get("/invitees/", {"event": ksspsu_event["id"], "name": "josh"}) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1) response = self.client.get( "/invitees/", {"event": ksspsu_event["id"], "name": "eppinette j"} ) content = json.loads(response.content)["results"] self.assertEquals(len(content), 1)
36.656221
100
0.564291
3,876
39,772
5.731166
0.054696
0.08193
0.10777
0.143693
0.939543
0.926623
0.907131
0.885072
0.872558
0.851355
0
0.010838
0.276174
39,772
1,084
101
36.690037
0.760803
0.045132
0
0.733333
0
0
0.201029
0.007655
0
0
0
0
0.114465
1
0.012579
false
0.086792
0.006289
0.001258
0.021384
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
388216df55b10418204ccd2527a3a690c69f3126
227
py
Python
MoleculeACE/GNN/models/__init__.py
molML/MoleculeACE
e831d2371a9b89f4853a03d5c04cc4bf59f64ee0
[ "MIT" ]
9
2022-03-26T17:36:03.000Z
2022-03-29T19:50:26.000Z
MoleculeACE/GNN/models/__init__.py
molML/MoleculeACE
e831d2371a9b89f4853a03d5c04cc4bf59f64ee0
[ "MIT" ]
null
null
null
MoleculeACE/GNN/models/__init__.py
molML/MoleculeACE
e831d2371a9b89f4853a03d5c04cc4bf59f64ee0
[ "MIT" ]
null
null
null
from MoleculeACE.GNN.models.train_hyperopt import train_model_with_hyperparameters_optimization from MoleculeACE.GNN.models.model import init_model from MoleculeACE.GNN.models.utils import get_bond_feat_size, get_atom_feat_size
75.666667
95
0.903084
34
227
5.676471
0.529412
0.233161
0.279793
0.373057
0
0
0
0
0
0
0
0
0.052863
227
3
96
75.666667
0.897674
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
2a33d696207f1f25adf6e4b575f00960f57312a2
3,298
py
Python
flexget/tests/test_failed_api.py
sillygreen89/Flexget
60f24ab0dda7b94c87ba43451921c50c3cef391f
[ "MIT" ]
1
2018-05-02T21:14:50.000Z
2018-05-02T21:14:50.000Z
flexget/tests/test_failed_api.py
sillygreen89/Flexget
60f24ab0dda7b94c87ba43451921c50c3cef391f
[ "MIT" ]
null
null
null
flexget/tests/test_failed_api.py
sillygreen89/Flexget
60f24ab0dda7b94c87ba43451921c50c3cef391f
[ "MIT" ]
null
null
null
from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin from flexget.manager import Session from flexget.plugins.filter.retry_failed import FailedEntry from flexget.utils import json class TestRetryFailedAPI(object): config = "{'tasks': {}}" def test_retry_failed_get_all(self, api_client): rsp = api_client.get('/failed/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code assert json.loads(rsp.get_data(as_text=True))['number_of_failed_entries'] == 0 with Session() as session: failed_entry = FailedEntry(title='Failed title', url='http://123.com', reason='Test reason') session.add(failed_entry) session.commit() rsp = api_client.get('/failed/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code assert json.loads(rsp.get_data(as_text=True))['number_of_failed_entries'] == 1 def test_retry_failed_delete_all(self, api_client): rsp = api_client.get('/failed/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code assert json.loads(rsp.get_data(as_text=True))['number_of_failed_entries'] == 0 with Session() as session: failed_entry = FailedEntry(title='Failed title', url='http://123.com', reason='Test reason') session.add(failed_entry) session.commit() rsp = api_client.get('/failed/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code assert json.loads(rsp.get_data(as_text=True))['number_of_failed_entries'] == 1 rsp = api_client.delete('/failed/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code rsp = api_client.get('/failed/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code assert json.loads(rsp.get_data(as_text=True))['number_of_failed_entries'] == 0 def test_retry_failed_get_by_id(self, api_client): rsp = api_client.get('/failed/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code with Session() as session: failed_entry = 
FailedEntry(title='Failed title', url='http://123.com', reason='Test reason') session.add(failed_entry) session.commit() rsp = api_client.get('/failed/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code def test_retry_failed_delete_by_id(self, api_client): rsp = api_client.get('/failed/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code with Session() as session: failed_entry = FailedEntry(title='Failed title', url='http://123.com', reason='Test reason') session.add(failed_entry) session.commit() rsp = api_client.get('/failed/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code rsp = api_client.delete('/failed/1/') assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code rsp = api_client.get('/failed/1/') assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code
41.225
104
0.651304
450
3,298
4.555556
0.148889
0.105366
0.152195
0.11122
0.872683
0.817073
0.817073
0.817073
0.817073
0.817073
0
0.022922
0.219527
3,298
79
105
41.746835
0.773504
0.014554
0
0.767857
0
0
0.189963
0.036946
0
0
0
0
0.303571
1
0.071429
false
0
0.089286
0
0.196429
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2a64ff0ee0696cc756d10934cfc7f8bfef217971
28,790
py
Python
forms/migrations/0047_auto_20200820_0609.py
CodeForAfrica/gmmp
d7ffe2dac16bd57e81bb3555ddea9df1fe7e9ebf
[ "Apache-2.0" ]
4
2020-01-05T09:14:19.000Z
2022-02-17T03:22:09.000Z
forms/migrations/0047_auto_20200820_0609.py
CodeForAfrica/gmmp
d7ffe2dac16bd57e81bb3555ddea9df1fe7e9ebf
[ "Apache-2.0" ]
68
2019-12-23T02:19:55.000Z
2021-04-23T06:13:36.000Z
forms/migrations/0047_auto_20200820_0609.py
CodeForAfrica/gmmp
d7ffe2dac16bd57e81bb3555ddea9df1fe7e9ebf
[ "Apache-2.0" ]
2
2020-11-07T12:23:21.000Z
2021-11-07T18:21:31.000Z
# Generated by Django 2.2.15 on 2020-08-20 06:09 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('forms', '0046_auto_20200312_0949'), ] operations = [ migrations.AlterField( model_name='internetnewssheet', name='topic', field=models.PositiveIntegerField(choices=[('Politics and Government', ((1, '(1) Women politicians, women electoral candidates...'), (2, '(2) Peace, negotiations, treaties...(local, regional, national),'), (3, '(3) Other domestic politics/government (local, regional, national), elections, speeches, the political process ...'), (4, '(4) Global partnerships (international trade and finance systems, e.g. WTO, IMF, World Bank, debt) ...'), (5, '(5) Foreign/international politics, relations with other countries, negotiations, treaties, UN peacekeeping ...'), (6, '(6) National defence, military spending, military training, military parades, internal security ...'), (7, "(7) Other stories on politics and government (specify the topic in 'Comments' section of coding sheet)"))), ('Economy', ((8, '(8) Economic policies, strategies, modules, indicators, stock markets, taxes,...'), (9, '(9) Economic crisis, state bailouts of companies, company takeovers and mergers ...'), (10, '(10) Poverty, housing, social welfare, aid to those in need ...'), (11, '(11) Women’s participation in economic processes (informal work, paid employment, unemployment, unpaid labour)'), (12, '(12) Employment'), (13, '(13) Informal work, street vending, ...'), (14, '(14) Other labour issues, strikes, trade unions, negotiations, other employment and unemployment'), (15, '(15) Rural economy, agriculture, farming practices, agricultural policy, land rights ...'), (16, '(16) Consumer issues, consumer protection, regulation, prices, consumer fraud ...'), (17, '(17) Transport, traffic, roads...'), (18, "(18) Other stories on the economy (specify the topic in 'Comments' section of coding sheet)"))), ('Science and Health', ((19, '(19) Science, 
technology, research, funding, discoveries, developments ...'), (20, '(20) Medicine, health, hygiene, safety, disability, medical research, funding (not EBOLA or HIV- AIDS)...'), (21, '(21) EBOLA, treatment, response...'), (22, '(22) HIV and AIDS, incidence, policy, treatment, people affected ...'), (23, '(23) Other epidemics, viruses, contagions, Influenza, BSE, SARS. NOT COVID-19 (For stories related to Covid-19 choose the closest relevant sub-topic)'), (24, '(24) Birth control, fertility, sterilization, amniocentesis, termination of pregnancy ...'), (25, '(25) Climate change, global warming'), (26, '(26) Environment, pollution, tourism ...'), (27, "(27) Other stories on science or health (specify the topic in 'Comments' section of coding sheet)"))), ('Social and Legal', ((28, '(28) Sustainable Development Goals (SDGs), Post 2015 agenda, Agenda 2030'), (29, '(29) Family relations, inter-generational conflict, single parents ...'), (30, "(30) Human rights, women's rights, children's rights, gay & lesbian rights, rights of minorities .."), (31, '(31) Religion, culture, tradition, cultural controversies, teachings, celebrations, practices ...'), (32, '(32) Migration, refugees, asylum seekers, ethnic conflict, integration, racism, xenophobia ...'), (33, '(33) Other development issues, sustainability,'), (34, '(34) Education, childcare, nursery, university, literacy'), (35, "(35) Women's movement, feminist activism, events, demonstrations, gender equality advocacy ..."), (36, '(36) Changing gender relations, roles and relationships of women and men inside and outside the home ...'), (37, '(37) Family law, family codes, property law, inheritance law and rights ...'), (38, '(38) Legal system, judicial system, legislation (apart from family, property & inheritance law) ...'), (39, '(39) Disaster, accident, famine, flood, plane crash, etc'), (40, '(40) Riots, demonstrations, public disorder, etc.'), (41, "(41) Other stories on social or legal issues (specify the topic in 
'Comments' section of coding sheet)"))), ('Crime and Violence', ((42, '(42) Non-violent crime, bribery, theft, drug-dealing, ...'), (43, '(43) Corruption, (including political corruption/malpractice)'), (44, '(44) Violent crime, murder, abduction, kidnapping, assault, drug-related violence ...'), (45, '(45) Child abuse, sexual violence against children, neglect'), (46, '(46) War, civil war, terrorism, state-based violence'), (47, "(47) Other stories on crime and violence (specify the topic in 'Comments' section of coding sheet)"))), ('Gender and related', ((48, '(48) Sexual harassment against women, rape, sexual assault, #MeToo #TimesUp'), (49, '(49) Other gender violence such as feminicide, trafficking of girls and women, FGM...'), (50, '(50) Inequality between women and men such as income inequality/gender pay gap,'))), ('Celebrity, Arts and Media, Sports', ((51, '(51) Celebrity news, births, marriages, deaths, obituaries, famous people, royalty ...'), (52, '(52) Arts, entertainment, leisure, cinema, theatre, books, dance ...'), (53, '(53) Media, including new media (computers, internet), portrayal of women and/or men'), (54, '(54) Fake news, mis-information, dis-information, mal-information...'), (55, '(55) Beauty contests, models, fashion, beauty aids, cosmetic surgery ...'), (56, '(56) Sports, events, players, facilities, training, policies, funding ...'), (57, "(57) Other stories on celebrities, arts, media (specify the topic in 'Comments' section of coding sheet)"))), ('Other', ((58, '(58) Use only as a last resort and explain'),))], help_text="Choose one topic that best describes how the story is reported. Remember that a single event can be reported in different ways. Within each broad category, we include a code for 'other stories'. 
Please use these codes only as a <strong>last resort</strong>.", verbose_name='(2) Topic'), ), migrations.AlterField( model_name='newspapersheet', name='topic', field=models.PositiveIntegerField(choices=[('Politics and Government', ((1, '(1) Women politicians, women electoral candidates...'), (2, '(2) Peace, negotiations, treaties...(local, regional, national),'), (3, '(3) Other domestic politics/government (local, regional, national), elections, speeches, the political process ...'), (4, '(4) Global partnerships (international trade and finance systems, e.g. WTO, IMF, World Bank, debt) ...'), (5, '(5) Foreign/international politics, relations with other countries, negotiations, treaties, UN peacekeeping ...'), (6, '(6) National defence, military spending, military training, military parades, internal security ...'), (7, "(7) Other stories on politics and government (specify the topic in 'Comments' section of coding sheet)"))), ('Economy', ((8, '(8) Economic policies, strategies, modules, indicators, stock markets, taxes,...'), (9, '(9) Economic crisis, state bailouts of companies, company takeovers and mergers ...'), (10, '(10) Poverty, housing, social welfare, aid to those in need ...'), (11, '(11) Women’s participation in economic processes (informal work, paid employment, unemployment, unpaid labour)'), (12, '(12) Employment'), (13, '(13) Informal work, street vending, ...'), (14, '(14) Other labour issues, strikes, trade unions, negotiations, other employment and unemployment'), (15, '(15) Rural economy, agriculture, farming practices, agricultural policy, land rights ...'), (16, '(16) Consumer issues, consumer protection, regulation, prices, consumer fraud ...'), (17, '(17) Transport, traffic, roads...'), (18, "(18) Other stories on the economy (specify the topic in 'Comments' section of coding sheet)"))), ('Science and Health', ((19, '(19) Science, technology, research, funding, discoveries, developments ...'), (20, '(20) Medicine, health, hygiene, safety, 
disability, medical research, funding (not EBOLA or HIV- AIDS)...'), (21, '(21) EBOLA, treatment, response...'), (22, '(22) HIV and AIDS, incidence, policy, treatment, people affected ...'), (23, '(23) Other epidemics, viruses, contagions, Influenza, BSE, SARS. NOT COVID-19 (For stories related to Covid-19 choose the closest relevant sub-topic)'), (24, '(24) Birth control, fertility, sterilization, amniocentesis, termination of pregnancy ...'), (25, '(25) Climate change, global warming'), (26, '(26) Environment, pollution, tourism ...'), (27, "(27) Other stories on science or health (specify the topic in 'Comments' section of coding sheet)"))), ('Social and Legal', ((28, '(28) Sustainable Development Goals (SDGs), Post 2015 agenda, Agenda 2030'), (29, '(29) Family relations, inter-generational conflict, single parents ...'), (30, "(30) Human rights, women's rights, children's rights, gay & lesbian rights, rights of minorities .."), (31, '(31) Religion, culture, tradition, cultural controversies, teachings, celebrations, practices ...'), (32, '(32) Migration, refugees, asylum seekers, ethnic conflict, integration, racism, xenophobia ...'), (33, '(33) Other development issues, sustainability,'), (34, '(34) Education, childcare, nursery, university, literacy'), (35, "(35) Women's movement, feminist activism, events, demonstrations, gender equality advocacy ..."), (36, '(36) Changing gender relations, roles and relationships of women and men inside and outside the home ...'), (37, '(37) Family law, family codes, property law, inheritance law and rights ...'), (38, '(38) Legal system, judicial system, legislation (apart from family, property & inheritance law) ...'), (39, '(39) Disaster, accident, famine, flood, plane crash, etc'), (40, '(40) Riots, demonstrations, public disorder, etc.'), (41, "(41) Other stories on social or legal issues (specify the topic in 'Comments' section of coding sheet)"))), ('Crime and Violence', ((42, '(42) Non-violent crime, bribery, theft, 
drug-dealing, ...'), (43, '(43) Corruption, (including political corruption/malpractice)'), (44, '(44) Violent crime, murder, abduction, kidnapping, assault, drug-related violence ...'), (45, '(45) Child abuse, sexual violence against children, neglect'), (46, '(46) War, civil war, terrorism, state-based violence'), (47, "(47) Other stories on crime and violence (specify the topic in 'Comments' section of coding sheet)"))), ('Gender and related', ((48, '(48) Sexual harassment against women, rape, sexual assault, #MeToo #TimesUp'), (49, '(49) Other gender violence such as feminicide, trafficking of girls and women, FGM...'), (50, '(50) Inequality between women and men such as income inequality/gender pay gap,'))), ('Celebrity, Arts and Media, Sports', ((51, '(51) Celebrity news, births, marriages, deaths, obituaries, famous people, royalty ...'), (52, '(52) Arts, entertainment, leisure, cinema, theatre, books, dance ...'), (53, '(53) Media, including new media (computers, internet), portrayal of women and/or men'), (54, '(54) Fake news, mis-information, dis-information, mal-information...'), (55, '(55) Beauty contests, models, fashion, beauty aids, cosmetic surgery ...'), (56, '(56) Sports, events, players, facilities, training, policies, funding ...'), (57, "(57) Other stories on celebrities, arts, media (specify the topic in 'Comments' section of coding sheet)"))), ('Other', ((58, '(58) Use only as a last resort and explain'),))], help_text="Choose one topic that best describes how the story is reported. Remember that a single event can be reported in different ways. Within each broad category, we include a code for 'other stories'. 
Please use these codes only as a <strong>last resort</strong>.", verbose_name='(2) Topic'), ), migrations.AlterField( model_name='radiosheet', name='topic', field=models.PositiveIntegerField(choices=[('Politics and Government', ((1, '(1) Women politicians, women electoral candidates...'), (2, '(2) Peace, negotiations, treaties...(local, regional, national),'), (3, '(3) Other domestic politics/government (local, regional, national), elections, speeches, the political process ...'), (4, '(4) Global partnerships (international trade and finance systems, e.g. WTO, IMF, World Bank, debt) ...'), (5, '(5) Foreign/international politics, relations with other countries, negotiations, treaties, UN peacekeeping ...'), (6, '(6) National defence, military spending, military training, military parades, internal security ...'), (7, "(7) Other stories on politics and government (specify the topic in 'Comments' section of coding sheet)"))), ('Economy', ((8, '(8) Economic policies, strategies, modules, indicators, stock markets, taxes,...'), (9, '(9) Economic crisis, state bailouts of companies, company takeovers and mergers ...'), (10, '(10) Poverty, housing, social welfare, aid to those in need ...'), (11, '(11) Women’s participation in economic processes (informal work, paid employment, unemployment, unpaid labour)'), (12, '(12) Employment'), (13, '(13) Informal work, street vending, ...'), (14, '(14) Other labour issues, strikes, trade unions, negotiations, other employment and unemployment'), (15, '(15) Rural economy, agriculture, farming practices, agricultural policy, land rights ...'), (16, '(16) Consumer issues, consumer protection, regulation, prices, consumer fraud ...'), (17, '(17) Transport, traffic, roads...'), (18, "(18) Other stories on the economy (specify the topic in 'Comments' section of coding sheet)"))), ('Science and Health', ((19, '(19) Science, technology, research, funding, discoveries, developments ...'), (20, '(20) Medicine, health, hygiene, safety, 
disability, medical research, funding (not EBOLA or HIV- AIDS)...'), (21, '(21) EBOLA, treatment, response...'), (22, '(22) HIV and AIDS, incidence, policy, treatment, people affected ...'), (23, '(23) Other epidemics, viruses, contagions, Influenza, BSE, SARS. NOT COVID-19 (For stories related to Covid-19 choose the closest relevant sub-topic)'), (24, '(24) Birth control, fertility, sterilization, amniocentesis, termination of pregnancy ...'), (25, '(25) Climate change, global warming'), (26, '(26) Environment, pollution, tourism ...'), (27, "(27) Other stories on science or health (specify the topic in 'Comments' section of coding sheet)"))), ('Social and Legal', ((28, '(28) Sustainable Development Goals (SDGs), Post 2015 agenda, Agenda 2030'), (29, '(29) Family relations, inter-generational conflict, single parents ...'), (30, "(30) Human rights, women's rights, children's rights, gay & lesbian rights, rights of minorities .."), (31, '(31) Religion, culture, tradition, cultural controversies, teachings, celebrations, practices ...'), (32, '(32) Migration, refugees, asylum seekers, ethnic conflict, integration, racism, xenophobia ...'), (33, '(33) Other development issues, sustainability,'), (34, '(34) Education, childcare, nursery, university, literacy'), (35, "(35) Women's movement, feminist activism, events, demonstrations, gender equality advocacy ..."), (36, '(36) Changing gender relations, roles and relationships of women and men inside and outside the home ...'), (37, '(37) Family law, family codes, property law, inheritance law and rights ...'), (38, '(38) Legal system, judicial system, legislation (apart from family, property & inheritance law) ...'), (39, '(39) Disaster, accident, famine, flood, plane crash, etc'), (40, '(40) Riots, demonstrations, public disorder, etc.'), (41, "(41) Other stories on social or legal issues (specify the topic in 'Comments' section of coding sheet)"))), ('Crime and Violence', ((42, '(42) Non-violent crime, bribery, theft, 
drug-dealing, ...'), (43, '(43) Corruption, (including political corruption/malpractice)'), (44, '(44) Violent crime, murder, abduction, kidnapping, assault, drug-related violence ...'), (45, '(45) Child abuse, sexual violence against children, neglect'), (46, '(46) War, civil war, terrorism, state-based violence'), (47, "(47) Other stories on crime and violence (specify the topic in 'Comments' section of coding sheet)"))), ('Gender and related', ((48, '(48) Sexual harassment against women, rape, sexual assault, #MeToo #TimesUp'), (49, '(49) Other gender violence such as feminicide, trafficking of girls and women, FGM...'), (50, '(50) Inequality between women and men such as income inequality/gender pay gap,'))), ('Celebrity, Arts and Media, Sports', ((51, '(51) Celebrity news, births, marriages, deaths, obituaries, famous people, royalty ...'), (52, '(52) Arts, entertainment, leisure, cinema, theatre, books, dance ...'), (53, '(53) Media, including new media (computers, internet), portrayal of women and/or men'), (54, '(54) Fake news, mis-information, dis-information, mal-information...'), (55, '(55) Beauty contests, models, fashion, beauty aids, cosmetic surgery ...'), (56, '(56) Sports, events, players, facilities, training, policies, funding ...'), (57, "(57) Other stories on celebrities, arts, media (specify the topic in 'Comments' section of coding sheet)"))), ('Other', ((58, '(58) Use only as a last resort and explain'),))], help_text="Choose one topic that best describes how the story is reported. Remember that a single event can be reported in different ways. Within each broad category, we include a code for 'other stories'. 
Please use these codes only as a <strong>last resort</strong>.", verbose_name='(2) Topic'), ), migrations.AlterField( model_name='televisionsheet', name='topic', field=models.PositiveIntegerField(choices=[('Politics and Government', ((1, '(1) Women politicians, women electoral candidates...'), (2, '(2) Peace, negotiations, treaties...(local, regional, national),'), (3, '(3) Other domestic politics/government (local, regional, national), elections, speeches, the political process ...'), (4, '(4) Global partnerships (international trade and finance systems, e.g. WTO, IMF, World Bank, debt) ...'), (5, '(5) Foreign/international politics, relations with other countries, negotiations, treaties, UN peacekeeping ...'), (6, '(6) National defence, military spending, military training, military parades, internal security ...'), (7, "(7) Other stories on politics and government (specify the topic in 'Comments' section of coding sheet)"))), ('Economy', ((8, '(8) Economic policies, strategies, modules, indicators, stock markets, taxes,...'), (9, '(9) Economic crisis, state bailouts of companies, company takeovers and mergers ...'), (10, '(10) Poverty, housing, social welfare, aid to those in need ...'), (11, '(11) Women’s participation in economic processes (informal work, paid employment, unemployment, unpaid labour)'), (12, '(12) Employment'), (13, '(13) Informal work, street vending, ...'), (14, '(14) Other labour issues, strikes, trade unions, negotiations, other employment and unemployment'), (15, '(15) Rural economy, agriculture, farming practices, agricultural policy, land rights ...'), (16, '(16) Consumer issues, consumer protection, regulation, prices, consumer fraud ...'), (17, '(17) Transport, traffic, roads...'), (18, "(18) Other stories on the economy (specify the topic in 'Comments' section of coding sheet)"))), ('Science and Health', ((19, '(19) Science, technology, research, funding, discoveries, developments ...'), (20, '(20) Medicine, health, hygiene, safety, 
disability, medical research, funding (not EBOLA or HIV- AIDS)...'), (21, '(21) EBOLA, treatment, response...'), (22, '(22) HIV and AIDS, incidence, policy, treatment, people affected ...'), (23, '(23) Other epidemics, viruses, contagions, Influenza, BSE, SARS. NOT COVID-19 (For stories related to Covid-19 choose the closest relevant sub-topic)'), (24, '(24) Birth control, fertility, sterilization, amniocentesis, termination of pregnancy ...'), (25, '(25) Climate change, global warming'), (26, '(26) Environment, pollution, tourism ...'), (27, "(27) Other stories on science or health (specify the topic in 'Comments' section of coding sheet)"))), ('Social and Legal', ((28, '(28) Sustainable Development Goals (SDGs), Post 2015 agenda, Agenda 2030'), (29, '(29) Family relations, inter-generational conflict, single parents ...'), (30, "(30) Human rights, women's rights, children's rights, gay & lesbian rights, rights of minorities .."), (31, '(31) Religion, culture, tradition, cultural controversies, teachings, celebrations, practices ...'), (32, '(32) Migration, refugees, asylum seekers, ethnic conflict, integration, racism, xenophobia ...'), (33, '(33) Other development issues, sustainability,'), (34, '(34) Education, childcare, nursery, university, literacy'), (35, "(35) Women's movement, feminist activism, events, demonstrations, gender equality advocacy ..."), (36, '(36) Changing gender relations, roles and relationships of women and men inside and outside the home ...'), (37, '(37) Family law, family codes, property law, inheritance law and rights ...'), (38, '(38) Legal system, judicial system, legislation (apart from family, property & inheritance law) ...'), (39, '(39) Disaster, accident, famine, flood, plane crash, etc'), (40, '(40) Riots, demonstrations, public disorder, etc.'), (41, "(41) Other stories on social or legal issues (specify the topic in 'Comments' section of coding sheet)"))), ('Crime and Violence', ((42, '(42) Non-violent crime, bribery, theft, 
drug-dealing, ...'), (43, '(43) Corruption, (including political corruption/malpractice)'), (44, '(44) Violent crime, murder, abduction, kidnapping, assault, drug-related violence ...'), (45, '(45) Child abuse, sexual violence against children, neglect'), (46, '(46) War, civil war, terrorism, state-based violence'), (47, "(47) Other stories on crime and violence (specify the topic in 'Comments' section of coding sheet)"))), ('Gender and related', ((48, '(48) Sexual harassment against women, rape, sexual assault, #MeToo #TimesUp'), (49, '(49) Other gender violence such as feminicide, trafficking of girls and women, FGM...'), (50, '(50) Inequality between women and men such as income inequality/gender pay gap,'))), ('Celebrity, Arts and Media, Sports', ((51, '(51) Celebrity news, births, marriages, deaths, obituaries, famous people, royalty ...'), (52, '(52) Arts, entertainment, leisure, cinema, theatre, books, dance ...'), (53, '(53) Media, including new media (computers, internet), portrayal of women and/or men'), (54, '(54) Fake news, mis-information, dis-information, mal-information...'), (55, '(55) Beauty contests, models, fashion, beauty aids, cosmetic surgery ...'), (56, '(56) Sports, events, players, facilities, training, policies, funding ...'), (57, "(57) Other stories on celebrities, arts, media (specify the topic in 'Comments' section of coding sheet)"))), ('Other', ((58, '(58) Use only as a last resort and explain'),))], help_text="Choose one topic that best describes how the story is reported. Remember that a single event can be reported in different ways. Within each broad category, we include a code for 'other stories'. 
Please use these codes only as a <strong>last resort</strong>.", verbose_name='(2) Topic'), ), migrations.AlterField( model_name='twittersheet', name='topic', field=models.PositiveIntegerField(choices=[('Politics and Government', ((1, '(1) Women politicians, women electoral candidates...'), (2, '(2) Peace, negotiations, treaties...(local, regional, national),'), (3, '(3) Other domestic politics/government (local, regional, national), elections, speeches, the political process ...'), (4, '(4) Global partnerships (international trade and finance systems, e.g. WTO, IMF, World Bank, debt) ...'), (5, '(5) Foreign/international politics, relations with other countries, negotiations, treaties, UN peacekeeping ...'), (6, '(6) National defence, military spending, military training, military parades, internal security ...'), (7, "(7) Other stories on politics and government (specify the topic in 'Comments' section of coding sheet)"))), ('Economy', ((8, '(8) Economic policies, strategies, modules, indicators, stock markets, taxes,...'), (9, '(9) Economic crisis, state bailouts of companies, company takeovers and mergers ...'), (10, '(10) Poverty, housing, social welfare, aid to those in need ...'), (11, '(11) Women’s participation in economic processes (informal work, paid employment, unemployment, unpaid labour)'), (12, '(12) Employment'), (13, '(13) Informal work, street vending, ...'), (14, '(14) Other labour issues, strikes, trade unions, negotiations, other employment and unemployment'), (15, '(15) Rural economy, agriculture, farming practices, agricultural policy, land rights ...'), (16, '(16) Consumer issues, consumer protection, regulation, prices, consumer fraud ...'), (17, '(17) Transport, traffic, roads...'), (18, "(18) Other stories on the economy (specify the topic in 'Comments' section of coding sheet)"))), ('Science and Health', ((19, '(19) Science, technology, research, funding, discoveries, developments ...'), (20, '(20) Medicine, health, hygiene, safety, 
disability, medical research, funding (not EBOLA or HIV- AIDS)...'), (21, '(21) EBOLA, treatment, response...'), (22, '(22) HIV and AIDS, incidence, policy, treatment, people affected ...'), (23, '(23) Other epidemics, viruses, contagions, Influenza, BSE, SARS. NOT COVID-19 (For stories related to Covid-19 choose the closest relevant sub-topic)'), (24, '(24) Birth control, fertility, sterilization, amniocentesis, termination of pregnancy ...'), (25, '(25) Climate change, global warming'), (26, '(26) Environment, pollution, tourism ...'), (27, "(27) Other stories on science or health (specify the topic in 'Comments' section of coding sheet)"))), ('Social and Legal', ((28, '(28) Sustainable Development Goals (SDGs), Post 2015 agenda, Agenda 2030'), (29, '(29) Family relations, inter-generational conflict, single parents ...'), (30, "(30) Human rights, women's rights, children's rights, gay & lesbian rights, rights of minorities .."), (31, '(31) Religion, culture, tradition, cultural controversies, teachings, celebrations, practices ...'), (32, '(32) Migration, refugees, asylum seekers, ethnic conflict, integration, racism, xenophobia ...'), (33, '(33) Other development issues, sustainability,'), (34, '(34) Education, childcare, nursery, university, literacy'), (35, "(35) Women's movement, feminist activism, events, demonstrations, gender equality advocacy ..."), (36, '(36) Changing gender relations, roles and relationships of women and men inside and outside the home ...'), (37, '(37) Family law, family codes, property law, inheritance law and rights ...'), (38, '(38) Legal system, judicial system, legislation (apart from family, property & inheritance law) ...'), (39, '(39) Disaster, accident, famine, flood, plane crash, etc'), (40, '(40) Riots, demonstrations, public disorder, etc.'), (41, "(41) Other stories on social or legal issues (specify the topic in 'Comments' section of coding sheet)"))), ('Crime and Violence', ((42, '(42) Non-violent crime, bribery, theft, 
drug-dealing, ...'), (43, '(43) Corruption, (including political corruption/malpractice)'), (44, '(44) Violent crime, murder, abduction, kidnapping, assault, drug-related violence ...'), (45, '(45) Child abuse, sexual violence against children, neglect'), (46, '(46) War, civil war, terrorism, state-based violence'), (47, "(47) Other stories on crime and violence (specify the topic in 'Comments' section of coding sheet)"))), ('Gender and related', ((48, '(48) Sexual harassment against women, rape, sexual assault, #MeToo #TimesUp'), (49, '(49) Other gender violence such as feminicide, trafficking of girls and women, FGM...'), (50, '(50) Inequality between women and men such as income inequality/gender pay gap,'))), ('Celebrity, Arts and Media, Sports', ((51, '(51) Celebrity news, births, marriages, deaths, obituaries, famous people, royalty ...'), (52, '(52) Arts, entertainment, leisure, cinema, theatre, books, dance ...'), (53, '(53) Media, including new media (computers, internet), portrayal of women and/or men'), (54, '(54) Fake news, mis-information, dis-information, mal-information...'), (55, '(55) Beauty contests, models, fashion, beauty aids, cosmetic surgery ...'), (56, '(56) Sports, events, players, facilities, training, policies, funding ...'), (57, "(57) Other stories on celebrities, arts, media (specify the topic in 'Comments' section of coding sheet)"))), ('Other', ((58, '(58) Use only as a last resort and explain'),))], help_text="Choose one topic that best describes how the story is reported. Remember that a single event can be reported in different ways. Within each broad category, we include a code for 'other stories'. Please use these codes only as a <strong>last resort</strong>.", verbose_name='(2) Topic'), ), ]
738.205128
5,602
0.702675
3,684
28,790
5.486428
0.116178
0.02078
0.02078
0.025233
0.987829
0.987829
0.987829
0.987829
0.987829
0.987829
0
0.04696
0.136818
28,790
38
5,603
757.631579
0.766368
0.001598
0
0.625
1
2.1875
0.852968
0.008455
0
0
0
0
0
1
0
false
0
0.03125
0
0.125
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
11
aa595e91d48ef6bd0381d3ae5df9d6d9f98834bc
98
py
Python
tests/test_main_hypothosis_example.py
peterHoburg/python_docker_example
55312fb450a53232fab8de08041e2d7edcc4ab23
[ "MIT" ]
null
null
null
tests/test_main_hypothosis_example.py
peterHoburg/python_docker_example
55312fb450a53232fab8de08041e2d7edcc4ab23
[ "MIT" ]
null
null
null
tests/test_main_hypothosis_example.py
peterHoburg/python_docker_example
55312fb450a53232fab8de08041e2d7edcc4ab23
[ "MIT" ]
null
null
null
from src.main import hypothesis_example def hypothesis_example_test(): hypothesis_example()
16.333333
39
0.806122
12
98
6.25
0.666667
0.68
0
0
0
0
0
0
0
0
0
0
0.132653
98
5
40
19.6
0.882353
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7
aada45b383c42235ce7f675b592ae687d3236d5e
159
py
Python
src/striga/service/sproaster/__init__.py
ateska/striga
451b5d9421e2e5fdf49b94c8f3d76e576abc5923
[ "MIT" ]
null
null
null
src/striga/service/sproaster/__init__.py
ateska/striga
451b5d9421e2e5fdf49b94c8f3d76e576abc5923
[ "MIT" ]
null
null
null
src/striga/service/sproaster/__init__.py
ateska/striga
451b5d9421e2e5fdf49b94c8f3d76e576abc5923
[ "MIT" ]
null
null
null
from ._sproasersvc_service import SubprocessRoasterServiceFactory from ._sproasersvc_process import ResidentSubprocess, RepeatingSubprocess, OneShotSubprocess
53
92
0.91195
12
159
11.75
0.75
0.212766
0
0
0
0
0
0
0
0
0
0
0.062893
159
2
93
79.5
0.946309
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
2d4d6de7c931f34b8fdf6505cb963fa81eb8f64c
22,383
py
Python
thrift/compiler/test/fixtures/patch/gen-python/patch/lite_types.py
donsbot/fbthrift
11e343118082583eb4326d51ff19c343c61ed3cb
[ "Apache-2.0" ]
null
null
null
thrift/compiler/test/fixtures/patch/gen-python/patch/lite_types.py
donsbot/fbthrift
11e343118082583eb4326d51ff19c343c61ed3cb
[ "Apache-2.0" ]
null
null
null
thrift/compiler/test/fixtures/patch/gen-python/patch/lite_types.py
donsbot/fbthrift
11e343118082583eb4326d51ff19c343c61ed3cb
[ "Apache-2.0" ]
null
null
null
# # Autogenerated by Thrift # # DO NOT EDIT # @generated # import folly.iobuf as _fbthrift_iobuf import thrift.py3lite.types as _fbthrift_py3lite_types import thrift.py3lite.exceptions as _fbthrift_py3lite_exceptions import facebook.thrift.annotation.scope.lite_types import facebook.thrift.annotation.thrift.thrift.lite_types class GeneratePatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ) @staticmethod def __get_thrift_name__() -> str: return "patch.GeneratePatch" @staticmethod def __get_thrift_uri__(): return "facebook.com/thrift/op/GeneratePatch" @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_GeneratePatch() class GenerateOptionalPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ) @staticmethod def __get_thrift_name__() -> str: return "patch.GenerateOptionalPatch" @staticmethod def __get_thrift_uri__(): return "facebook.com/thrift/op/GenerateOptionalPatch" @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_GenerateOptionalPatch() class BoolPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "invert", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.BoolPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_BoolPatch() class BytePatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_byte, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "add", # name _fbthrift_py3lite_types.typeinfo_byte, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.BytePatch" @staticmethod def 
__get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_BytePatch() class I16Patch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_i16, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "add", # name _fbthrift_py3lite_types.typeinfo_i16, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.I16Patch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_I16Patch() class I32Patch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_i32, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "add", # name _fbthrift_py3lite_types.typeinfo_i32, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.I32Patch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_I32Patch() class I64Patch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_i64, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "add", # name _fbthrift_py3lite_types.typeinfo_i64, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.I64Patch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_I64Patch() class FloatPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_float, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "add", # name 
_fbthrift_py3lite_types.typeinfo_float, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.FloatPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_FloatPatch() class DoublePatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_double, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "add", # name _fbthrift_py3lite_types.typeinfo_double, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.DoublePatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_DoublePatch() class StringPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_string, # typeinfo None, # default value ), ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "append", # name _fbthrift_py3lite_types.typeinfo_string, # typeinfo None, # default value ), ( 5, # id True, # isUnqualified "prepend", # name _fbthrift_py3lite_types.typeinfo_string, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.StringPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_StringPatch() class BinaryPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 1, # id False, # isUnqualified "assign", # name _fbthrift_py3lite_types.typeinfo_binary, # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.BinaryPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def 
__get_metadata__(): return _fbthrift_metadata__struct_BinaryPatch() class OptionalBoolPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(BoolPatch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(BoolPatch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalBoolPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalBoolPatch() class OptionalBytePatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(BytePatch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_byte, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(BytePatch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalBytePatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalBytePatch() class OptionalI16Patch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # 
id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(I16Patch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_i16, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(I16Patch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalI16Patch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalI16Patch() class OptionalI32Patch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(I32Patch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_i32, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(I32Patch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalI32Patch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalI32Patch() class OptionalI64Patch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(I64Patch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_i64, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified 
"patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(I64Patch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalI64Patch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalI64Patch() class OptionalFloatPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(FloatPatch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_float, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(FloatPatch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalFloatPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalFloatPatch() class OptionalDoublePatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(DoublePatch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_double, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(DoublePatch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalDoublePatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def 
__get_metadata__(): return _fbthrift_metadata__struct_OptionalDoublePatch() class OptionalStringPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(StringPatch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_string, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(StringPatch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalStringPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalStringPatch() class OptionalBinaryPatch(metaclass=_fbthrift_py3lite_types.StructMeta): _fbthrift_SPEC = ( ( 2, # id True, # isUnqualified "clear", # name _fbthrift_py3lite_types.typeinfo_bool, # typeinfo None, # default value ), ( 3, # id True, # isUnqualified "patch", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(BinaryPatch), # typeinfo None, # default value ), ( 1, # id False, # isUnqualified "ensure", # name _fbthrift_py3lite_types.typeinfo_binary, # typeinfo None, # default value ), ( 4, # id True, # isUnqualified "patchAfter", # name lambda: _fbthrift_py3lite_types.StructTypeInfo(BinaryPatch), # typeinfo None, # default value ), ) @staticmethod def __get_thrift_name__() -> str: return "patch.OptionalBinaryPatch" @staticmethod def __get_thrift_uri__(): return None @staticmethod def __get_metadata__(): return _fbthrift_metadata__struct_OptionalBinaryPatch() # This unfortunately has to be down here to prevent circular imports import patch.lite_metadata def _fbthrift_metadata__struct_GeneratePatch(): return 
patch.lite_metadata.gen_metadata_struct_GeneratePatch() def _fbthrift_metadata__struct_GenerateOptionalPatch(): return patch.lite_metadata.gen_metadata_struct_GenerateOptionalPatch() def _fbthrift_metadata__struct_BoolPatch(): return patch.lite_metadata.gen_metadata_struct_BoolPatch() def _fbthrift_metadata__struct_BytePatch(): return patch.lite_metadata.gen_metadata_struct_BytePatch() def _fbthrift_metadata__struct_I16Patch(): return patch.lite_metadata.gen_metadata_struct_I16Patch() def _fbthrift_metadata__struct_I32Patch(): return patch.lite_metadata.gen_metadata_struct_I32Patch() def _fbthrift_metadata__struct_I64Patch(): return patch.lite_metadata.gen_metadata_struct_I64Patch() def _fbthrift_metadata__struct_FloatPatch(): return patch.lite_metadata.gen_metadata_struct_FloatPatch() def _fbthrift_metadata__struct_DoublePatch(): return patch.lite_metadata.gen_metadata_struct_DoublePatch() def _fbthrift_metadata__struct_StringPatch(): return patch.lite_metadata.gen_metadata_struct_StringPatch() def _fbthrift_metadata__struct_BinaryPatch(): return patch.lite_metadata.gen_metadata_struct_BinaryPatch() def _fbthrift_metadata__struct_OptionalBoolPatch(): return patch.lite_metadata.gen_metadata_struct_OptionalBoolPatch() def _fbthrift_metadata__struct_OptionalBytePatch(): return patch.lite_metadata.gen_metadata_struct_OptionalBytePatch() def _fbthrift_metadata__struct_OptionalI16Patch(): return patch.lite_metadata.gen_metadata_struct_OptionalI16Patch() def _fbthrift_metadata__struct_OptionalI32Patch(): return patch.lite_metadata.gen_metadata_struct_OptionalI32Patch() def _fbthrift_metadata__struct_OptionalI64Patch(): return patch.lite_metadata.gen_metadata_struct_OptionalI64Patch() def _fbthrift_metadata__struct_OptionalFloatPatch(): return patch.lite_metadata.gen_metadata_struct_OptionalFloatPatch() def _fbthrift_metadata__struct_OptionalDoublePatch(): return patch.lite_metadata.gen_metadata_struct_OptionalDoublePatch() def 
_fbthrift_metadata__struct_OptionalStringPatch(): return patch.lite_metadata.gen_metadata_struct_OptionalStringPatch() def _fbthrift_metadata__struct_OptionalBinaryPatch(): return patch.lite_metadata.gen_metadata_struct_OptionalBinaryPatch() _fbthrift_all_structs = [ GeneratePatch, GenerateOptionalPatch, BoolPatch, BytePatch, I16Patch, I32Patch, I64Patch, FloatPatch, DoublePatch, StringPatch, BinaryPatch, OptionalBoolPatch, OptionalBytePatch, OptionalI16Patch, OptionalI32Patch, OptionalI64Patch, OptionalFloatPatch, OptionalDoublePatch, OptionalStringPatch, OptionalBinaryPatch, ] _fbthrift_py3lite_types.fill_specs(*_fbthrift_all_structs)
27
84
0.590984
1,881
22,383
6.5311
0.054226
0.07619
0.125356
0.107448
0.776964
0.776964
0.776964
0.711844
0.701425
0.701425
0
0.015902
0.334138
22,383
828
85
27.032609
0.808374
0.115668
0
0.697406
1
0
0.04131
0.016299
0
0
0
0
0
1
0.115274
false
0
0.008646
0.115274
0.29683
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
7
2dc049cfb468dadb4a383de37a177259a64f32bc
7,477
py
Python
src/pywildmatch/test/data/pywildmatch_data.py
ryanormous/pywildmatch
15075f8d51e52a9d611f4279a75f630bc8c73cb3
[ "BSD-3-Clause" ]
null
null
null
src/pywildmatch/test/data/pywildmatch_data.py
ryanormous/pywildmatch
15075f8d51e52a9d611f4279a75f630bc8c73cb3
[ "BSD-3-Clause" ]
null
null
null
src/pywildmatch/test/data/pywildmatch_data.py
ryanormous/pywildmatch
15075f8d51e52a9d611f4279a75f630bc8c73cb3
[ "BSD-3-Clause" ]
null
null
null
# DATA = ( # '<PATTERN>', '<TEXT>', {<RESULTS>} # ) DATA = (( # ANYTHING '?**/**0', '_Ab/210', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**/**?', 'ABc/10_', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**/?**0', 'ABc/_10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**?**0', 'ABc_210', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**?*0', 'ABc_210', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*?**0', 'Ab_/210', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 0, 0), 'pywildmatch': (1, 1, 0, 0, 1, 1) }), ( # CHARACTER CLASS 'A**[:digit:]**0', 'A10', { 'python_fnmatch': (0, 0), 'libgit2': (0, 0, 0, 0), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**[:digit:]*0', 'A10', { 'python_fnmatch': (0, 0), 'libgit2': (0, 0, 0, 0), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*[:digit:]**0', 'A10', { 'python_fnmatch': (0, 0), 'libgit2': (0, 0, 0, 0), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # CHARACTER SET 'A**/[a-f]**0', 'A/b0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**/[a-f]*0', 'A/b0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**[a-f]**0', 'Ab0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**[a-f]*0', 'Ab0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**[a-f]/**0', 'Ab/0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**[a-f]/*0', 'Ab/0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*/[a-f]**0', 'A/b0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*[a-f]**0', 'Ab0', { 
'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*[a-f]/**0', 'Ab/0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A[a-f]**/**0', 'Ab/0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A[a-f]**/*0', 'Ab/0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A[a-f]**0', 'Ab0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A[a-f]*/**0', 'Ab/0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # ESCAPE + ESCAPE 'A**\\\\**0', 'Ab&\\\\10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**\\\\*0', 'Ab&\\\\10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\\\\**0', 'Ab&\\\\10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\\\\\\\\**0', 'Ab&\\\\\\\\10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # ESCAPE + SPECIAL CHARACTER 'A**\\+**0', 'Abሴ++ሴ10', { 'python_fnmatch': (0, 0), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**\\+*0', 'Abሴ++ሴ10', { 'python_fnmatch': (0, 0), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\\+**0', 'Abሴ++ሴ10', { 'python_fnmatch': (0, 0), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\\+\\+**0', 'Abሴ++ሴ10', { 'python_fnmatch': (0, 0), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # UNICODE CHARACTER 'A**🛧**0', 'Ab🛧10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**🛧*0', 'Ab🛧10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*🛧**0', 'Ab🛧10', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 
'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # OCTAL 'A**\046**Z', 'A&Z', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**\046*Z', 'A&Z', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\046**Z', 'A&Z', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\046\046**Z', 'A&&Z', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # HEX 'A**\xff**0', 'Aÿ0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**\xff*0', 'Aÿ0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\xff**0', 'Aÿ0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\xff\xff**0', 'Aÿÿ0', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # 16 BIT UNICODE 'A**\u0123**Z', 'AģZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**\u0123*Z', 'AģZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\u0123**Z', 'AģZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\u0123\u0123**Z', 'AģģZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( # 32 BIT UNICODE 'A**\U00001234**Z', 'AሴZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A**\U00001234*Z', 'AሴZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\U00001234**Z', 'AሴZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) }), ( 'A*\U00001234\U00001234**Z', 'AሴሴZ', { 'python_fnmatch': (1, 1), 'libgit2': (1, 1, 1, 1), 'pywildmatch': (1, 1, 1, 1, 1, 1) })) if __name__ == '__main__': from __init__ 
import validate_data for dataset in DATA: validate_data(dataset) print(__file__, len(DATA), 'tests')
27.692593
42
0.378494
1,060
7,477
2.607547
0.057547
0.303907
0.306078
0.273517
0.906295
0.900868
0.900868
0.900868
0.900868
0.900868
0
0.161438
0.326468
7,477
269
43
27.795539
0.386021
0.025278
0
0.768924
0
0
0.322514
0.003438
0
0
0
0
0
0
null
null
0
0.003984
null
null
0.003984
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
12
93278b10169f411b3afa9a2155a5dda312cbfc15
125,295
py
Python
Deface.py
nfs-tech-bd/deface
b103cd50e7edb5303f12fc2a7e3b70855fc74ebf
[ "Apache-2.0" ]
2
2021-04-18T05:34:06.000Z
2022-03-30T06:21:42.000Z
Deface.py
nfs-tech-bd/deface
b103cd50e7edb5303f12fc2a7e3b70855fc74ebf
[ "Apache-2.0" ]
null
null
null
Deface.py
nfs-tech-bd/deface
b103cd50e7edb5303f12fc2a7e3b70855fc74ebf
[ "Apache-2.0" ]
null
null
null
#-------------------------------------------------# # Obfuscate By Mr.GamingThanks To Black Coder Crush # github : https://github.com/clayhacker-max # from Linux # localhost : aarch64 # key : Asep-dC6gB6cW3tT3aY8 # date : Thu Mar 4 07:35:21 2021 #-------------------------------------------------# import marshal,base64 exec base64.b64decode('import marshal,zlib
exec marshal.loads(zlib.decompress('x\x9c\x00%@\xda\xbfc\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s6\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00e\x00\x00j\x02\x00e\x01\x00j\x03\x00d\x02\x00\x83\x01\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs#~\x00\x00x\x9ct\x97gP\x13l\xf0\xe0A\x14T\x9a\xbeHo/(\xa0\xd2\xa5\x97\x10\x15\x11\x15\x04\x15\xe9\x1d\x94H/\x11\x02\x04\x12\x10iR\xa2\xa2\xd2A@)\xd2\xa5\xd7\x80t)\x01\xe9-\xa1&H\x02\x01\x02\t\x90v\xef\xdd\xfc?\xdd\xcd\xed<\xbb\xb3\xb3\xb3\xf3\xfb\xb0\xb3\xfb\xcc\xae;\xcb\xff\x08\xdb\x7fz\xfb?\x05K\xfdg^\xfc\xf7XY|XX\xecXX^\xb2\xb0x\xb1\xb2\xbc8\xc3\x12\xc3\xfa\xbf\x83g-\xff3\x16\xd7\xffw\xba\'\xf3?1\x03\xcb\xc2XX\xdc\xff\x1f\x8c\xe6\xff\x85\xf9\x1f\x9f\x95\xc5\x8e\xf5\xff \xcf\xb0\xbcde\xf1b\xfb\x1f\xf0\xff\x8f\r\x8ada\t\xcdI\xee\x98\x0fZ\xf7\xfa\xa91\x83kr\xee\x85\xa4\x96\x06\xa8w\xbb\x12\xff\xe4\x1a]\xfez\xfe\xc2\xd7\x07n\x9di\x0f\x12\x85,\x7f8\xfeX\x0b\xc0\x0b\x1e\x88.H\xce\xfc\xa6}5\xe0\x89_\x0e4\x83p\xdf\xfc\xe78n\xe4\xb3\xbc\xb5\xc7.S\x86y\x92?\xc2d\x12\xb1L8\xdd\x1f\x8b\xe4\xc12\x19x\xe7\x1d$\xc3r\xce\x999\x8b|\xc7<\x9d\xc5\xc3\xe9\xe5\x0cI\xa6.\r\x01\xa7K2\xa7\x99\xabL\xb1l\xe6\x02\xbd\x9a\x0e\xacd(\x93\xf2\xf7f\x99\'H\xda,2\xcf\x02wxk\xdf\xb8\xfc\xbb\xc5.\xa6G2,\xfc\xf4h!\x17x\xb0nLm5\xddV\x863\xa8/\xc3,\x04(\xdd\xbc\xb4H\r\xefw\xea4\xd2\xc7Qe#\x01\'\xc9]\x9eI\x02C{8\x97\xa4n\xb3\xe4\xa4\x89\x0e\xcbB4Qg\xbfc\xf6QX\xeal\xdf\xce\x10m\xe4\xc6\x1e\x03\x08cb\xe0h:\x9cF\xd0\xf0a.\xa2\x14\x9a\x98\xf4Y\x10\xe3\x94\xb6\x89\x81\xd1\x98\xf4\x13Rz~:\xd0\xbc\x93\xa6\xcf\xa0\x1dl\xa2P\xce\x0c\xda\xe9\xa6\x9a\xf2\xe9\x08\xde\xe8x\x13\xcc\\$"a@\xb4\x7f\xf6\x95cg^\x8c\xb0\xf3\xefU0\xb2b\x1a\x1c\x94\'\xe0\xe8:\xd3\xbc\xf5\x93\xfaS\xde\x8e\x06\xeb\xd0\xc279\x9d\xf2:\xc3\xf2\xa0j\x7f)\xee\x1a\x89\xfb\x94n"\x83H\xed\xb8q\x8a\xb2o5c\n\xb7\x9c$\xb7\xef\x02\xd5|F\x88\xd9\x0c\x9d\x11"\x90\xea<\x8c\xa1#\xe9\xadLB&\xa3\xf3\xc1h\xabB\x13\x1d\xf4\xdc\x15w\x88\
xac:\x05\xd9(#\x17A\xa3x\x8f\x19\xa6\xedJ=\x9d\x82\xfa\xc3@\x15j\xc2\xb6\x98\x10\x87P\xdb\x9f\xcb\x99\xf9\x95\xd4\x0e\xa7ua*E?\x8f\x8d\x84>ipN3\xa4\x8cW\xdf\x01\xc0\xed)8\xa9\x8b\xd4|\xcf\xa4\xa4\xaa\xa1\xfd+\xb2\xf4|\nS\xb3\x0b=\x0f\xae[\nP>\xa1\xb2 \xcd\x02B\x98\xb6a\xb4\xce\x9e5\xf2\x1c\x1c\xb3@\xebL8\x8e\\\x1d4\xd9\xa3a\xcbF0\x8f\xc1\x84;0\xd2\x1f$x\x19i\x1cM\xe9u\xbaG\xefl<\xea\x84\xa3\xaf\x9f \xd4\x1c\xf6l\xf7\x93m\x90\xdeLj\x84\xcf,;Ry{Pg\x9d+6\x8f\xc07\xf6i\xbed\xcb\xe3\xf3\xba\x8e?\xb0F\x96n\xe0\x88\xa3\nR;\xe2a\xb9\xde\x11\xd6\xe9\xccn\x83C|F\xff\xe8\x1e\x17\x1d\xa7\x81c\xe8o\x043#\xfd\x98\x8e8\x86\xe9\xc6\xec\x8aP2\x0f\x0e\x06\xec\xa9"\x10\x03\xa7\x99\xe1)G\x9a\xf8\xb6\xbd\x05\x86\xce/\x86\xc2w\x1b"\x03,G\xaf\xab\xbeZ\xedL|\xc8\xcc\xa5\xa3p\xb9\x88\xc8\xbf\xb3\xe5\x18^\x12\x97\x96\x7fbD\x89m>a7\x8a\x14O\'\xb6[0\x97\x8c\n\xbd7\x9a\x9e.\x911\xd6\x14L<\xa9\x9d\x00\xf4\x8f%\x898\x19\x1c\xcd>f*\x18\xbd\x0e\xd8\xc79!\xd12c\x0b"\x98\xbf\xc3j\x14\xa2\x13c r\xd8\x1b>\x97}\x0e\xde\xd1\n\xdeCv\xe76\x08\x83`Zd\xcfw\x91A\xd4\xe0\x10c\x9b4u\x06\x070\xa6\x81\x80\xba\xf3\x07\x0eIl\xab\xd1\x1e\xa4fG&%\x01\xb5Is\x9c\xcc\xbf\xc3\xc2\x84Y\x9fC\x0f\x98x*\x18<;\xac\xb9\x14\xfb\x13aL\xaa\xa3S\xfc\xec+\x02wVa\xb2\xf4:H\x9fV\x9dsz\tcYO\x1fE\xe0\x86\x8e\x8fQ\xe1\x82+\x80T\xb9\xe5\xecB\xf4)\xd9Q`\x0e\xdcN\x11AR\xac\xe9\x14\x01\xa2\x1a`\xe0\xc0\xff\xb4\xad=\x13\xd0\xdd\xf96(}\xf8b\xa3\xfb\x90\x1a\x0f\xf5\r\xf1\xf1\xc1\xe3\xd6e\x90\tH\x9b\x8d\x0e^K\xf6\x11\xab\xb8\xb0\xdb\xc3J$\xb7RwW\xc2&\xd1\x9a2\xdf\xab2\xc3\x18\x0c0\x153\xdb\xec\xae\x9f_\xa4\xb5\x14\xe3\xf2\x16\x18\xbf\xb3\x8d\xba\x8b\xc6$\xdezM\x05\x00\xf30?\x10\xb3L\xed$,4O\xb6|[%\xeb\x08T\xb8\x8dD\x80\xae\xbf\x07\x98\x07\x08&\xa1\x89\nItV\x06v\x10\xe0\x8fk\x9a1[sj\xe1\xcb\xd1Xj\xea\x987\x89\x80\xc3=\xe4\x97\x16\x95N\xc6\xe9\x8a\xdb\x0c\xb4O\xd8@\xae\xf9\x12h\x8e}i\x91A_\xbe\xd1\t\xbe<l\xbb\x11\xf9-\xf2/\x93\x11\x1a\x0b\xe8;\xd9&:\x0c\xab\xa9\xeb\x10(\x967P\xfec\x17v\x86\xbea\x88\xd2\x99\xeba\xe1m\xcdq\xcbI\xe6\xda\xc0f\x7f0 
\xdd1\x8a\x92H\x85\xdc`\xa3\xf9 ;\xb3?4p\x1f\xcf\x1f\xeb/\x9d\x1cD\x11w\x99\x8e\xad\xc1\x87j\x07\xd7\x1d\xb6O+\x04c\xf3HY\x06K\x03T\xea\x11N\x18S\xd7KMqIZ\x899%:8\xfe\x8a\x05\xfe!\xd5\xa7\xd2\xe8\x9d\xceM}\x81o\xeb\xf6\xc6\x00\r\x8d\x02\xdev\xcf\xab\xf3\x8e\xa3^\xea\xc7v4\xcb\x07\xce\xec\x11\x92\x18\n-i\xaf\xf6\'\x16y\xf3\x1e\xcf\x91\x7f\xf6Q\xf6f\xe9\xe2\xdb(k\xd1P{\x02S\xd4\xb7\xbb\x89?bH?>\xffaO\xd3yMu\x9a\xe7\x8e\xc2\xaes@c\x88DHEf\xbb\xb6\x1b\xe8\xcb\xae\x10\xae\xf6F\xde-\xa2\xcf\x8e>\x90\x97:\xb7\x15yC\xa0\xa3r\xcf\x84\xa0\x13\xf1\x909\xbf\xff\xad\x0b\x1a\n\'\x88;\xfcPTA-.\xe6\xbe\xd4\x97\xf0q*\x83\xfc\x94e\xdf\xf2\xb5\x01/\x1fR\'\x17\xcf\x82!;h\xc5\xa4\xcb\xa8\x0b>\xcf\xc0[H>\xaa\x80\x83\x851\xea\xce\x84\xfe\x1a\xea\x97\x88\xa6\xa8f\xd9\xfb:\x87F\xfc\xb20\xee<\xa6#\x19\x18J\x06R)\xdb{\xa6\x96\xfe\xc05\x89\xea\x91\x1aT\xf3\xb0\x04\x1d\xf6g\xb1\xe8\x9f\xe6\x7f\x9b\x1b\xf1\xa5@\xb9\x1a\xee\xc5g\x14\x04r k\xaafy\xf8\x17GH\x942\xab\xdf\x9b\xfc\xb7@\xfb_\x82\xae\xc4\x99\xb2\x12T\x04\xb6\xeed\x8f\x95\xce\xd8|\xca*I\xda\x187_s\xec_5a\xb7\x7f\xbf\xe2T\xd2\xa5\xd8\xe5\xc0\x87\xa63@\x81\xbc\xe2\xcb\xde1\x9e\xfe?_\xfa\x1bc\xf9wY\xf2\xaa\x9b$\x9d(+[\xd5\xa4!\xb61\x98\x8a^\x97\x92\\\xcd\xac\xf9\xd8_q\xe8F\xa2\xb9\xc3\x88w\x9f\r;o\xe7\xfa\xc8\x8a\xc4\t\x94\xa9\x0b|[\xc3::}:!\x12\xe2yx/\x9f\x9a\xda\xd0\x9fu\r\xcf\x98\xbd3[\r\xfd\xb3\xad38\xd0%6Gl\x80\x9e\xda\xac.\xd72\xd5\xdc\x1a\xad\xe8\xe4LrE\xd9\xa1\xf0\x19\xf0\x00\x04K 
\xc3\xad\xaf%\xae\xa0~\xf6-w\x0b;\xa7\xb8\xcd\xed\xbc8+Y\x9e\x11\x92I\xaa"c>\xcb\x86\x1a\xccU}\x9f\xad\x88\xeak\xbf\x15\xd4\xcbP\xb4\xeeb\xeb\xcd\xcf\xbe\x11\x81\xe8\xfbX_w\xbc$3f\x8c\xb4\x92a\x1a\xbc\xc5,\xd7\xf7J(7W\xd3q}\x82\xd4\xe5\xa7\xb0\xb4.\xd4!\x10\xda\x0b\x8a\xdfs\x97\x9f\xdb\xfe\xfa\x17\xf5\xdb\xcbg\x17\xa7\x95\xd4\xbd\xdds\x95\xacC\xea\x9d\xfd3\xb6\x0e\x13\x82\xb3\xc0\xed\xe3/\x8c\xad-o\xc9\xed\xae\xfdl\x99_\xc9\xf9t\x87\x19qX\xa4\x14\x01\xdc\x05\xfb\xcb\x0b\xec\x9e\xbc\x9c\xb7B\xa4VMx\xd0\xda\x8f\'VaN\xd0\xfc[\xfe\x1c@\xe15\x85\x1d}\x80ItO\xf0\x80\xf8\xcd\xd9\xf5Q\n\xc7\xdcL\x8fRj\x02\xc7e\xf9{\x8b\x1c9\xf3\xe7\x85\x98\xad\xd8\x16F\x0c\xd5\xcb=1\xcc\x1b\xd30PdB\x11\xbb<\xd9\xbb\r:\xc1\xa9\xd2^O\x10\xe4\xc2\x16\x0cJ\xab_\xea7\xb5l9\xda\xf4\xc4\xf5\xa7\xdf\xde\xafM<\xaa\xd1,\xeb\x83k\xf8VF\x1a\x13$}\x86\xd6\xb4\xc3\xde\xc5i\xc7\x84X\xfa\xa6\x86a\xe4\x03\xb6u\xccq\xc2\x84(N\xb18mY\xb1H\x9f\xa3\x11~m\xd1\xb9I\xd9\x80\xc3\x8af\x10\x81\x7f\xec\x1e\x0f\x93\xc3yZ\x06\x89V\x80\xc5*V\xc6\x81\xdb\x14y\x89t\xea9\xf7\xf6\x98\xb6H\xe2\xeco\x97\x1f\r\x91\xbcy=\x07\xe74\x7f\xf3a\x1e!\x11\x1d\x85HM\x0e\xa9\xcb\xa6\xf7F\xc4\x15\xde\x98/\xbcc\x8b8f\xc7;xE\x9c\xa7>jKRs\x9a\xd0\x12\xd4O\xb7\x94\xcc>\x0c\x15\x80V\x11\xed~\xc9A\\\xdf\xc21\xd7\xf4\'\xe9\xb8TC^\x08\x19\xdd\x9d\xdb\xa3w\xa8\xe9U{\xba\xb8\xbb\xf8\x0f\xf1\xb94O,\xaf\xd9\xc2\xb7\xbf\xf9%\x15Gu$y\x1f\xe9\x99\xd4\xfd\xa5m\x84[4\xcb1Lc\x1a4`\xa7\x80\xdd\x0b\xecKe\xec\xb0\xb4\t\xad\xeb\xbfe\x9bQn\xaaq\x1c\xba\xea\x96U\x0fD\xfa\xc6\xaa\xbd\x03q\xf4\xe3t\xce2\xe6X\x04\xaf$\xba\x89\xd7!\x1c\xf5V$Nk\xcc\xd6\xdf\xbd\x81\xe0+2\xfa\x8f\x8f\x1f\x04\xc6\xd4\xf2\xe9\x118`U\xf0&\xec\x9a\x9cH\xdd*\xf4\x98+x/^\xb3\xbaT"\x91\x8d\xb1\nI\xc4hy_Y\x9f\xad\x19\xedJ8\x07\xdb=OC\x9a!\xb4\xdei\xcdtI\xed\xb9\xcb\xacI\xe4\r\xd4\xf6\xab\x9e\xdb\x85$\x80\x13B]</\xf9\xac\xafH\xf8^\x8e\x17\xd3\x15-\xba\x17\xaf\xd7g\xab\xb3{\x1c\xd2\xd5\x8fmG\xfe\xd1\xa2W\x86v\xcf\x9d\xd0;\x97o\x1axMW\x1f26\xfa&Cu\xffF\xe4\x00*\x88\xb1D\x04\xf5ud\xdb\xac\xcd$\xe1\
xc6z\x1cC\x9f\x95\xf7\nQ\x94V\xb1\xcf\xe6\x1f\xe2[\x15*\xbb"\x82\xbe\xc1\x87\xa4P\x844\xab\x9c\xd7#\xc8o\x00\xfb\xc5R*\xe3;\xdf\x06Vu\x02\x98\x17\r\xae6G\\\x9f\xc9\xe6?\xac\xedj\xeb>;W\x18\x91\x95\x03\xb7\x97\xac\xcf`k\x13\xd4\xd8\x9fQ\xba)\x19,N*g\x12\xec\xc5V\xcd$\xc4S\x16W\xf0f\x05)\xa2V\t\x90\x98\xe1\x12\x8b\x15X<0\xc0)\x9a\xb2\xf6\xf7\xa27\xdeS\xec\x8f\x80\xa5\x7fL\xf4NW\xa0P\xdb\xfe\x0f\xe4\xee\x0e\xb9{\'\x7f\xb2\xe3:\xea6\xd7\xf2\xe2\x1d\x92\xc1\xca\xedK\xcc\xe7\xff\xc8\x9d\x847\r\x9a\xb3\xd8O\xc4]\x86~VN\xb0^J[\x93^PK\x17Kj\x08\x86\x9d\xbck\xab\xf3\xe2\x05\x80\\\xd6\xb2\xbf!DS\x1c\xd1z\t-\xb2\xa8@f\x95\x14\xca`\xbfl\xd3`f\xe2]D\xd7\x86O\x1e&\x13\x10\xea\xf4\r\xa6\x9d\x105\x9chr\xb5wOC\xf5\xc0j\x9f\xd7\x87\x07\xf5l\xf7\xed\x1c\xe5\x9eokK\xcc\xc3\x8c097\x91\xda\xe1\xde\x8a!\xde\xa5\xf9c\xf6\xfc\xfc\xabq\x14\xfb\xbe\xd2\x84\x81\x91yg\xad\x95X%T$\xa17\x94%\xb4<\nscA\xfbD\xbb\xa3\xd1+=\xe9\xe0\xefO\x00>R]\x15\x7f\x86|\xd3\xb7X3n\x9b\xf2Q\xbbH=p\xd5\x7fI\xf2@\xc6_\xf9\x021\xb9W\xae\xd26\x1b\x13z_\xf9\xd8\xb8\xb6\xb8[\r\xbeY\xd3\xf3P\xa4d\x87\xc3\x97n\x9c\xa4V\x17P\x83\xb5\xd7\xc7\xacJ\x02@\xc9\xddK\xfb\xc3\xda\x9f\x8e\xbf8\xa4\xee\'Jf\xef\xe3.\x83\xbef\xa7DO\xb8\xf5\x8e\xac\x07\xbc\xcd8\xb8\x08\xfdX\xea\xef\xf88\xe9\x03s\xd8M1g\xa7K\xb8F\xcc\xc7\xa2\xe5q\xaf\xb9\x10\xf3\x11A\xcct\x90c\x85\x90\xe3\x9e\xb33\xfd\x98\'\x7f\xd9\x8az/@m\xddn\xee\xf1\xdey\xd9\x99~\xc7\x01\x18\xd9\xe0\xa9\xcf\x944\x03\xda\xf52q\xce>((y\xb5#\x1cw\x1d\xb7nF\x10v&\x93\xb7\xbe\xceu\xf0\xf8\xf8\xfcY4\xec\xb7\xec\xef_\xea\x96\x1b\xc0\x80\x18c\xccr\xa6N^\xd9:o\xaf\xac\xc91\x08\xf4\xbd\xf6*M1\xd2#\x8f\xd5\xdf\xdcu]C@\xc5 
tq\xa1\xb9>Q?\xf2\xfe\xde\xb9^{\xf7v\xc9\xb5\xa2\x1a\x84\xe0\x11\xcc]\x9c\x13\x9aO\xc2#\x9c\xb5\x05\x9b\r\xed;\xd2\x9bE\x7fM\xde\x92\x8d\xabL\n\xe3\xc1\x80\xd7\xa2\xad|\xbe&Rx\xf3\xda)\xbfbG9\xeb`\xce\xed\x8ff\xd1\xfa\xf8\xfc\xb3\x81\xab\xd8|z(\xa7\xb3\xe4\xae\x99\xf1\xe6\xe6%I\xa5\r\xd3\x98t\xb0r}S7f\xae\x08}_\x18\xf3~\xa5\x83\x8e\xe7\x1bC1\x06`\\\x8b\x1b\xd7\x05n\xa1\x1c\x0c\\\x01#\xe2;\x81\x99\xf1u\xc4\xe3\xe9\x12\xc9\tA{{C\xcc\x1d.\nW\x850\x9f\xc9Vn7)[/\n\xe2\xea`\xd7\xab\xedoa@a\x9d+~"a\x9dW\xf4J<\xe5\xb1\xf7\x1a\x81\xae\xf3H}\xa6\xafU\x8d\xbc\xe3\x8b/\xfc\x0cT\x03\xda\xe7+jF=NJ\x9dd\xf3\xbd9sT\x00\xaf>\x0f\xd5\x9al\xfe\x10\xa8C\xf0\x1f\x9dk\x15\xd7~\xd6\x1b\x03cS\x1a\xa8`\x1d\xbb+\x1d\x98\x9d\x88\xff\\\xa7\x1ax\xfbPw{\xf6{i\x97\xe1t7\xc7\xd8Y\x9dx\xf9@Cf;\xeb.|\x9f\x07\xae\tEG\xfe\xc8\x97K\xef\xe0\x9a\xfd\xd0\xd7\xb6rY\xd6\xe8\x9c\xc2\xf3\xce\x8fI\x91\xb0\x10\xe0\x996\xa5\x9a\xa6_\x9c78Z^\xf5{!\x07\xd7+\x03\x10B\x86\xa1\xaacO6\xe8\xd2\xccq\x92\x18V\xc2\xc9\xef\x14\xae\x05}\xa4N^d1\xe7X)\x0e`O\xec\xe8G\xabZ\xa4\x9e\x07-\xc70\xed1r\xeff[5|<\x89\xc0\x14\xd4\xef\x83[G\xd7f\xd6rA4\xee]\xeeZkq\xcd\xc50\xed\x13\x16\xecy\xdf\x91d\xbd\xa9\xb7~\xd7\xc9\xce4\x0f\x91\xa5\x7f\xfd\xd8=\xf4\xdf=>jj\xdb0$\x14&r\xb8 \xfd\x98\xfb\x97\x87\x9b\x91\xfc%\x1bO\xf1\xac\x1c\xc6\xd9\xef\x88\xc5\xf8\x18=\xa78\x11NE\xad\x92o\xb8\xcf\x89\x80\x1a\x7f\x9d\xb1\x076\x07\xb4O+\x9e=\xa7\xc5\xf0F\xafx\xe4\x00\x00\xd9\x19R\x87j\xa6\x8cbw\xc5e\xe9\xd0(w\xdd\tP\xeb\xacJk\x0b[\xfc=\xa7\x979+\n\'\xa2I\x98\xaa&&\xbbA\n\xf9\xea0\xc7/\xa5s%\xbe\xa8J\xa1E\xfa\xc7\x08ZJ\x1dK\xdbcj\x12\xdc^\x0b\x93\xa8\x86\xdb4dC&\xe8\xd5\x9a]Nz\\c^\xdb\x87\x0be\xea/\xab\x8f |(\xd4\xd6cN\n\xe0\x97-\xf2\xado\xb9\x98\xa1\x7f\xda_R\x85/{\xbc\x1aw\xffN\xd3Y\xd4\x1a\xbe\xed\xa2\xa0u\x02@\xab\xd3\xdbX\x9f\x7fI\x07\\\x156T\x87\x00;\xfd\xc2\xfa\xed\x9f`\xc7R\xa3 
Y%\x02\x11\x0fD\xd1\x1f\x8a\xc2[\xdd"\x8d\x13\xfc/sp\xd4\x7f\xb0Y\xa7\x9dN\x89m(dG^CY\xb2\xa8]\xfb}\x12\xbb\xb1\x94\xc6\x00\x8f\xb6y\xeb//\xa6\xb4\xb1\x85J\xad)\xae\xcc\x91\xc2\x9bN\xce\xfb\x9a\xa66\xb01\xceh\x8d\xd8\x84\xb56\xc2\x14\xf3\rt\x07\xb9M-\xe6\xb2N\xee\xbb_\xcf\xcb^9\xebs\x16\x16\x8b\x9b\\*z\x1e\x18\x15\xa0\xa5\xb6\xbbj5\xd6\xa6\xec\xc0\xb7/\xa6\xcd\x85\xd5\t%\xeewd?\x14I/\x02\xb4\x90A\x11%}\xfc\x1b\x93\xfb##\x81RZ\xf5\x1e\n\x9f:>\x1a.y\x9br\xc3\xc2\xfe\x8e\x1d:\xa80c2\x1a\xd6(|Z\x07\n+\xa4\xaa\xc3\xa3\x84Y\x90\xa6\xd5c:\xbc\x17y-\x1c\x92\x81\xf2\xf10\xac\x9a\x05\xd8\x8f\xa59\xb0\xadm_\xaa-\xea~\x97\xbai\xd9/\x7fll\xee\x1b\x8e\xcc\x17\xec\xfbj\xf4HL\xf0\x8d^\xa7\x8cU\xc05^V\xe0\xaf\x06\xf7\xf6\xf3\xe8\xc2\xc2\x9d\t\xa8\xe7\xcc\xc09\xde\xe5\x9b~R\x9a\xd1\x80\xd2\xe6\x90\xef\x08\xaa(|\xd5\xc6>\x05\xfc\x01\x7f#\xb1\x0c\xbb\xd3\xc9\xaf\xe7\xb2\xe7!\xdbr\x11\xeb}J\x95\xec\xbc.\xab\xa9I\x8d\x82\xc7\x87jH\x125\xd0l8\xd0\xd8\xd9\xa0\xbeQ\x1fh\xb5\x83\x85\xa4\x96m\'\xcf\\\xbb\xcd\x12\xa3\xabN[\x12\x9cs\xe1x\xe3[1\xfd(\xadc\xcd`\xc3\xef/\xe2\xef\xec\x12>M\x0e\xc5\xbb\xfb\x138\xdb\xec\xed8!]|\xdb4A\x08\xb3T2k\xf4\xf3W\xae\xdf\x1e\xe9\x8b\xbd\x0c\xfc\x01\xfa\xcb\xd2\xa5P\xf1<\xffc\xc7\xde\xae\x8d\xc5m+`\x93\x94G\x8c/?\xac\x17[\xeb\xd1\xcf1\'Vr\xfc\xbe\xac\xffu?&U\xc2E\xabC\x87\x86\xd8\x84$\n\xff]\xecZ\x9dPr4\xe9\x0e\xc2r\x9dA\x84#-\xe6z\x1b\xd9\xd39\x99\xbeh\x8c\x1e4\xdcz\xc8\xfd*\xc5dL\xed\xc0\xdd\xe5C8\x19\x84\xbf\x8e\xe1Tp\xe0\x9fo\x99ByD\x98Tdy\xd3\xcc\xc7:\xfe\x9bj5V\xf2\xbc\xf6\xe5\xe3\xfe\xc5\x89\xa2\xcd\xa2u)\xde\x1f9\x13]lN\xfcz\xa2\xc9L\xebBJ\x06-\xd4\x9f\x8d\xdc\x9cT\x94\r\xdc\xc6\xa7(\x06%t\xabS\xa4\x0c\xd4\x0e\xe0\xbb\x1cN\xc1W\x8a\xc1\x08\xb1\tM\x94\xcf\xae 
\xc4\xa7\x9dOBD\x95&\xdd\xf2\x03p\x9b\xc0N\xbd\n[\x0f\x87\xc6\x10}\xa3X\x7fDNQ\xefVs\xfb\xe8\xd7\xb6n\x08.\x1d\xa96\xfbG\x18\x1c8NJ\xf1\xae7p\xb8\xe5j\xcd\xf1R\xc8\xcd?s\xb6w\xae,\xdd\xd8\xf5\x93\xf4\x7f\x8f\xd2T\xe5h\xe0\xa8\x0c\xd2U:\xba\xe6\x90\xbd=M\x0f\xed=\xb51\xbb\x05$\x8a\x8fV.\xe5M\x017\xad\x9b\x16\x16h\xa4\x8b\x06\xea\xbd\'\xd7\x87\xd4\x1c\x03\x9c\xf56\xb8\xa9\xb8\xfb\x12\xba\xda\xd1\x18g\xd9\xcf\xe8\x1b\x15\x1a\x11\xd7\xc0g\xd8\xd5\xb4\x1e\xca\x0b\xd9j\x7f}?\xe7\xae|\xa2y\x9cy\xb7\xb9\xc14t\xf5.\x99\x9c\x12\xdaeA{d\x10\xa9\x16\xf6\x9d;n\xb9\x17U\x92\x9d\xafiu\x97\xd6\x1c\xc4\xb3l\xdb99\xd7!{\xd9\xec\x93\xceY>\x0b\xc6O\x1c\xff\xd2M\xb4\xab\x08\xba\x9c\xae\xc1\x94\x9d\x8c\xd0\x04S\xfa\xab>h\x85\x0c\xf5\x014\xbb\x92\xc4\xf7\x954.\xe8\x99\x07\xdd\xe8b\xc3f\x12Q\x0e=zK\xfcr\xd5\xa8\x95\xc2\x8b\xa2\xd9\x1cjG\xb9]\\A\xf9\x19\x03\x87d\x0e\x08\xbb\x9f\xf6Hit\xa8\x0f\x15$m\xed\xe93\xe4\xe4\xab\xafqI~\x855F\x06\xf4N>s\xd2\xb7X\xb5Eb\xd9\x1e+Vt\t(\xe8\x10\xe9X\x12R)\xa6\x0e\xe6\xab\x93\xa2$\x1c\xf7\xf6J\xfd0\x7fO5\x85%\xd6\xdeU\x13tp\xdd\xbdW\xb8\x14\xbch\x0b\xe5t\xec\xfep\x12\x1e\x01g\xcb1\xf7\xf3\x0ff\xe7xA\x08\xbc\x98\xdd\x9c\xaf\x7f\x18\x82\xc4\xb9)\xc4\xb7\xb7\x88\xb9\x98\x97Z\xdbx:\xb0t\x16\x01\x8d\x0f\xf5x\x01L\xf3\x88IU\x84\x10m\xac}\xf2\x053\xefz\'\xf0\xdc\x8d}\xc2\xccd\xb0\xbf\x10\xb0s\x9d\xa1\xd9\xd7\xfb4\xc2 
6\xe4N\x90\xc6\x88d\xd3Zvrn;)5\xc6QwN(\xa5\xbdF\xd9`\x90P-\xd7\x02\xc0\xee\xe0\x83\xe1J_\xca/\xd4`\xd7X\xe0\x92s\xc7P{\xc9\x7f\xe6\xd0\xfb\x93G,\x80G\x0c\x9e\xd4jw\x87\xce\x9be\xa0\xa0Z\xf6\x8d\x97\xc2\xc04\xe0\x92\xd5\x8d\xbc\xbacc\xba\xa6\xaf\x0e]j\xbd\x0bM\xbe\xbb\xde\xdd\x91\xf8\xdf*h\xdf\xf9\xb6%I\xe5\x945\x95\x03R\xcf\xf3(\xd2,\xa6\x01\xe8^~\xd5.\xe5\x12X\x84p\xf8\x87\xd1:\xef\x82\xa4\r\xe5A\xd8[\xf6j\xb2\x105\xf7\x87\xa5\xad\x14.\xddc\x0c-Ln\x05<\xae\x90\xf4\xf2\x04W\xd5\xb0&\x0c\x98\x9a~\xb2S\xe0\x1cQ\xaf\xce#.\xfb\x87T\xcaTsq\x05\xb4\xb8v\xaa\xe6u\xdf\xbf\xca|\xac\xe4\xf2\xd3\x0f\x1b\xeeT<\xb2)(2\xd0\x98\xac\xd65\x99\x1c\x11\xa4\xb9\'ltK\xd7\x8d{{m\xb6\xd3\x1b\xb6H\xbd\xa8_\xab\xeb\xb94\x12\xf1j\xfe\r\x1fK<\xebn\xc1\x18t\x17\xf6\xf73\xa2\xf2\xf4\xfe\xca\x9b\xc6f\xe6m\xd3\xe6\xc6\x9c\x02=7\x1f\t\xda\xcf\xfc\x96\x9a\xac\xc4\x9f\xb7\xb9\xee\x9bJ~Kh2\xf6\xbc\xbb\xe4\xc5P\xe8G\x0e\xb5w\xc0w\x02]\xf2\x9b\xfaI\xff\x82\xa2\x96zn\xfdk\x0c\xeb`UR\xd1\xf2Vn=g\xdb\xf3\x82\xa9!\xe75\xb6\xd6v\x1a\xb0\xa5\x93\x01\xba&x\x96\x0b\xfc)\x87\xad\xe6\x8av\xe9/z\xa9ECt}\x93e@\xac\xc1\xa0\x8e\x8dR\xa6.\xdbP\x83\xc3\x91\xfe\xb8\xf1\x92\x19\xe5x\xb7\xc7r\xbf \x11qINM\xab]w\xa7\x8d\x84\x17\x00\xf9x\xefp\x92BV1AV\xca\xca\x9a\x12o5\x8dw%\xb51d\xdbz\xbe\xc2\xcbe5\x12\xec\x88\xd5\x962\x8cN\xdf\xd4\xc7\xd1\x99q\x8b\xa3@}\x1a\x98o\xb9\xe9\rxHK\x84\xd0h\xab\x97\x1a\xc0Sh\xfe\xeew\xfe\xbb;gQ\xce\xd6\xb6wv\xfa\xda\xd8$\xea\xb64\x81\x01\xd5\xd9;\x18\xe5\x84U\xda\x87\x83\\v\xea\x8b\xe3\xb6\x87+\\9\x90\xb1cI$\x88\xab\x95\x1d\x92;\t\x8cL\xc1|}y\x90(u\xa8 
\xa2w\xca\x82D\x9e\xc6\x1d=\x18\xc6\xc8\x13\xb5a?\xfc\x8b\xdf\xdd\xc9\x18h\x15?\xfao_;C\x1d\xaf\xa5\xa21\xa3\x18\xff/\x90o\xbb\x9f\xc4\xae\x06\xa0\xebkt\xb4\xf6f\xf8\xc1\xba5)%.\x7f0|\xf9aN9)\xa3(\xeb\xf4\xd6\n\xc5\xdd\x08\x14\x84\x0ce\x11\x9fHV\xfdw\x87\x85\xc6\xb9#\xab#\x07F\xbc\xf7\xb6\xa9\x10Y\xc8\xdc(\xcc(\xbc-\xe6\xf0\\\x04\xa7Z\xc5\x8a\xbb\x00\xf7&,|\xd9\xef\rs\xd7IP\x7f\xeb\xf7Q\'\xd4\x00:\xeee\xaaH\x9e\xd0+\xef\xac\x12&\xbc\xcd\xef\x91\xe8\xcb\x16Ma=\xb3\x86\xb7uq\x7f\xb8D\xf2\x08\x8d\x1f\x8d\x151\x9d\x8b\x9a\xbf\xda\xa2\xb1R\x17\xec\xf0\xab\xf8;\xb1\x86i\xd9}\xbcvz6\xf7\xca\xc1\x87\xc9\xdbG7{\xe1\xe2a=\xd3\xa6\xaab\xe4"\xdc\xa1\x1d`\xe0a\xcc\x12\x8f/\xa7\xdbt\xa1\x01\xdcoU\x99\xa6ivM\x99{\xec\x11\xba\xce\x82\'\x9b\x82^\xaa}\xb2\x0f\xa4\xbd\t\xeb?j\x83\x17u6|M\xfdf\x92-\xda-\x13$\xa0g\xbe/\xb3\xe6\xf0\xaa\x9e\xcbo\xb2\xf9\xd9,\x07\xd9\xdbs\xfd\x13\xf8kkV\xdf>\xed\x1d\x11\x02\xbc\xeaB>sR\xac1v5\xec\xb3K\xd7\x95\xa2\x94\xfe\xa7SN\x0fL\xb1\x0c.\xa5U\xa7\x83J\xb5\xecr\r\x98\x06\xf8Z\xea}\x9e\xd19\xc9x\xd5/D\xaeZ\xfbVo\x83@\xcd\x8f\x92\xff}X-\x9eX\x99\xb2\xbf&\x1d\xf6\xc0\xa7hY\x89\x92\xb6\x16\x17\x19\xf7\xf8K\xa8\xfe.\x85vQ\'\x12\x17\x83\x1d6\xd7\xab3\xd4\x16\xf4\x00\x0b\xb8\xc4\xac\xac\x9b\xe6\xf6\xf9!\xd72\xdc\x17x\xabWL;fz1\xd70\x92\xa3\x86\x9b\x06\xe30\x98\x8d\xd8-\nh\xcf\xdc\x10\x89M\x97+\x9e\x12\x93n0\x0eN\xe8H5\x16q\xcfB\xda\xcf\x11\x0b%l\xebo&Xf\xcc\xd6\x9d@\x83\x94k\xda\xb4\x17a\xb9\xff\x8e\x05\xe5\xc6\xdf[\xe3\r\x92\xe6w\x1f\xa8\t\xed\x86\xac\xb4\xe7\x1dd\xd1n\xeez\xe5\xbe\x1d\x94\x14\xcd\x8f>\x96;\xb8\xdbQ\xd7\x8fZb\xab\x96\xc6\x04\xe0{\xeb\xcb(\x88\x0fH\xd1\xa0\xb2s31a\xb9\xeb_\x95\xaf\xc9\xb2(\x9b\'O\xde\x06\xdaN\xa9\xd6\xdf\xc3\x1f\xfe\x8b/o\x86h\xe89\x9c\x86\xb2\xeb~\xfc\x11\xa0\x81\xaav\x10Z\x9b\xd3l\n\x9ck\x9c\x0c\x9d\xae\x98\xfdX\xdb\xcd\xedS\xb9iS\xbd\x8bt\x95\xcb\xe2\xa5\xda\x86\n\xf4s\xbc\xbc\xee\xe0\xce\x84 
\x9e".\x807\xbe\x94b\xdcv\xc5Q\xc9\x1a\xec\x08i\nG\xe0\xf8\xfc\xe9\x98R\xc4\xe2\x8b\x82\xa0\x9c~\xe3t\x01\xa4(|_1M?\xdcI\xb2\xb2\xbf\x9a\xe2X\xefZ"\xe0]\xa3\xaf.@2\xfeM=\x84Q\xee\x13\xdf/\xb5\x8cn\xf4a\xcf?\xd6\x8f\x16R\x91\x8d\x13\xff\xdclX(\x7f\xad\xba\xd5\xa7\xcf\xec\x02\x86}\x04\x9d\xd3\x13\xcd\xa9\x07\xd0\xa3C\xea\xbfW!Z\xb2\xbd\x8fS9\x03z\xb5\x9ds4f\xfc\xafQ^\x1c\x9d\xd5\xd1\xbeq@\xdf\x98X|\x14iW\xb3\xe5\xcbo\xd0\xe3U\xb9G\x1f\xfeVDW\xce\xfbT\x9e\xa6\xed\xf2C3Y\xf3b\xed\xd7\xad\x82\xf6\x83\xdb\x8f\xfb\xf3N"\xf7\xc1K\xa2\x0b\x16\xdblc\x99\x9b;\xf4\x10\xc3\xca\xc5\xaes\x8a\xeaJ+t\xe9\x81\x96J\x8c:\xae\xc7\xe1N\xee@M\xd2\xd8\xfaY\x07($\xe0\xdd\xf6\x04m\xbe\x93\xdfl2\'sp\x81\x92\xa3qa\xe4\xdc\xa1\xc1\xf7\x0bJ\xcd\xa2\xa3&\xad\xa7\xef\x9b!\xcc\xdb>V\xf7\xaf\x1e\x0c\x8c\x9b\x19\x8d_~B=C\xcdR\x1fg\x148I\x03\x91\x1f\x04C\xa5\x9c\'Y\xean\trU\xcd\xcf+\xfa\xbd\xc7\x8c\x16\x00\xa2$\x02\xfa3\xdao\x8b\xd1\xd6F}\x1c^h\xec\xf5\xb3\xc7\x8e{\x8a\x1bI\xae\x02\xf6\xc6\xfee/6/RZ\xd1c\x88\xefWm\x10\xf7:\x00\xc6J\x8cG\xcd\x8f\xfb\xc3\x8d\xf5M\xec\xda\xa5\r\xc4z\xd8\x83\xc3\xd4\xd9\xd0\xd0\r$\xb3\xb1\xbb\xcd\xaft\xb2Hij&J(\x9b$g\xd1)\x92\x95\x98!2\xed\xa71\r\xf9\x8a\xfa\xad8\xf0\x9bx\xd9\x805\xcb!(-\xd9\x9a,\xf9n\xb9\xf4\xc3\xab\x00\xebw}\xdc*\xac\xc7\xcd\xd7\xfd4<\xac\x15\x91\x0b\x01$\x9f\xfa\xe89<\xfd3,+<|\xee\xc5\x9c\xfdq\xadR\xbb\xfc\x06\xe6\xf7\xb2\xb1\xd2\xc1\x83\xff\xea\xb8\x95\xd1\x84J+\x8c\xca\xed\xa0\x94![iwSe\xa6:\xd4(\xbb\r\x9fIcDid\x90\xa2D~tz\xe3\x0eg\xad\xc8\x8c\x18\xa5\xaf\xd7\x86w\xdc\xa3\xc9\xf1C\x14\xb0\xc3\x0fmR\x018\xf8\xfc4 
\xa2\x15\xadO\xa2\xde\x05(\x8f$\xa61k\xf6\x1f\xc0\xd3\xd5\x968\xe6~\x7fY\xe8\x98\xd9\x8e\x19\xf2\xbdTT\xa9f\xac\xf2\xf6\xd5\xad\xf4\x0e\xc9\xef0D\x96\xfc\x0c\xfb\xdb\xdfGd\xbbK\xbb\x03\xda\xe3`\x7f\x9fGn\xa8J\xe8\xbc\xe9\x13eW\xb4\xe4I9)\xadD\x15\x108\xa6\xabg\xa2\xaf\xdcZ\xa9\xba\xcd?\xa5\xc9K\xf9\xa0a\x98.\xe7-\xd9\xb9DW\xbf<\xa2`\x97m\xa6"\xef\xf3C>\xd8T+\x81X\x14\xcd\xe1Q%X+\x7f0T\x98\x16\x80\x8c\xc6u\x9e\xf1\x96\xd2d\xe3\xb9_1\xd3x\xc1\xcd\xae\xfc\xbbLu\xbb\xb6i~\x08\x86\x99\xabJ\xcc\xe0\x13lH\xd8O\x16\xd7)\xe6\x03Ux{EX\xe9\xad\xdc\xad0\xe4G\xdc\x8d\xcc\xffrR\'\x94U\xb5F?^\xf7\x1cLQ\r)\xb0"\x1f\x06<T\x905\xe49\x1bZ\x18xM\xf97)\x8a\x18\xaf\x18|1\xc0~\xffP\xd8\xfb\xc2n\xc9\x9d<>\xf2\x05\x9a\\\xb0\xdaD\xe7;\xb1>m\xa5\xdd\xc8\x89;\xc6\x9a\xac\x1c,\xa2\x12\x8d\xa8\x19H\xa2\x95!ikE?yY\xec*d\xe6\x1e1\xde\xce\x9d^8\xdc-\x88M\xb3\n\xe8X\xda\xe1\xe9\xf4i\x0c\xba\xd7\x83:M\x9d<\xadjX:KNn\x89n\x1d\xcc\x89\x9d\x91,\x1b\x9b\xcd\xa1"\xa7\xe5\xc0\x1d\t\x7f9\xc4\xc5\x0f\xfe-\xe4\x9e\xaa{9vN\xd5\xd9\x9d\xa8Id\xdf\x7f!\xde\x1a\xae4h\xdd2B\xe9\x12\xf4\xff\xeam\xb1\xc3\x19va\xb3\xffl\xb5#/\xf20\xef\x83Vje\xe7\x8dj\xf7\xde\xd1\xd5\x84\x13\xbf\xed5\x9azZD\\\xf7C\xb8\x0b(\xddU\xc0\xd6 
\xd0\xa8\xd2\xb5\xa2Uz4\xc9\xaf\x13_\x9d\xd8\xe6~\x90\x96\x82}%l\x9f\x05m\x82=\xabJ\xa5>\xe7\xd0\x0c\xb9\xfe\x9b\x9f\xe4L\xd9\xdf2\xe5\xba\x92\x1d\xbap:\xc9\xed\xed\xa2\xa4\t\xf1wk\xa6I:\xba\xcf\xe8\xb5\x18*\xdd\xcab\xd6\xe2S\xda\x87<~\x82\x87t\xe2\xac\x92n\xdb\x1b\xf2\xfc\x1b\xd6\xb3\xf0m\xe5\xf0^\x8c\xc3u\xc9\x97\x11\x9b>\xec\xdcl\xda\xda\x98\xdeU\xfbZ{\xf6\x12E\x1f\xc5\xdb\x8f1\x9aL9\xc7\x9f~=p\xe6\xb8M\x91=\xc6>\xe5\x02\xfdO\x94\xe5Y^Y/\\y7\xacM\xd0\xf9H\xb4\xbd\xe8\xfb\xd4\xb3\x1d\x99\xf4\xe7\xe4F\xa9\x18\xb3_\xcc\xfa\xc0\xc3g\r\x19\x15\x17^(\x0c\x04s\x1a\xff0\xba$\xc1\x14a\x17\x11\xdc\xd0Py\xd0c\xa3\xc7d\xbc\xa5\xb5\xebIx}\xd1T{\xa5\xec\xeal=3+\x04\xa4\xf9/\x9d|\xd8\x1c\xf9\xbd\xc6\x13v\xd6\x85w\x01\x1fz\xf0:\xfd\xbc\x81\x152\xeb<\xa6@[\x04\xc5N\xad\x1d\xdb\xb3\x1d\t\xceT[\xd1\xfb\xf1r0\xb0\xe9\xcd\xe6\'U\xbd1\x0b\xfd\xba\x08\x16\x15\x98\x8d\xed\xa2pXn\x10\xc0\xa8\xe3\xa6d\x88\xd9;\xe4\xf0}dB\xa6G\xcf\xc9O\x1d\xa3-\x03\x9b\xf2K\xb1\x9e\xc5\xbaY\xba\x05\x98\\m\x83 \xf5\xd9;&\x023\tz\xc5\xf2}\x96]\xe5\xbd\\\x87\x97\xb3\xcbS,VQ\x97\x04\xffA\xa8\xa5\xea\x95!Uko\x1f`\xf9\xc3\x9dt\xd7\xaeo\x95\r\x1b\x1b\x1f<\xed7+\x90\xfd\xd7.\x98\xec\xc4F=7\xfb\xf9j`0\x9c\x9c\x91v\xdf\x0f6\n\xfe\xa3\xea,\x04KXn\xd0\xe0\xbe\xf3\xfc\x9f&GE\xcc\x93sS\xb8\xc4TlMz\xd6\x8fw\xa1\xad,h\xc1\xb6\x10\x8e\xc8\x8b\xa1\xe9\xd3\xf7\xf6\xdb\xf7)\xb5y\x86^o\xf9\xbc\x96\xab\x07;\xc0)\xb6\xeb\t\x0e\x80\xf5z!\xb2j\xfeH\xa6h\xaf0OgG\x90\xef\x14\xd32,FO\xfd\x1ft\x93c\xf4:\xb8r,G\xc3vW\xa3\x13=%\xc2\xf7\x04o\xa4\xf8\xa44\xcc\xeb\xd8\x93T\x95t&\xd8|C\x84\x1e\xef{\xf8\x90\x8e\x19[\xc3\xdcj\xcf.\xdfq\xa9\x94!\xd6\xf2\xdd\x9e\xbd\x9e\x95-\x83\x9f\xed\x9c\x1a\xae\xdf\xa7\x92_\x84\x99\xcd\xa8hu\x1f\xc8\xc2\xc18h9\xf7\xa3+\xf6\xd0i\x12\xc0k\xbf\xf8Yy\x81\xfe_uc\xc0\x9f\xed\xf6\xe0\nlz\xe0\xc4f\x05t\x86\x1f\x92\x9a\xa4\x17)x\xb3\xa4\xa7\xd99\x80bR\xee=\xa6\xfbr\xe6\x11+\xc7\xf3\n\x13\x89z\xd6\x027\xb1\x980No\xbd\xe5@\xbbJ\xc6P\x99\xe3\x897g[\xf2Q\x99i\x0e\xcdZ\xd6\xc1\xa8\xe6A\xd0\xd4\xce\xc7u\xa9t\xf4\xef,\xd1%
\x9bvI\x83Z\x88\tp\xdd\xb6\xfd)|\xee\xdd\xf2\x81\xda*I\x08\xb1\xe6\xd0p\x92\x1al9\xa5$\x95\xdf\xd7\xdc\xd4\xa0\xe3\x7f*\x8e\x17,\xd8\xd1\x89\xfd\x81\xb0\x13C\rQ\xb9\xc2Ef\xb2\xfe\x80\xf8\x7f+g")m\xc7\xf9rKa\xd8\x0fj\x13\xffd\x85j%\\5\x8f\x10r\xafbi\xbd{\x00\x1d\xe98\xae0*\xb5\x7fe\xb3\x92\xf6U\xfc\x8a\xc1\x87\xd3\x12\'9:\x80\xa0\xee\xe7\x01\xfb4\xe7\x1f\x9c\x98\xc8\xda:\x82\xd5\x1b\xd5+SvzR7\x01\x17$\x14vsP\xad\xd6y\xe6.c\xa1O\x90\x17\xb9t\xdd\x15\xd9\x0e\xbc\xf9\xbd\x9f\x8ehY6\xa9yY}]\xd6a\xb3\x8f\xb2\x00\x11\xba#l\xe6x\xf0\x9fP\x9a_\x16\xcfI\xc8\t\xfe\xdcj\xe1Ki\xa1.\xb0\x95=e\xf1\x93\x02Z7\x97\x9a\xcbk\xa0Wm\xb9J\xaf\xca\x00\xa9,U\xab\x1d\xd3\x19+>\xe8\xeaj\xa01\xec\x8d\xce\xaaoG\xa0zzB\x1d\xfa\x91\xd8\xe5\x97\xbfK\xa6\xe3\x87V\x92|\xc5d#\xfe\xa1]\x0f\x9ajAs\x04\x95\xe0n\x87\x89\xc5\x8c\xce4\x14\x85l\x889,\xe69\xfb\x19\x99\xa8\r\xe8\xfc\x08\xe8\xf7\x98\x88\x14\xed\x14\x92D\x84\x1a\xc4\x9a}\x8a\x88#\xac\xc6t(\xef\xdb\xd2/\x07\xb3\xe9\x04\x17U\xce7Q\x9c\x91\x1a\xb0\x8f9\xc3\xf3\xbd\x8b\x94\xe3\xbc@\xb3\xddr\x06\x03\xbc\x07\xd2\xd7c\rU:\xf0 \\\xae\xf1\n\xa9iZ\xa4\x8e`M\\x\xe3u7\x02\x03\x8d#\xcf\xcc|<\xad\x1d>1#\xda8\xab\xbd\ro\xf8\xd4R\x18Y\xe8\xcd\xcd\xd3y\x81\nm\x1a1nu\xf2\x0ex\x07 \x9a\xfa\xfew\x980{N\xafz\x8dd\xae#c\x9c\xb5=\xdbG\xc3\xa3kf\x9a\x7f-}\xde\xaf:\x03\\6x?Pl%\xbf*"\xa1 \xb7\xa3\xe1\x05\x1a\xd5\xa1\r\xac\xc1\xbdL\xbf})\x17\x93O\xbd\xdc\xd6\x1f\xaa\xd8\xaa\xba_\xeaNtM\xd0\x1b\x8a\xf0\xb3\x91\x82\xa864|*\x94\xfb\x1a\xd2\x86\xab\x1a\xc5\x1d\xd1o}\xdbM\xc2\x00\xf7\x10S\x89Wj\xdc\xfb\xfct\xdd\xc6*c5\x95+*\x9cj/\xa0\xd3&\\u0\xe5\x12\xb2\xa8\xc67\xd6\xc9\xdc\x1c\xceW\x9c5k 
\xa2\x07\x12\x9f]\xb4\x8culOQ5&\x9d/?\xc6\x83\x1e\x97/q\xaclU\x1e}\xa0\x83\xff\xc8\x9eZ\xf5\x18%\xc7\xe8ir\xeb\xf7\x1f\x97[\xd1\xd5\x1a\xf6\x92}\x9b\xfc\x05\x0c\xef[i\rj\x7f\x08\xae\xb2t]w\x0f\xc8\x16\x07+wVy?\xec\xf8\xa4\xdcl\xde,\xb2\x14\x89q\xc3v2\x84\xf0_\xb0\xf9O\xe9\xac\\\x0c\x91\x82\xb8\x88\xd2\x0ec\xcc\x1b\x8f[\x01Q\x1cG\x1c\x1f\xf3\x0b\x1e\xc3L\xdf"\xd2\xf4\xed\xaa\xc46o\x19\x1b\x14&`\x07wb)[\x86\xb7\xf8\xa9A\xb5\xaa\xf8g\xc1\x1a\x1d\xcb\xf4#\xbe\x1c\xadS\xffC\x1d\x0f\xaf\xfa\x98\xebH!\xc2\x00L\xaer\xe6\xc5{e\x17\x07\xb8O\xfe\xe0\xc3\x9d\xa1\xb6\xf2\x99Y`\xb5\xfd\tx\xd4S\xe1\xc0\x7fco\xc9\x9c`M\xb9\x90\xcd\x9d\xf7\xb1\xe1\xd2\x88\xbc\x8a\x99\x9b\xf9\xa5)\xb3\r\xd0\xb0\xf5\xf7\xfd\xcb\xfb\xcf\xbfS\xc7\xd7\xd7\xd1\x06\\Gq\xdd\xd2y\x9d<\xc1l\xbeB\x05N\xed\xee\x91<\r\x17\x11;R\r\xaaW\x08\x8a\x01\xf5\x87\xd7\x85^\x17v\x7f\x15\xcd\xf0\x1b\xd7\x8a[\xe0<\xa5\xd1M#+\xa7\xf8\xf2}\x15"\x1e\x13\x1c\xaf95h\x1c~~Uq\xdf\x8b\xb7TM\x03w\x9b\xf4fc\xc9\xcc\xbb\x81\xcdmV\xa8Z\xa0\xdaF\x90\xa26E\xba\xfdX\x82P2\xbdv\x08\xfd\x14J\x8eH\xfe\xb9\x99\x9c3\xd7L\x99\x8c\x14w=6\x0e|\xab\x8f\x1a9\xe5)\xab}B}Y#\xbd\x1b\xbe\xf2*\xf5g\xd3\xea\xcas;l\x16[\xb6\xb5\x06.\xbc\xdd\xb1~\xc04(\xc9\xc1\x7f\x03\xf4mf\xaa-\xd2B\xe1\xe6\x91\xb5\x94\xd7mQc\xaf_&)l\x83\x8e\x1c\xf5\xfeD;\xdc\xbf\x1a\x10\xcf\xc1\xf0bh\xb6\xf1\xdf\xbb3\xbf\x1c$\x9f\xae[\x85\xbcS\xe3N\r\xfdu\x12\xe3\xa4^\xd1-\x15\x95\xa6\x9f\xca\x93\xd7\xbf\\\xe1*\xa2\xad]\xf6!\x01\xa5\x9c\xf9\xeam\xa9{\x00\x87\xbe\xb8\x8c\x96}\xb6EP\xbc\x84\xf10M\x19\x1f\xffj\xa0\xacc\x92\xe8\x18.\xae\x9e\xf8\xab\xda\xf7\n0\x03\x10A\xbe2<\x8e\t\xe0\xfe\x80d\xb5\x1aZ\x97\xc4\xed\x00H!\xbc\xc4\xd2\xd1Xg\xe1\x83\xca}\x9d\x86}\xbd\xcet\xfa-\x81\xc5\xa6\xd9\xa1\xee\x99\x9c\xc9\xa6,\xed\xe9\x86\x1c\xeb\x14\\yAv\xce\xbc\x8d\xa75\xe3Y\xfbu\x91{\x15\x9c\xee-\xe3\xa8J^\xd7\xaa\x11\xaf$\x01\xe3\xf8\xd6\xf8=\x1e 
\xdc\x95NB\xaf\xe3\x04\xc6\x13\xe6\xe9K\xa0\x8fu\n\x02\xbb\xc2\xdeO8\x902\x12\x0bD\xfdMb\x99\x8c\xc2\x0f\xf3M\xa7E\x15\xfa3\xc9<ie`j5\xc3\xf9\xef\x0b\xb2$M\x81r\x05W\xf9A\x7f\xc4i\xfb\xac\x91Z\xe4\xf8\\\xe9\xban\x17O\xea\x93I=\x9e\x1fS\x16\xdc.\x92\x82\x91\xc7s\x98-*\xb1\xf74*e\xd3\xe6\x92\x85\x88}\rr\x84\xf9\xe6>;%p0R\xcd\xef\xac_\x9e\x00\xff\xa1"H\x12\x00z_\xfd/\xe3\xdd?\x06\xde\x94\x93\x18U\xf3\xccJA\xad_\xf1\x14\x9d]F\xd8\xd7\xca"\x897\x11\xae\x1c\xa3G\xcd\x97\xb2=\xd1/\x9f\x17O\xf9\xf9\xc3\x1e9\xd1\x1e\xd6\xfa\\\xf2\xed?\x8d\x1c\xa94t\x7f\x96\x96\x9f\x02\x7f\xf9\xcb\xf2\x0f\xbb\x14|\xa0u\xfb;\x050)4{\x9c\x17\xdfv\xf2\x9e\xd7f!\xfc\x8f\x85J\x10\xd0\xc9\x0e\xa0\xd5\xfd<\xf0\x11\xba\x0f3\xd5$\xb2x\xba\x1f\x95\xef\x99f\xaaA\xe4dJ\x91\xc6\x08%1v\x03Qv;\xa9\xc1\xad\xda@bA\xaf\xae]<\xffE\xc9\xab\x1d\xfa\x85-%B\x9f;\x87:\xee\xa7U\xce)\xcc\xb0\xd5\xe4\xc7m\xb8\xf0m:\xbc\xf3\xe0\xc8\xb7g\xd4\x10\xcf\xda\x07\xea)E\xbd\xb4\x9a\xd2)\x19+x\x90\xb3h\xda\xfc)x\xaax\xc5\xf0\xbdr\x0e\xc4S:<\xac\x8e7\xb0\x10\x07\xd1(\xcc\x0b\x86\xd6k{\x95\x89u\xbc2\x89\x16\xd4\xe4\xdbV\xe4\t\x97\r\xdf\xa1\xbe\xca\xbf\xde\xbf\xbb\xcf}\xbc\xb8{\xc30\xe5%\xff*l\x93/I\xaa\x94\xf8\xd1\xe0z\x96\xa4\xfc\xcd\tW\nq]?\x95:\xf7S\xdcm\xfcUg_\x18\x8bd\xf0\x8bQG\x8d$Nb\x9a\xea\xa2\x11\xf5\xce>\xcf\x84\xffY\x1c+[\xfc\x9b\xe8\xef\xc0i74\x1eW*l\xa25mxp\xf28R)X\xa6eJ\x05\xcd\xc8\xf5\x18\xb1\xf5\x13\xd22\x95\x1e\xe6\x1d\xf1\x0eXyzLJ\xa4U\xed\xe4\xfe\xa3\xb0DD|IR\xfc\xd1\x03M\xb6\x93\xb37\xee\x16\xab\xd8r\xac\x97\x8d\x81f!\xc0\xdb\xad>y\xf8\x99\xd5\x85%M\xf3\xdax\xa7K\x04a\x8e\xdc\xc8\x8e\xfbU\x11j\x91\x81\x9f\xae\xc6[U\xba\x1a\x8c\xcaX\xb92(^\x84\xe9\xc1m\xdb\xe8J\xe8\xee\xcf&\xdbQ\x8d\xf6u\x98E\xf6%\xda9\xfdz\x95\x0c\xd1\x13\x99=o\xf1\xfd\xb3T\xa5\'\xee\xd6\xbf\x88\xf7\xf8\xf1\xe7\xde`\xa9\xca9\xf3\xd8\xfb;@\xbc\xf7I\xe4T\xa2N\te\x07p46\xe5&\xe8\x05\x94\xb7E\xf8gV6]\x18\x84k\x93\xeaN\xad\xbd\xde\x1d\xa4\x05\xd1\x9f\xa5\xab\xd6B~\x08\x00\xea\xaf\xee\'\xec\x07\xf6m\xaa\x85=\x83J\x1d})\xe8\xb3P\xa4`\xd1\x8a\x1c\x0e&\
x06\x17\x82\x9f\x97\x1dz\x05\x1a\xce\xfc]\xa5\xa9\x00\xcd\x1f\x13\xfbYK\x9c0\x8c\xca\xe6D]\xfe\xfc\xac\xe1a\xab5\x99\x89\x8e\x9f%"\xa1\x19\xc1\xc96\xc5\xdb\xbe\x02\xeb1\xa1\x07\xed\n\xcf\xa0Z\xd2S\xee\t\x9a\xac\xe1<\x87n\x85\xb2os\xf2\xa7\xc4T:\x07"\x7f\x17D^\xe8\xff\xb8h\x1e\xedz\xdf(\xf6\xb4\x14\xdf\xb8p\xd3*kS\x00#kh\xce\xb2\xd4] \xf0\xf1\x14HV\x14\xf1R\xf1\x13?M~\x12\x08\x02\xd5\xcc\x1f?\xa4C\xe1\xec\xbe\x12J@\x1cV\xff\t\xdf\x8b\xd97\xfb\\*\xff\xe2\xa96H\xa7\x9f\xf9bc\xfb\xed\x85\x9aaJ4\x16U\x97\xaev\x88\xef\xde\x04c\xc8\x02v9\x92\xe5\x9b9\xa6K&\xb6\x0c9\x0c\xe8kO\x08\xb6\x1b\xfa\xe0\xb3\xdf\xd9\xab\xb9q\x13\xb0\xc8\xcb\xc6\xc8sHz\xd5,\x968\xfb\x1d>\x165\xdat\xd4\xbd\xa0~\xc8\xf3/.\xed\xa3\xe4O\xd8\r\xc5\x1f\x85\x03,\xc3\xa1\xf5\x03\xcb\x83\xc6^\xd6\x9f9\x8b\x8a\x15\xe7\xaf\xcd\xc86\x97\xb8\x8e\xae\x0ed\x96\x90\xeb\x05zr\xcf\x8e\x14\xc0\xad\x93\xa7\xaa\xb3\x81\xba\xd3\x15\xdd\x87?\x96\xbb2\xb3\xda.(\x84\xca\x08v\x9e\x8c\xc1B\xe3\xd7tN\x9eAw\xb8!\xd2\xaf\r\x98O\xe0+\x80v\x1b\xc0,\xf6\x93c|8\xcbL\xe8\xb5jM\x01\xe2\x1bb\xe6\xcc\x01/\xed\xc9K4\x86-N|\x95`ZPi?2\xeb\x05\xf64\x0e\x90\xd3\rBU\xf6\xb7\x8c\xc4<@\x9e\x9fK\xa0\xd4 
\x84a2\x90\xf1\x1e\xd3\xcae\xa7k\x82A\xe3\x8d\x82\xae\x86N7]\x15\'\xd7\xb0gMs\x0b\x89\x97OF\xae\xb7\x8c\xfa(\x95\xbe\x07\xbb\xafeo\xa6\xf4MNe\xcd\xb5\xc6\xdf\xba\x87x\x1b\xb7P\x1f\xd2\xab;B\xf9\xf6\x96\xbd\xba\xb1\xd6\n\xa5\xca4\xe3k\x01\x069_\x14\\\xf8\xa6J\x8a\xeb\xcae\xc3\xcd\xc5\xca\xf7E\xe8\x1ciU.\x88\x92S\r}\xe4;\x94\xef6`R\x02\x9a\xa6\xb8\xd7\xe9K\xf0\x8a\xea\x949-\xa9\xdc\x1c\xc9\xc6\x8f\x9ch{\xa5\xb6k\xe8\xbd+\xa2\xf5;&\x0e[\xc2\xd5e\'\x94\xfe\x19\xc1\xe0\xf1\xa9\xe9;\xfeu\xdf\x04-\xa3\x8f\xcf7\xb8\xe0X\x8e\xd0\xc6\xb8\xaf\xfbm[\xad\x97\xe4T\x9c\x82$J\x83\x1b\x82k\t\x0f!\xd0\xc4t\x92\xb6\xc8j^\x93\xb2\xd7\xe4\xe5\xf8S\xe8\x8e\xa5\xc0?|\x9f\x16\xcf\xed\xd5\xc5\xd8TZ<\xce%\xb3\xd4F\xf2\x9a`\xf9\xa3\x10\xf7\xb3\xd9E\xde\n\xe3+\xeby9\xf6\xaf\x85\x05\x1f\x8f\xe8\x8e\x0c>\xfe\xe9\xff\x9c\x91\xbb\x7f\xae\xe63\xdfu+\xbb\xd0\xcfS:;\xdfS\xa7\xe7h\x08\xc9/m6Q\xca\x9e\xa4\xedb\x89 \xee\x8eb\xbb\x83\xae\x15zP/g\xf7q2\x04f\xa9dL\xf0Y-\x8bs8\xe4"h\x18u\xd0\x85B6\xbc\xd19:\xd6\xdb2\xb2\x9a\xc4\x8f\x02S\xd0K\xc0\t\xe7\xa7%\xbf%\\\xb4\xd7\x9b\xa6\x08\x86\xf6\xd2-C#2\xb88\x86\xbeS\x8c]F\xb871n\xaa\xe6C\xb9t\xa7\xd4\xfdv\xed\x8a\xe1\x86\x88|\xf3\xdce@0GvE\xe0\xb8ht\xf8H\xd2\x91u\xb2\xff\xb8\\\n\x95Cq\xf0\xb5\xef\xba\xf3\xc6_\x10\xeb\xb6\x06\x9d\xfc7\x89\xc1\xa1\x1b\xbbPG\xc9m\xa8{\xd7\x94(8b\xd0\xc0ql~\xe0\\\xe0M\xb5Q\x11\xac\xdd\x10ao\x9b\xafs{\xef&<\t\xf4hUZ(:\xfc\xca\xfcB\xe3\xf4B\x1c\xf0\x05_\x19\xf2_\xcbF\x89\xd7\x0emq\xb8\xeb\xdb\xa3\xc0J7\xb7\xbfJ\xb4\x94p\xa6`\xfa\x9a\x9e#\x86\xc8O\x81\x8b\xbdn\x1b<>\x06\xd2\xc6\xa8\xe8\x7f\x83M\x86w\xbe:}\x044\xfe\xd2sq\x9d\xe8\x89\xa2^\xb5fu\n\xda\x9c\xeb\xa8\rDb\x87j\x9cx\xb4\x1b\x03Km*d\xe33\x1c<g\x07!\xf8,\xecQ\'4!\xd3\xb8\xb8J\x11\x04\xbc\xda{#o\xba]\xe0\x99\t\xe3k\xa0/\xca\xf2\x96Y\xd4\xceo9h6\xb4\xc4\\~0~}DF\xad\xef\xc2H\x07\xb4\xf7\xcd\x96\xb7a\x9b\xf3\xd2z\xfb\x94\xff\x1b\xf1\rPB\xe4\xe4/gh\xe9\xb0A|\xda\xf8i\xb7\x8a;\xa0f\xe1\xd9\xcc\xb9\x8a\x9c\xa3\xe9\x19\xea\x81\x83\x0b\xf7\xba\xe7\x99\xe4)m\xcfM\xf9\x90+y\xf1\x91\x90!$\x
8brj\xa6\xb2.\x83\n\xa4\xb1\xef\xbc~<v\x8b\x91\xdd\xb1u\x80iE\xc9Z\xd9\xf6|\xd1\x14\xd1\xca\xa6\xa0\xbe\xda\xde\xc6t8xu\xd7\x95\xe7\x81\x99\x9a\x85\x1d\x0fn\xedp\xde\x03\xf0Q\x0b_\x83\x92v\xce\x95\xfb\xe5)@z\x17\xbc\xc9\xa9\x1f:\xd9\xe7\xccy;~\x04\x08\xb4\xad\x9d\x0b\xce\xdd\x8b\xaf \xf2\xb8\x91?\x0f^\xc3\x88\xd88\xc5"A\x11\x9b\xa9]\xe5\xbf\xc5e\xad\xdf\xe8&\x82%\n\x88\xecA.\x87Dj\xbe\xb9u%71\xd2\xd2\x06\xb0\xd4`\x90\x07\xb25\xb3\xc5;zE8\xd6\xc15\x82\x10\x02\x8b\xd9jNbb\xa3i\xd2\x8d\xde\xc2\xa0\x88\xd5\xf0\xf9Y\xd2\x0bw\xd9\xba\xceAD\r\x8a-\x8c7\xc0\xb2\xb6h\xff\xac\xb7\xf2\xb0W\xf9Rm\xed~\xa92\x9f\x01\xd3v]\x01[\xff[\x8cn\xee@\xee\x1foO9\xd4\xb8\xea\x1c\x03\xf11*!O\x1e\x944\x18C\xd7W\xf4\x17;5\xda%\x17o\xbd\xab\xc1\xba\xf7\x9d\x1dIki\x94[\x0b\x9f\x80\xd5i\xf2@\xe2O\x9e\x8a\xba[x?\x8f#.X[\x1e\xb5\x8c;\xd9F\xaa\xe3\x01\x8d\xa3~\xea\xbd\x83\x05=9#\xfd\xd8&\xf0@\x1e\xff\xde\xb9\x91\x0b\x97\x16\xdf0\xab[f\xb8\xbd\xa1O)\x1c)C\xa6dW\xcd^\xf9\x82~\xa6\x8c\xce\x95j\x1e\r\xcc(\xdaU\\\xa7#d\xf4\x88\x04\x1b\xc8r\x9e\x88x\xa4\xd8\xc8\x02\xaa\xc6=\xa1v&\x800+N\xa2\xb3\x1b\x13\xdb\xbf\xa9;xh-\xef\xe0\x1b\xfa\xf5\xbf%\xff$F\xd6\xa37\x04r\xacj\r\r\x9aM\xc6\x8d\xe3\xa8bw\x8b\xee\xea\xa5;\x1b\xcb\x1a`O\x05\x12\xa9\xc2\x18o\x14\xc4\x1d\x1a\xd2\xa5[T"$\xc63\xd3\x120a\xdd.|\xea\xb0\xa1v\xf8\xc6\xbd\x9ao\xe2\xb6\xbb\xe4B\x8d\x95\xc1x\x0eI\x95\xfa\x8a\x1b\xd7\xfb]"W\xe7\xca\x9c\xc5\xe9\xea=z]\x80\xc6\xdelc:\x0fu\xb1\x06\x94:\xca\x9dI\\\x9e\x96\x04\xceK\x96\xb8.\xcf\x9c\xa9\x05\x8dF\xbeQJ_\xf8\x95pZ\xdc\xce\xe3\xf7\xa6\xd3Y,x\xbd.8\x84\xb1\x94\xda\xa8\x95\xb9\x951\x07?\xdbXR\x98\x1a\xa1\x1424\x17\xfa\xca\xeb{\x93\xd0\xc1\xa3R\xb6\x1fI\x10s7\x15\x12\xfe\x8a\x86\x11\xc8~\xc0\xa4\xd3\xec\xd0xS\xc9C\xd9\x87\xe4P\x9e?\x8e\x9a*/\xdb\x95\x07\x1f\xfc!&\x19\xe0\x89=\x91\x93\xf7\xba\x96b\xf7b:]\x00\xdb&s\x14|\xc8\xdbS\xcb\x19\xb8Y\xe3V\xdd\xc7\xfcw\x1b\xf7\x03\xab^\x9d\xf9\xddV\xa2G\x1e\\\xbc\xed\xdf\x0fK-$\xe5#\xbb\xe3e\xeaLu<\xe7P\xfc\xc1\x96\xda\x88x\xd0\x1c\xeb\xdc\xdd\xfc\xa3K\xa3\xc9\x0e\xaa5\
xbeja\xafS\xe1/\x9f\xc9\xbd\x05\xbcEO\xac\xf4\xe7\x99\xa49XN\xdc\x1a\xd0>\xdc]\xbed\x82\xce\xcf\xdfs\xb4t\xa3t\x06\xac;\xa3\x04\x0f\xe4U\xbd4\x97\'\x7f<ST\xab\xbf,\x82\xbb\xae\x15d\xc4}\x894\xbagT\xf8\xb7\xafC\x16\xf9\xfe\x85\x11m\x88\xe4\x12\x11\xbb\x8b\x98\xba\xf5\x11\x973\xd7\x1e\xb5\xb5\xf8\x1b~k\x18n\xcbS-\xef5\x8a\x10^|\x0e\xa5 \xc6\xbb\x9f!.\xe9\x82\x03\x16\xba3"\xd32\xb4|\xc6\x9e\xa1\xce@\x9b\xd8\xac%5\xab\xad,\xd4{k\xa7j+K\xe7\xab\xf8\xf1Y\x1a\x08\xff^\xa7I\xf4\x81\xe9\x13\x04/lb\xe5\xf1\xd0\xb9\xe7a!\xa7\x9d\x96\x06-\x88\x97\x1d\xe0rF\x82\xa6V\t\xa6\xdb6\xc8\x7f*D\x18\x1e\xda\xf6(" \xd9\x15\x8c\xa5\xbe\xa4Q5"\xfe\x8a\x81\xe2[\xf9{/O~\xc6\xa1/qL\x90w\x8b\xb8Oo?\xd3\xfb\xbd\xd4v\x18\x1b\xad\xc8S\xd9_PJN\xda\x8b\xc2\xe5\x068\xdf\xb3\x86[E\xb8\x91I\x7f/\x1c\xf1\x1a\xec\xb5\x1fF\xff\xe9\x04\xaf\x81\xefz\xd5\xd8oNQ\x17\xac\xfb\xf0M\xb3\xf1\x14\xe5Z6\xe8\xbf\xbf\r\xca\x00:\x0by\x0b\xfdU\x05Z\x95^G\xceY\x99\xffuzkEE\x7f\x12\r\xfc\x8cGC\xa8\xfb\xe4B\xb8\xd6B\n\xed\x80\xee_!\x1fTvr\xbc\n\xe0J\xa3\xa0\xa6\x17w\xd3\x18\x95\xc4\xfdk\xf4\x952\xc5\x18z\x1fR1\xeb\x89\xca[\xf2P\xe1\xfd\xb5\x87\xa4\xeb\xc9M6\x18\x83b\x86\xf2.\xb8\x0c+V\xc0\x1c\xff|\x94F89\x9b\xdc\'vy\x0czb\x1a\x19\x16\x85\x16\x8d\xdd{\xce\xe1V\x85\x9c\x83_}\x81`\xed\xef\x97;\xeaN6\xc9\x9eH\x9b6\x8b\x8dD\xef@"F9\x94@\xab\x9b\xec\xf8\x94\xc3\x88@bS\x03\x0c\xb8_\xe5M\xbd\x9fD>s\xac^\x83s~\xcd\xbfa\xa4\x93\xe6>p\xea\xf8\x17Sf\xd3z\xa1{(\xeb\xd5\xd0\xbeS\x01\xe9+cy\xcd\x15\xbb\xb7\x827B\xd6\xe4\xcf\x89\xdd\x97L\xbf\xda\x7fW\xf1\x97%\x8a\xdf;H%~U\x816\xfb\x9c\xe1\xe7\xafj\x17oG\xd6l\xb6\x8e\xcc\xdd\xda\x90F\xe8h\x1cx\xca\x1f\x8ez0\xda\x9e\x89\xdd\x88d\xb3U\xfbS/Lw+\xa0\x94k\xee\xcf\x03\xeb1\x95\xe96;\xdb^[\xd4\t\xdf\xf9\xd9\xd8f\xc8\x90\xcd\xd9\xc0{\xdf!\xd55\xc1\xb4~\x01\x0f\xea\xfc\xcd\t\xb1\x1c\x90\xc6u7c\xe3R\x9cP|f\xc4,\x80\xfc\x86v\xaa\xd0\x9b\xbbO\xa0>m\x93\xa8A\x98K\xe7\xd4MI\xf1\xeaG\x0f\xd8r\r\x1b\xbemc\x93\xb8\xea\xba\xcb\x02\xb9\x99\x0f\xb07]5\xcb\x87\xf8\xbe\xf7\xacl\t+f\xf3P-ME\xe8{\xfa\x90\xdc\x0b\
xab_\x02I_\xb2\xceo\xc5\x95\xf2\xae\x8e\xc4\xb76\x1b\xe6G\x9d\x0e]@\xec\xa9\x11D\x8d%\xae4\x1c\x8e\x14p\x14\xf1n\xde.\xbf\x1d\xce<\x19\xd7\xad\xa1H\xa1\x0c3JS\xb8\xa4\xbd\xe4N\x89\xd2\x14\xdf\x08\x99\xdd\xc9\xfe;N.\xfa\x1b\xe4wV\xd4\xf6\x0cP\x12\xfc}\xfa\xbc\xce\x16\xd0\xa6y\xab\xfc\xa1\xa4\xbd\xe0\xd6:\x9fG\xc5\xd3\x1a\xc8\x8dG\xb4\x1e=\x95\xb2\x1e\x9bz!m\x1dh\xaa\xa6\x1c\x14\xdd\xda\xc9\x94\x0c\xcc\x1aM!\xf9X\xad\xa6\xb6\xf4U\x1e|\xdc@\xdb\xbas\xfbe\xcdi\xb6\xfb\xcda\xaa,\xc5\xef\xfd1 )0\x15\xf4\x0f\x87\x04F\xc5km~,5\xce;\xbbs^\x86\xc4&8\x1b\x19\xf6\x19\\H\xdb\xd2],%^\x9f#\x8d\x14\xdcG\xce\xf6\xfdE\x8f\xda\x1a\xf60\xf2@\x87\xf2r\xb2\xdfx\x1e\x1f\r\x9f\x8e\x89\xf4<R\xf6Z\x93\x9b\xde|\x08\xbc\xdf\xcf\xda\x08Kle\x99\x9d\x7f\xc8C\xfd\xbe\xe3\x9d\xf3\xdd\xf8nkd\x94\xc0\xcf\xc6\xda\x90\xa1N@\xff\x97\x7f\x9ck\xd8\xc54\xe5\xf2:YLnk\xb9\xceT\xe2(~\x1a\x12nt\xee\xfdey\xfcw\x82\x88\x16~\x13wc\x8cgM4M\xae\xef\x88}\x96n\x83)"\xf6\x9az\x15\x9b\x97m\xb2\xcd\xbb\xdb\x85O~\x94l\xd6\x83Y\xe4\xa55\x0c\r\xf2\xbf^\xfd\xae\xdd\xce\xa5\xfa\xac\x1c\xf6\xb6\xd6F\xed\x99\xa4\xa7\x9b\xd3\xc1`\x02\x9b\x8ds\x97\x1a,t\xd1\xf1.\xd6`{mv\xec\xa0\xfb\xa5\xe7\xfa\x07\xf5\xfa\xf0\x8bS\xfa\xc2\\\xd0W\x1a\x82\xf9\xe6\xa5\x8an\xce\x8d\xa6T\xd7\xc5\xe6;\xda\x03\xf5\xda\xd8\xb4:\xc7\xf4\x80Y\xd4\xd8x\xde\xb5\xf1\xfe\xe7\x08\xca\xea\xe3\xa0\xa7\xa9F\xd0\x19j\xb8y\x06\xa8\xd4_\xd5^\n\xe1\xf1\x81W4\xdf9?\x90\xb7\xa9>~l\xc4\xed\x87\x8ab\x9e\x90a\x1f\x81(@\r{\xceM\x17J\xa1?\xcc\xebo\xbc_\xf4\xb2\xb3\xb2a)p\xbcQi\x06vM\x03\x04\x89\xcft\xf3u;1l*\xf6\x07)\x9f\xe9\xffC`\xdd\xdd\xca[\xb1\xfd6;\x91\xbep\xf5 
y%\xf4]j\xedp\xf8_F\x1f\x83\x86~1\xd0>\x1b\xb3dN\xb6\xb7D\xfa\xb8\xa5\xfe\xc2\xd6\xb9\xc7\x84\x99\x13\xb7\xf2_\xe4%\xb6d+\xe2\xbd\x03\xd3\x97\x9dO\xe9\x11X\x8f\x10!x\xa9\xb8\x16\xe5\x9d6\xe7\x02GY\xe4\xb9<\x7fZG\x10\xcf\xc6\xbe>G\xe4kn\xb2c\x8cs\xcdi\xd3)\'"\xf2\x8d\xf3\xc3\x98\xcc\x91\x94c\xdf\xf5\xaa\xc1\xa0\x1b\x95\x95742\x0eb\x8f\xb7EO&I\x80\xbc\x1b\xa7+\xa6\xd2\x9bw\xee\xb9g\xca\xe3&\x17\x8d\xdc\xe3\xc3<z\xa09\xaa\xc7*\xebl\x97D\xfb\xac\x86\x7f\xd4=U\xf7\xf8$\xccG\xf0\x9c\xb2\xcd\xf5\x1f\xd5\xbdBy\xdd\\\xe4\xa4y\xf4\xda\xe0\xcb\xdc\x97A\xb7\xda[\x92\x94\x0e\xc0i\x8e\xc1\x9e\xc6:\x0e\xeavv[g8\xcb\xee\\\xfa\x89\xf7\x12\xff^\xb7\xdaf\t\xe7\x04\xb5\x1c\xf0\xcd*+0\xe4}(b\npe\n\x10\xb3\xfeYX/\x15+\xea\x1fM\xfa\xb5i\x1b\xf8J\xf0\xdb\x98i-/~S\xcb\xec`\xd7\x95l\x91\x97b0=C"\xbe\xb0\xfa\\Nk>\xc9\xc4^\x9d\xac\xf3\xdcc\xd3\x89M*\xca1\x13\xbcW\xd76\xfb{\xc6\xcf\xf2V\x1a\x9e\x07B\x92\x08\x13\xddJ}\x93\xd8\xb7-\xe7\x96\xbb\xb8\x0f\xcbl\xbf\x9e\'=\x00\xac.\xf5g\xc5\xcd\xffd\x96M\x1bk\x12b\xe09\x9d6N\xe4\x90Bo\xb9Hq\x14v\xa4c\xad\xe5\x92\xdeSN\'\xf5\xae\x8b)\r\xd6\x1b\xbb\xca\x89\xfd\x8aS>5Z\xffh\xff<\xa6\xe5\xbc2\xdb\xd7\x16\xa5\xc5\xcc8\xc3\xff\x9e\xfb9\x95\x7f\xd9w\xfeO_\x8b#\xf1Q\n>C\xad\xf3\x19~\r\xaa9`\x87+\x95:\xb2\x03\xa4\x9f\x84\x1e\xc5\xe2ow\x98\xdc\xc8\x99\x97\xae\x03\\\xde#B\xed\xb5o\xdc\xdc\xae\xe9\xd9\xa8\x00\x90\x8a\xb7\'\xfbx\x16mc\xbc\xb7\xf4\xcb\xa6\xc7u\x942+-~\xd2g\x8d\x9c;\x8c*\xcc\x96\x9f\xe5c\xd4\xa8(\x9a\xf2I*\xb4\xe3\xea\x1f\xad\xb7\xe1>9\xf8#3\xfc\xc0\xb7\xea^\x9bp\xd9_\x02a\xe1p\xaf\xbf\xeb\xf0r\xd4\xbf\r\xd4A\x93G\xe2\x93NG\x94;z+\x89,\x91s\xa3\xa4\xdb\x88\x90\xfa,\xf4\x13\xb7\xc7\x0b\x93\x95)\xf3\xcd\xb3\x15@u\x08\xaf\x18\x84\xa5J\xaa\xa9\xe5KT\x8b\xe0_\xdb\xa6\x06\x9c\xba?\x02xk\xdalA\xf7\xc2\xdf\xc7\xa5\x06\x9fk+w\xbc\xefW\xda\x0c.\xb1k7\x8cr\xcdE\xd7\xd5\x11F\xe4\x0e\xe2\xa6b\xce\xfe\xbezK\xa0r\x87vuT\\\xc1\x9e\xd9@\x86\x83~t\xb8\xfa\xfe\xb9!\xb8\x13\xb8\xfa\xe1\xaa\xf3\xf7\xd2\xef:\x05\xe9\n\xa4\x7f\x8e\xd2\xd5\xe0uD\x8f\xf1\x17u\xc0\xfd\t\xb2*o
!@{^\xe3T\x80\x07\xf0:\x19\xd5R\xd7\xe8\xe0\xa36\xf6\x00X7\xd2{\x13\xa5\xb43\xe9\x1eo\xa0-\x03\x8ex\x86\xbf\xd8\xf6{A\x12\xbb\xa6\x92_TlP \xd4\xbc\xfb\nO\xf2\xf1-n+F\x01\x8e\xe8F\xc0\x0f\xeb\x9b\xadD\xe6E\xe5\x96"\x9a\x1a\xde\xaa\xe3\x0eUYC;;\x84ad8\xd6\t\x89\xad;e\xf4\x83\xc9\x94T\xb0\xaa\xc79\xaa\xd3\xeb\x8d\xd7\xff\x1c\xba\x16\x1a<\xf0\x903\xc2=@\xc1\x10\x16\xf1\xe8\x02\xd1~\x87\x1d\xc6\x03\xc5\xd7\xaf@\xb3\x83\xc9\x9c^\x90\xc2\xb5\x12Xq\xed\xc3\x87\x1e\xd4\xee\xcfS\xe1\xbd\x8dc\x1fD&\xaf3"\x9bm:\xe4\xc0\xaf\xbaD\xb9\xf1\x0eC\x94\xef\xe3{\xeb\xa9T~\xea\x85/G\x9cP\x16\x11hx\xfb\xc5N\x17\xd3\t\xd8\xf9\x909\xc69\xc2g?{\xcc"|\x8d4OM=~\x8e\xae\xc5\x81&\x15h\xf1\xed\xa4;D\xf2\x95\xd3\x17A\xado\xea\x96\x03\xb7\\|\x953\x8c\xfb\xd0Y\xeb1)R\xa9\x84\xdc\xfe\xd0\xa6\xd9\xd6\'x_\x83\xab/%Vc\xc9\xa5\xfc>\xb2\xed@o\xdf\xdfZ\xbd\xb3\x85\xaaN\xdb\xc2\x1a\xb5_^\x19\xab\xed\xa4L\xc6\xba\x03f=\xfc}|\xbah\xe7}\xd6u@cAn\xcc\xddV\xa6+Q\xe4\xf5\xf9\xa0\xb8\xfeQ\xa0!\xfa\x15\xd8\x11J\x08\xec\x946?\xa9h\xfc\x01\x84@]\x94&&\xde\xf3\x8a\x05\r$\xbd\x01y^\xd3\xa8\xc8\xeb\x17\xe9^\x19\x16\xe8[\x96\xf7\xda\x0b"\xdbS\xea\xfdJ\xafC\xbeH*\xfc\x189\xa8#\x08\xf3.\xa9\x90\xc3\x04\xe6\x87w{O\xbf\xc6,\xe4}\\\x02j\x9f\x9b\xa9\n\xf4\xf8\xde\x03{7\xa4\xe8V\xf9j\x12?<\xcf\x87l\xa1\x17\x89q\x81+O\xaeU\x7fBX\xd3yB//\xe6g\x00$\xbe\x85\x0c\x07\x99\xac\xb9\xab\x93o5j6\x8ax@\xa4E^)\xf3\x89\x0b\xe3\x11\xd1\x88o\xcc\x03Zj\x16\x04]\xed\x16x5r8\xf8\xcbw\xf2\x1b\xa5\xd7\x8azc\xda\x07\x04\x07UR\x95\xf5L3a\xc2\x1c\xbd\xb4C\xe7;~\xcf\xafN\xed\xb5\xec~e22\x02r\\\xba\xabf_\x03\xf1\xf9\x8c\x18\xa3\xc16\xa6\xdf\xc2[\xff\x05\xbe9rQ\xaa^6\xa9\xfe\x916\x8d\x1e\xa4\x1564\xcc\x1bx\xfb|-\xf20W9\xd8\xda\x07\xbe\xb1\xbc?\xd5Wk\x05\x9e\x8c+\xc0<23\xf2*\xe7LY\xb6\xc1,\x9d%|\x8f>\x96\xb1y\x93\xe6\x1e\xc9\x0b\x1b#K\x7f\x10\xe9{\xb6_\x9esG\x0f\xb2\xf2\x02\xaa\x9bMMX\x02\xff\x008>\x97S1\xe7O\xaa\xb3\x1e\xdcO\x86\x8d4\x90\xff\xa49\xb8\xbe\xb6\xf1)\x0c\xaa\xf5\x8e\x86;9\xa9C\xcciC\xad\xa1\xdd\xee\xca\x83P\xb5\x01\xc3N\x7f\x06\xca$\xc9\xee\
xf1\xdfZ7\xfc\x08W\x7f\x96|\xff\xf5\x9e6\xd2\x80\xc5\xf0\x92\xdd\x9b\xd6\xce"w\x8ak)\xdd\x05\x11{\x0e{\xbe&\xfe\x97\xa5\x86\xe4\x8f\xcf\\\xf0rg\x15\xd0u\x8bE\xc9{;\xe3@\xb1p`u\x98;\xe1X\xadNWl.\xb2\x17\xe2\x0cJ\xb5\xa5\x8du\xae\xac$jk\xbd\xd4\x8b\x89{\xac\x1e\x10\x84\x8b6\x9c\x84\xee\xd5\x81^\xbd\x16\xf8#\xf7\x80\xcau\xa8#O\xd6\xc4[\x8d|\x10\x0f\x06\x0e}\x88\xede%\xfc%\xe8\x9b\x86W~\x0b\xba\xddW\x8e\n{\xa8\xf6J\xf0j\x8e\xf7\x18\xc7\xd9\x10\x0f\xc0k\xd7\xdaz\x10\xee\xdco\xdb2v\xc9>4K\xce\xb9\x9fY<\xa4W\xbf\xe2\xc0\xce\x97\xa0\x17\x11^T\xde\xc6;|\xbb\xce_\xd9V\x88e\xf3\x1b-\xb7@\xb8\x97m\x87\x0c\x15\xb3x\xa5\xb8F*\xec0\xdd\r\xc9B_\xbe\xe7\xfd\xfe\x1d \x99\x96.\xa4\xa3\x82\x193\xfd\xdc\x19\xc5\x8d;\xfe\xd7?\xc8\xca@bAo\x0cV\x9c\x02J\xe9\xfak\x92\xaf\xcb\x87\x88$X\xaeKj\x12J\x8c|z3\xb2O9\x97\xda\xf8@\xd8\x16\xf8\x9e\xed\xba[\xb9\xe6\xfc\xca\x88\xa0(x\x9f\xa1\xdd\x93\x99%\xe9\xf6*\xc06\xfe\x8eYH\xb0n\xfd;C\xfa;\xa1\xa8\xe0\x9ba\xd5 \x15M\x97x\xe8x\x17\x90\x9fh\xd8\x99C\x0fq\x13\xe5\x1b\xef<-\xb40U\x90T\xdbB\xf6\x04I\x8e\x92\x7f\xc4J\xa1\x1eK<\x87+\x03\xeey\xd4n\xfeq\xfb\x0b5\x83\x8f\xd12\xd2]\x95\xb6\x0bi\x9c\x1d\n\xa5\xce\x19\x7f\xd8w\xe5\x1b\x14\xd7\x98\xe2\xbd\xdf\x13\xe8\x8e\xd8\xcbF&\x94\x83\xd4\xd6\xa0\xfek\xeb|\xc1\xd0\xc4=J?\xc2%/\xa8DW3\x978\xac\xdboJ*\x0b\xb9\xaf\x04\x82\xe0\xf4\x16\xdb\xc2e\xc1/\x8e\xf4\x1e\xe9)g\x81\xaf\xa3\x92-\xdcp\x0f\xc3x\x1a\x0c\x97\t\xe6\xc5\xad\xae3g,E\xa73\x1f"B\xd1m;\xe9\x9d\x9f\xed7\x13\xf5\xbas\x83\xa3\x8e}\xfbHb\x9e\x92O\x86\xea\xba\xc6Z\xd3?c\xbdu\xd4\x1a)\xd1VE\nK\x1b\xb3\x12xm\xea\t\x00\n\xeaK\xac\xac\x93\xfabW\xc3q\x13\xfb\xba\xdb\xe9m\xe5\x9aL\xbf\xe0\xfeg\x9d\xbc\x1dEB\xe8\xf7\x8eR!\xe43y\x11\x1e\x1aS\xba\x9e\xc463\xb1o\xbeK?N\xf8\xf2\xc3\xa4\xb82\x9d\xa1h\x91\x02\x91\xc1D|\x94XI\xcf\x8d_\xa80\xf3\xb2\xc2\x84\xce|\xaeV\x0f\xf9\x04^+\x8b\xef&L\x06%@2I+Y\xcf\x9c\x03\x1c\xc3\xc0Tz?#/\xb8\x83;\x0e\xd8\xe9\xe4\xb3\\\xfd\xeaa\x16&\x9c\xa2\xad\xe50\x834\xf4+qc\xf2\x95\x0b\x7f\x0e\xd3\xde+\xfa|x]oWI\xed\xd0^q7\xb8?o\xbe\xa8\r\xd5\xd
5/\xd7\xef\xf1\xf2\'\x93\x03\xcf\xe7\xc6\x11\x06\xcc\xe4\x93\x03\xf4iy%q)\rGq>\xc5\xac\x05\xa3z\x07\x80\xd3M\xbf\xc5P\xf8\x1f\x96\xd9\x19\xa8\'\xf2wv4\x1b\x8c\x88c4\x8d\xcb\xc9\r*\xa0\xe5\xa7\xcb\xd2\xa4\xa8d\xd6D\x8d\xa0\x81\xa6\x13\xb5<\xbd\x11\x03e\x8e\x1d\xd0\x90ma\xef\x96u\x7fe\xe0\xd1&Cc;\xdf\x88O\xc3DH\x8b\xde\'\xc4\xf4\xf8f0\xb6"\x14\xf3\x015\x1e]7\xe3\x05\x8e\xac\x0b{\x88\xa8\xd5\x01\xd9!\xe6\x91\xb1\x1a\x84-\xdc\r\xfa\xe6G\xc8\x8e\xde\x02\xea\x9c\xde"\xc3\xd7\xa8\xe9\xc4\xa6\xccw\xd2V\xec*\xf9\xa1\xb9\xf2\x17\x94+\xe0\xd5o\xdd6\xc1\xa0\xe2\x9d\x9e\xb6\xeaF\x9b15\xfc\xb3\xef\xd5\x0f\n"\xa2Z\xd6\xb5\xfc5\xc7\xe6s +\xe6\xeb\xb2\x96\x8f\xe5@<\xe6O9\x7f\x9dr\xcd\x04\x1a\xf7E\x86\x10\xab=\xb5\xe2%\xd8\xf1\x15\xb7\x91\xcd\xed\xeey\xf7\xe9\xee5\x0e\xe2\xf4\x04\xbak\xa23b\xfdHh\x9fe_k\xdb\x97\x03\xddU1\xea.{b\xbc\xad\xf7^9G@\xd8m\xb8\xb6O\xcc\x8d\xc83<\x91\xbfa"\xb1@\x99bO\x9f\x92.\x11\xc5\xe6j\xafg\x00\xbcZ\x15A\xea\xe84s\xcc\x00\xa3Z\xd0\x18\x1f\xfd=.\t\x10(\xeb_*\x83%\xd8\xc5pb\xb59,\xa9\x8c\xe6\xecy\xc6\xc1Fa\xe9\x90\xc3b\x8e\xca\x8e\x14\x88\xff\\\xc9@\x1fM\xd6\xeb\xb0\xe3;\xe8\xb1\xef\xbe\xbd\xad\xc2\x98\xea\xfa\xe5C\xd0Z\x0e\xba\x9c\xd2\xe2\xc1\xf8Q\xd3\x99\xb8\x8fS\xf7\x05\xb5\x19/\x95\x9cUC\xbf\xf1\x08\x19j\xe7\xf3\xb2\x89\x13\xf1\x18\xc2e\x07\xf7.W\xaa\xf2\xa6\x9ed\xbc\x0c\x0c\x97V\x96\xc1\x19\x05\xff\xeb\x9aW\x8b\x94\x0c\xe4l0~\x0c\x83\x13\x08\xcbOr\xa6\xe1\xb0d\xbd\x11Xf\xd9tf#A\n\xden\x13q\x7f\xe7\xf8\xe5K\xd2\xef\x8a\x02\x05\x05\xadM\x8d.f\xee(\x13\x12\xd3\x19\xda\x17>f5F\xbd\x1b\x1a\xea\x95w\xd6<\xa1*F\xf7K\x9a\x9b\xce\x84\x81\xde\xef\x065\xe6\xd4\xfb\xecx\xee\x9bH\xd0\x82\x1e\x97nx:\xa0u\x1cR\xe0C\x1ew\xf7;&\xba\xbf:\x9bu%\xf8\xbb\x1d\xe2\xb1i,\xee\xef\xc5\xb1\xe7\xf6\xba\x8b\xa6$\x18A\xf2\x8c\xfe\xfbE\xb1\xb6\xe1\xcc\x1b\x9a\xfe\\\xa9-7\xbf\xa1\x15\xe0\x92\xe1"j\x8c\xa90w\xbaO\xd6\xb1^\x8bP\xfb\x80em\xdb\xc6=\xe4\xc8\xc7\x07nA\xe1\xe8\xc87\xa1n\xf6\xfe\xe3\xf69\x91A\x07\x9d\xc6Q\x87%\xaeJ\x89\'\xe7\x83L\xfa\xa4\x1c\xc6\xf2|$uH0T:\xd9\x12\xfe\xe5\xe8
iv\xb4E\xc2G\x1dj\xda\xbcC\xdbat4j\xf7\xd0\xe9,\xe7A\xf1\x14w\xe8\xa9\x197\xf4\xbf\xabV(4\x02rx\xef\xb6o\x01\xea\xf8\x9a\x073\xba\xc4\nw\xba\xdd\xce+*\xf8\xa9\xa0W\xa8\r\xff#\xe2\xa1\x18\xd9\xd3\x19M\xa8tz\x07"\xf2G*\x1b\x99\xdc\xe5)[\xd5\x7f:\xeb\xb5X\x1e\x90\xec\x10Y\x06YZys\'[,\x16\x8eT\xec@-\xf7\x05a\xfba\x86&\xbf\x9c.\n\xaf\r\x04\xa5d\x026\xa6\x92bg\xb4\x7f\xd3\x94f\xfa\xfe\x81ZX\xf42:*K\x88\x93\x1f\xd2r\x88\ry-{\xef\xbd\xddS=]q\xecyw)X\x84[&-<\xe0\x1a9\x9d\xf6\xc4\xe6\xe5\x02\xf8\x13d\xdc\xed\xbe\xbb\xf7\x89\xcf^\xa0\xfd\xa1\xdeg\x91\xfd\xb8\xb0\xa0\x85\xc5\x06\xec;\x01lf\xc6\xa4\xff\xaf\x0b\xdb\xe7}S\x0c%j\x9d\xa7\x17\xfa\xbf\xe7\xd4\xd5u\x12q"\x17\xccU\x1a\xc3?\n\x83Nj\x19\x9a\x1c\xd5\x8aG\xfc9\x7f\x83\x1f\x17L\xe8\xdd\xaa.\x12\xfb\x87\x1a\xfdy\xfc\xbdI.Od`\xe6\x8aA\xf1\x10\xe6\xc6\xeb\xbcgj\xa9\xcd\xac:\xa9\xfb\xddX3\x93\xcc\xd5^\xbb\x83\xa9\xa7\x84U\'\xc5\x10\xa7\x9b\x03\x90\xe2\xe2\xbc\xd0\xce4\x833\xe0\xbd`\xf7\xcau<\xe5\xdc\xfe\xa7\xcds/x\x9e\xc9\x84\xbc\xac\x7f|\x98\xb3\xd6~\xd5\xf8&\xfd\xde\xcd\x80\xa9~\x8c\x7f&\n\xed\xed\xff\xe9\xb6\x8e\xb4\xd7ABZ:\x03\x1c\xdd\xa2\x9d\x9c\xf8m\xc6\xff\xd3\xbc>\x87{\xab2\xea\x89\x9dY\x0c)\xbe\xf5\x8c.\xef\xf6\x06\xd8\xcb^\xb6\xd6\x9bz\xbd\x8e\x12\x97\xab\'st\xfeCn\xd3\xe49wg\x94\x11\xffW\x8a\xeb\xb0\xc3\\\xe9\xa5\xfcY|s\x03\xc7\xc7\xc3\xb3\xa5I\xf6)\xcb/\xe2\x12\xa7\x96\xe7"\xa2r;@5\xcb\x91\xab\x0f\xb8\x18/\xf3jN 
\xbbi\xde\r\x1f6\xc79>\x1e\x990\x15\xc9;a:3\x0ce\x8f\xe5\x98\xbe\x87J\xe0d_\xda\xe5\x01-\xca\x9e\xb3\xb6\xe0\xa2\xd7\xe7\x1ce\xd1\xe9\xe4\x1b\xb0\x18`\x81\x12\xa7\x9au\x15t\x84\x8f\xa7,\xd3/\xa9\xcd\x1eT:\x1bP\xb2\xd56\x18\xec\x15\x7fZ\xe9\xd1\xdaD\xbb\xb2V\xe5Y*\xc2\xf2\x97{\xac\x13o\x00n,;\xfdw\x93j\x13c}=\x8e\x00z\xa0.\x15\x9a\xd7`\x82\xfcbo\x9f\xe9\xac`l\xcb\x81i\xaa\xc4^\xc8\xc2\xe7E\xfc\xbd\xee\xbb(\xfbR"q\x01)\x03L\x8a\x98\xd9\xb6.\x16izu\x94\x0e\x00\x8dL$\x14oVt\xb8\x8b\xce\xdc\xa8\x92\xf5<\x1cr\xb0\n\x9aa\xf06+\xeb\xa3j\x12G\xae\xbe&\xdc9\xc8\x8a\xa0\xb9.]A\xcc\x15\xc1\xca\r\xd0.\xde\xbd\xd9+\x0b\x1f \xc5\x8d\x8cd\x9d\x17\x1e\xbe\x99|D\xd5\x8b\xebn$Z\x0e\x00\x0c\x9f[<M\x9aB\x03\xb1\xdcEQv=\xac\xa2\x1f\x95\x03@\xc4w\x1a\xbe\x8a\xf8e\xd6H%\xae/-\xa8u5\x1fk\xed}q@\xfeI\xc3,6\x03\x1e\x03\xceQ\xcf \x8a~\x90w\x8f\xbc\xe4\x83\xb2=\x14\x1e`\xa3\xc9\xd4\xf34b\xc4\xa9\x12O \xc8\x19\xb0i\xc5\xeb\x8b\x82\x8b\xbc\x07d\xdcz\x1d\x04\x9c6\x9fS\xb7\xa9l\xd4T\x12|\xf1O\xf5G\xba6*\xdcy\xcb\xd9f\xf6R\xb31\xd6\xf9\xa9y\xab\xee\xcb\xf4y\xb9\xf0)\xa9\xb8\x97\x8a\xe2xs\xaa\xce\xa55\xe2\x9f\xbd\x1ac\xd4\xf6\xf4\xd6\x8d\xf5\xf9\xf0>\xdb\xe6\x97\x8eL\xb7\x17:\x9e;\xd3M\xe0q\xc8\xabC\xec\xc3dY\xb5\x0c\xc3\xde;\x8bf\x1e\xd6\xdfk\xb1\xdc\xdfR\xa5 
\x19&\x11O\xb9\xdf\x1c\xfeq\x84\xea\x1c\x1c\xdc\xf2\xfe\x02h\xe0\xa37\xc9\xcf\xfc\xd2\xcejS\xc5\xd8\xebimX\x99\xf4=\x1f=e\xd4\xcc\x9e\xd4b1\xbb\xce\xafD\x8d5S\x85x\x83\xa3b\xb1\xf6_nQ\x04\x84\xa2\xb4\xc3}Q\xf48\xfa5\xd5\xf5\x7f%A;\x03\x99\xde\xd8(\xc3A\xe3F\xe7\xb2\x11\x01\xe5\r\xdbx\x98\xe6\x89\x19T\xe1\x99\xdc\xbf`\xd1\xb2\t3\xf58\x87\\uY\xe9\x81\x8c\xafc\x01\xben\xea\xcbW\'4\x17\x84g\xbc\xbdj\xe5\xb4\xe3\xd9\'%w+\x89?f\xe6[\x9e`\xac\x07s\xdcF\xa0\x1b\xa6\xa63\xa3O\x89\xe6\xdc\xeb<\xe1\\C\xde\x82M\x1bb\xe1\xf4\x9b\x11}sh\xe0\x8c\xf0Q\x96w7\x13V\x961\xb5r:;\xda\xdc\xb1\r\xadJ\x7f\xf8U\xcc\xe3\xfd#T!t\xcb\x0e%\xf5\xba2\xf9B\xa9\xeay\x7fa\xefO\xc0\xc4sC\xee\xa7]\xda\xec\xfa6\x92"*\xec\xf1V\xa1=\x9d\nyX\xc3\x0e\xb2L#\x98=\x13\'\x8a\xb5j\x01\xbf>@\xc1\xf0\x9eb\x0bm4\xf74P\xf2S\xa1\xde\xd8\x06\x1fau\xd0\xc3\xb6g#\xde\xd3`\xbdb\xf7\xfa[#\x91w\xb6%\xd5\x07\xd0\xb3oZ-_\x9c\xa8\xfe\x05\t\xe8\xad\xba\'\x89\x05~\xfe\xea!\x9dW\xea\xab\xa2\xb9\xaa\x96\xd6~\x83~\x9c\xa3\xef\xf2\xc2\xfb\x7fu\xf5\xee\xffL8\xe0\xffw\xe5-\x95\xa4R\xe4\\\tIHr\x18F%$$)9/\xc4r>\xccqlK\x8ar>\x849\x8b"9\x1f\x86a\xce\xc7\x9cO\x9b\xe3\x9c\xe7\xb01\xcc66\xe3\xfb\xf9\xfcp\xdf\xf7\xe3\xfe\xfe\xf2|\xbc\x1e\xd7\xf5\xb8\x9e\xd7\x9f\xf0r\xed\xcc\xb2;v\xbe\xaeD\xba\xc5\'/\xb9G\xb7\xa4\x85\x06\x12\xe9ve\xac\xb3\xe16\xc0C\xf1Eu\x91\xad\xdd\x9f\xac\xdf\x92\xc8H\x19\xabu\xcb\xf1\xa1I[\xeb\xd8\xef\xe6\xc2\xabg\xfbY\x8c\xcfbo,\xf5\x80\x9f+\xd4\xca\x86\xce\xfb\x9e\xbbj\x99/\rnz\xcd\x14xV\x9b\xd9\xe9\xb0\xdc8W(\x01\x96b8l\x16\xcf<\r\xd0\xc1\xfe.\xd6\xd9\x17\xd7s\xb0\x9b\x8e\xdc\xef\x82\xfe\xde\xfaU\xca\xeb\xa2\x7f\xbc\x93+t\x7f\xd7M\x1df\xa9Z\xcb\xdd\x0b\x8e\x02j\t\xbf|\x9c\xe9\xff\x8c\xc5\xef\xe5V\x0c\x90_,Uq\x81f\xc25\xecO\xe6~\x96\xb9{\xfe$\xb8w\xa3\x8e\xf5\xc9\x0f\x99\xbe\xd7}\x8fDr\x87N\xc3\xeap\xef%T\xc1\xd1U\xd9\x1ea\xa9\x96\\L\x8dp\x9eU\x1f\xce\xaanin\x8c_~\x84\xf9\x1fQ7\xfdP\x80\xe8\xd8\x0f`\xd0$\xcf\x1b\xe6\xd4\x8b\x01w7fyy6\xd4\x19%\xa8h/\xe7\x1c\xdf~\xd0\xad\xdc\xf5p\xe1T\xc0`E\xf37\xe2\x8c\\\xfd\'[^\xf6\xb5\x
9f\xe3\xd8\xd2\x85\xd2\xc0\xeb\xd0\xd0\xcf3\xe0\x98s\xe8kG$\xaf\xb0Fb]Y8\x0fpg4\xdcQ\xd1\x87P\xc3\x87\xf6\xac\x99+j\x9f\xb5\x8bW\x1d\xe6\xe61\xf9AB\x85\x9b}\xf3\xb5H\'}A\'\xdd\xaa\x06\xe7\xbe\xe5\xc0\xef\xdf\xd1\t\xc1\x08\x02\x13\x05\xb5\xfa_\xbd\x89\xe9B\x92\x14|3A2\x1a\xff\x15H\xfa\x8a\x95\x8bo\x0f\xdd?\xafJ\xd8\xa8\x9dWX\xdc\xfb\xa5i\xd4\x87\xe7*VLj\xea\x0e\x1a\xd9xsk\xb3\xd5\xdd\xa8\xec\xaf\xf1-e\xcd<SD;\xde\xff\xbb\xf3\xe2\xaf|\xd5xjb\xbd\xd4F\xbcn\x93\xc6\xde\xbc\x18.\xf1\xc6 \xcdq\xa9l$(\xfc(\xe8\x9b\x8a\x18A=\xbb/\x19M\xb5\xa0(G]\xc1u,g\x1e\xf6\xa7GUlJ\x89\xa6\x9c\xd5]X\xd3\xe6\x98\x18*\xde\xc4u\x1e\xfa\xaeO\xb6\xff-\xd7\xe8\xf0}\xbdgU\xc5\xc1S\xf6%q\xf9<\xd0\xab\xb4?\\\xb3\xa6f\x90\xf0\x01\x90#\xbfi_o+[\xf6Q\r\xb8\x98c\x1c4=d\xb8;\xbfd\xdc{xW\x02\x1e\x14\xd9S\xd4<\x15\xdc\xf0\x9d\x9cb\xc2\x08\\@\xba\xe5\xad\xb5\x91"\xe9\x8e\x10E\x00\xa4%\x81\xde2g\x15\xa6\xef\xe2\xcf\x0b\x13\xe5j\xed_\xde\x19\xe98\x89\xb5\xaa\xe2Y.\xff{\xffQr\x92\xc6l\x9e\xf2>/w\x1d\xc1\x9f\x94\x14}\x98\xbd\xb2\xd5\xad1\xd4k$\x18\x8ae\xda\x9f\xd2\xfc\x16\xd4k\xf3\x84D\xb5e\xbe\xae\x02\xcdo\xf1\xcbCWN\x12\xcc\xd3j\xe8\xe1\x00bOm\xde\x1a\xc0\x1eH@\xab\x9d<u\xd3\x1fv\x7fT\xf6\xdc\r\xda\x9e\xf1\xa1\xe1]fJ\xabWv\x99\x04\xf6qYR\'A\xf0\xf0\x94 \xc8\xaf\x9f\xab\x16DZ1\xf5\xb2\x11\x03\xb6g\xf3\xeb\xba\xe0\xbb22\x8dO\xd4\xc7\xf7g\x18\x8a\x99\xb1\xae\x87\xf9y%k\x0e\xc5o\x95H\x89\x99?H\xc6\xea\xe3\xe2\xd7\'\x0b\x96;\x19?\x040QwR\xca\xcbD2X\xd4\x10\x0bd\x07>\xf3\xb7\xe8\x02\xb3\x9a\xce\xf0\xae\t\xf6\xcbW^\x14\xa6\xf2\xb6\xae\x8c\xd16\xec\xb7[ja\x97\x0b\xcb\xa4\xcc#{\xe1\xd2\xfd\x13\xdf%\xe6\x0fR>+\x1c<\xd0\x8f?<?8T\xfa*\xc5(S\xf10\xaet\xa5\xf1zG\xe7g9O\x1b@\xfb\xe7*\xca{fQ7}t\xb0\xa6?/_\xd6O\x0b\x1f\xd6\xf4\x03\xd8v_\xae\xdb"\x98\xd6\xd2\x86\x9e\xfb\xa4\xcc=\x0f\xa8\xf5\x8a\xceJ\x18\x0f|\xa2,\t\xb8\x88m\x18~\xc9\xce8\xe6\xf4\xdf{\x93t\x18:!\x81\x04\x9a\xbe\x8cR\x07\t\xe3\xc5\xbf\xa2(\x99\xd8I\'+P\x04\xecZ\xa9\xb7\xb1\xc6\xa8\x88DPI\xc3`zh+h\x9f>\x1a\x0e\xbbT\xcc\'/wcJHe\xeb\x16 
\xe1zu\xa96Mo\xe0\x8d\xffA\x1f\xad\\\x99\xee\xecr\xa8\xbc2k9\xd8 U\xb4f\xba\x96\xb2\x7f\\\xf9\xeae&\xab\x94/\xd9\xff\xe3$\xdb\'\xd6\x87\x1c\x8f$\xfc<Q\xcds]\x11 \xb8\xec\x9f\xb3\x1a3\xd2<G\x92\xdb\x14\x9c\xec\xfb\xa2H\xf8\n\xd4\x15\x1c|\xe8\xf2\xb6\xb9\x93\x81,\x97)\xde\x12>\x18\x088\xea\xb7\x020En\xc9-\'4\x18\xb8z\xa4:\xae\xcf\x05\xaa-\xfa\xdf\xf9\xbdu\xba\x17\xae[\x15w\x9cev\xe5\xe8<S\xb2<\xfb\x83gD|-uSy3\x01)vK\xe20\x8f\xfe\xf7\x8f|\xb6\xa1O\x04I&\x1b\xe4N\xd3\x08p\xc2\xaa\xff\x17\xaf\xe5\xfa\xdf\xfd\xae\xe9p\x94\xdd\xb9Tzf{l\x1a\x93\x0b\xf1P\xd2\xa6\x8b\xf4KF\xb1>\x86\x16\xac\xbc&\xa0\xe0\xe4 \xab\x8e\xbd\xbcv\xe1\x86\xcc\xf3\xc2\xder C\x1b!\x03\xfc\xf0\xb3\xec\x97\x08\x07\xe5\xd3\x1f\xf2\x0b\x8c\xeeQ\xa0O\x97\x81L(Q^\xf4\xa1\x91\x90\x1d\xef\x9a+Jj\'o<\xb0\xad\xa9\x84,<\xd7\x0cE\xc6s6U\x8b@\xefHF-.\xb5a\xf98\xcb\xa2\x83^\xc48S\xd4\x98\x02\xb1E2\xc5f\x9a\xcc_J\xb1\x80\x85{\xa2\xa2\xa6\x93\xdc\xb8+3\xf7(\xa7\xf3\x02\xf7/\x9e\x9c\x8b\xf2\x85W\x1eK\x94+HhG\xeao\xf3\xd0\xb5\x83\xfe\xdd=~\nO\xdb\xa5\xfa>\x12\x135\xfd\xb2..\xbc\x8a(\x9aw\x10\x8eD+\xc7L\xca\xd9e\x9a|J}]k"\xc8s\xbf\xb0P|\xfb\xa6\xe4{\x97C_\x89\xb3\x01=[\xe2(d6c\xc2\xf3?\x11?\x037U_\xe5\x08\xb1)\xe9\x9f\x8bG\xae\x98<`3iD\xb9"\x7f\xc9\xcf\xce>\xd5G+\xeb\xe3\x02\xd3!Y\x06\x02\xf1s\xe4p\xc9\xed\xf3\x9d\xbdJ\xbfy\tl\xe8\xe3N=o\xd9WK#\x87\x90\xc54\x84\x18\xddSh5\x98\xbb\x04\xda\x1c\xb0Y+\xf0qm@\xf8x\xe5P\x06\x13U\x9f\xc1\xd7Y\x1a\xb8\xe5\xa0\xd6\xed\x9ckT\xaf\xfc\xb7\xea\xe2k\xdd-\xbe!\xbf\x7fB\x96\x1a1?\x0b\x1f\xbb\xf6N\xc6\xed\xde\x05\xec\xa6\x82\xbe\xf6p\xb1\x9br\\+\xc7\x1d=\x1f\xaa\xe8\x1f\xdd\xac\x1el 
\xcb\xd8\x11\x1f\x12\x1b?\xe3\x99?\xea(\x1e\x9b\xe98\xe3\x90\x1ba\x98\x88\xcf\xf3rJ\x1c\x1b/\xb9\xb1\xa6k5\xd8,\xbf6\xb0p\xcb\x84dL9\x9a\xf7;sS+O)\x8f<Q\xac\x14\x87\xc9W\xb1\xffZ\xaf\xb6P\xd1\xf7\x93a\xec"\xdbR\xdc9\xba\x14\'\x1e\xf8\xce[\xc3\xab`7b\xdc\xd7\xf8$\xdd\xf0$s\xb2\xf1\xb6cn\xc6\x99\x81\xdd\x17\x86R\xf6gR\x99\xc4\x0c\x94\x1c\xe3\xba\x02O\x1e\xed\xf0-\xa5\xbc\x7f\x11k\xda~\x07\xd6\xfa\x1bw\x15]\xda\xb4&G\xc6\xf9 \xe7&)\xf2\xc8\x14\xc5\xb1\xf9\xe6\xd1\x97\x07\xfd\xbd\xaf\x8b^\x94T\xd6\x0e\xcd\x8a\x0e\xcb\xac8\x1d\xc5\xdbqOR\x9e\x10\xcf\xfa\x89e\x1a\x97\xa2\xf0_\xa7\x7f+}\x82\xa5\xd8\\\x9b\xeb\xc6o\x8a\xa5\x94p \xa6\xf9\x8b/\xc1\x1f\x91\xb2\xbe#D\xef{4\x9a\xb1\x92.\xd7\x9e\xac\xdc\xb7\xc5\x8e\x95V\x9dEU\xb2o\x02\x7f]\n\x05\xe4\x88\xfa\x9d%\xe7\xbae\x93\x13\x83R\xd9{D\xb0Y\x1d\xf7I,y\xae\x94bA\x98\x9d\xbbo\xa8\xa6\xbdl\x06\x8c&\xa0F&\xbe1\xe3\xea\xceQ\xe8\x17\xdb@ \xcd\x849\x82\xe2\xb8\xd0\\\x1b\xd9mM"=\x1e\xed\xa0H`\xad\xbc\x97\x82\xbd\x1c\\\r\xf7\xdb\xa6\xa1\x06\xff\x14w(\xe8vf{\xa4\xdf\xcei\xdfQ\xaeP\xd4/\xb2\xbb\xc5\xfb\xb26e\xe9\xe7\xfc\xdc\x0f\xc5B\r\xca\xa5\x14\xb8\xfb|8\xc3\xac\n\xd4\x139\xfc.N\x01\x18):-\t\np\xdd7\x14\x0eW\xa7\x7f\xd3=\x9a_w2)\x83\x8f\xc6\xd60\x00\x8a\x1a\x88\'\\\x06\x15u\xe2e.\x10yB\xa9U\x8c\xdf%Q\xc5_\xddS\xcbs\x1c\x0c\xbdkx\x9f|\xf9\xf8\x8a\x81\x0c9\xef\xaf5\x84;\ne\x1f\x10\xd5\x88\xba\xcc\xf0\xd7a\xef&\xaf{\x16h\xfb\xb2\xb1\xbc<\xbd.\xacD9\xab\xbd{Y\r\xe9\xb9\x9e\xa14L\x0c\xd1wIM\xcf\xd1y\xefA\x1e\'|\xb1\xc1\xd9\x07\x15\xa8\xa4\x84\xa8\xacvL\xdd\xd0\x9e\xd7\xf2\x8e\xc5\xec9>\x8c\xac\xcf6\xd6kz\xf8q/\xf6\r\xd4\xa5\xa4!\x11\x9f\x08\xdc\x9f9\xbb\\\xc9we\xf3\xbdQ\x14;\xf3\xc0p)\xd5]m\xc8\xf5\xe5\x17`W\xc4\xa6z\xa5\xff\x9dJX7\xd2\xbf\xb1kB\xe1\x91\xe4\xae\xaf\xa1!\xffR\xd5M_D\xbb;^-hD=\xb3\xad)+\xec\xc0U\xa0\xc3\x94)Am%y\xd8\xba\xe0}\xb0r$\xc5C\xff\x15\xab\xb8\x02\xa9\x86\xa4p\xc6\x83\x06\xb9~\x96\xf2\xa0\x82K\xeb?\xb9z\x87\x05\x11\xef\xa1\xf4+\x9b\x15*\xb1\xc4\xa3\x84d\xa8\xf2\x8d\x94\xdf\xf85H.\xfd3\xa0A\xbd\xfeP:r&\xfb\x9fh:\xc4\x7
f!8\tY\xf5zrh\x02+\xa5\xc7"~\xad:\xfa\xe1\x82U\x85.p\xee\xfd\xdc\x87\xf9\xb9\xbb\x0f%\xe6\xd0\xc0\xf1F~\xa6\xcd\t\xd9\xf8k\xe4\xcb\x1c\xe9f\xfch\xe4%j\x1a\xee\x89\xbf\xbdK\x96a?\x97\x88\xb7-\xf3Nd\x16`v\xdd?1o\xff_\xads\x96d\xcb\x83\x0c\xdd\xf4w\xa6\'6\xb4\xb4p\x984C\x9fG\xfaY\xc9\x8eA\xbf\xdbc\\\xab\x8d\xa2e\x7fC\xe2\xc1\x9bG\x01\xdf\xf4N\x9eu\xfc\x86\xdb\x1db\xde\xff)\xbca\x9f\xff\xd2Aq87$\x05N\xd9\xc1\xf5\xd8\xcb\xfd3et\x1f\xb5hTr\x87\xe5)%\x1c\xc6\x07\xc2\xd6\x8f\xb4\x122\xf0\xf2F\xaf\xa1\x0f4\x13\n\xea\x83\xcdC\xe5 \xf5q\xedw##PL[>\x13\xb0\x1d\xc0\x9e^Y\xa8IO\x18\xcb>\xbb,Z\xbc\xf5\xc7\xfd\x85Z\xd1\xc7\xdd\xdf\xba\xe2\x8a\x9d[y\xd4$\x1b\x17M\xb9\xd1\xba\x89\xc9\x1f\xfff\x9a\xdf\x90\x1a\xf2n\x00\xbf}>\x0f\xb0\xa0\xe9^|ED\x1aM\xc6v \xb4@\xba\xbd\x9a\xab\xc8\xa2%\xe5\xf0M\x1d\xb4\xe7\xc8^\xffIN\xb8=\x9f\xec\xc3B\x98\xd2\ry\xb8vFh\xe0\xeb\x82\xdb@\x8e\x8e\xe0\x95\xf7\xf3\xae\x1a\x1a"%\xb2$z(r\x84\xaa\xc2#{\xec\xb5\x1auk\xbc~\xb0\x83\x83\xf5\xad\xbb\x07\x91\xd9:\xaf\xdb\xd9\xae\x00 \x9b\xa2\xc3\xd4}-[\x8d\x82\xa6MR\xed#\x84l\x06\xabt\x84\xc2&\xbfT\xb0{Oh\xc1\xed74!Y\x8f\xaa\xff-s\xbco\x8bS\x9cEn\xb4\x88T\xb2\x86\xbe\xd8I\xb3\x9eUL%C\x13\x98%[\x89/@/4.`t~7\xb5v%\x8c{?\x16\xeb\x99\xd55\xfb~\xec\x83d\xbdp\x03P\xef\xef\xc4k\xc7\x99\x1b"A\x12\xf2u\x9d\x10GE\xf6b\xc3\xb0-\xadY\xad\x13gOD\xb9\x8b\x92&\xa4H3b\x0bs\xb7\xfeS\xd3\xd7\xba\xbd\xdccd\x85\x02\x11\xfe\xabT\x1e\x9f\xc3K\xec\xbb/V\xb2\xdbt;[\xcdf+"\xe5\x95\xddW\x92{\xcd,*b\xc6\xa1\xe9g;|Bz\x05\xcb\xae\xbfP5\xf9.K@eVn\xc4\xeb\xb6\x9c3\x85\xcf\xd1\xf0\xe4L\x0c\xf1\xe9*\x8f\x99\x9a"M\xe3\x13[\xdc\xcc\xca\xd1\x1b\xda\xbf\xa5\xfa\\\x9c\x16\xb0l\x8a\x1c\x83Y\x8eC\xe8\x87\xab\xe2~\xe2yQ\x9ak\xeaO\x1ae\x15\xcd\x98 \xd8Q\x0b\xcc\xc3M\xee|h\xe8\x03\xfa\xcdW\xd6\xec\x15>\xbd\x80\xb3\x1d\x80\x0c\xa7\xac\xf5\xe9\xab<\xbe\x96\x83Y\x84\xfa\\\xf0;\xaf!\xf3\xc8\xba\xd0w\x03\xdc=\xf7\x1f\xae z\xa5\xbe\xb2!Tjv\xdd\xd7\x10}=\x1e\xe5F\xe2\xbe 
\xa8\xdf\x8dpY\xe4\xfc\x02k:\x1d\xbc\x1b\xba\x16S\xafB{\xa9,\xfa>\xf0\x8e?r\x8e\x97\xea\xf5F\xd3r\xc1d\'$K\xb0qlc\xa1\xb2+\xea\xc6\xc6\xa7Y\x00\xaa\x1a8\xa8\x89\xe3\x9e8\x1c\xf1>\x17\xb9\'3\xe6[\x8bB+\r=\x8f\xd6\x11\xc8\xe2P[\xf5P(\xe4N\x02x"\xfd\x1dvN2`\x93\xcb\xcd\xca\x1b\xc3\xd9I\xa8\xc7\xe3\x82U\xcd\x1b[\xe4\xf7{^E^7\x83\xec\xdc\xcf\x8a<\xe2r;20T\xb1W\xa2\x05\xa2\xaa\xcc\xff\xb60\xd23\x0eC\xfd\x8bV\x15Oh;\xd6\xac\x92\xf1\x9a$%\x89X\xab\xc1\xebF%\xc7\xb3\x19\x05RA\xb7(L\xa1\xc1l\xe7l\x7f|\xb0>f\xfb\xdf\xe0\xc4\xad\x82\xa9\xa8@\xf7>\xcdx\xba\x9eF\x11y\xa3\xfbF\xe5\x1d\x918xb\xcbL\xef|\x94\x9bF\x93\x14N\n\x85Z\xd1\x98\x9f*r4:\xfa\x0c[V\xcf\x16[V^\xda"tv\xbd\x1e?\xbc\xd2\xd7P\x83\xbb\x19\xe4\xa0\x8c\xbb\x8cq\xf2(\xfb\xe6v\x00\xa8\x96!\xff\xa6\xa4\xc1F\xfa\xd5@\xb0\xca\x8ax5F\x95np2\xe7\xa5(\x00[f\xe7\xfeWHR\xbd\xe0\xe6K+Z\xd7\xd6\xa5\xb2:\xd0F\x8a\xd5\xf6\xc4\xa0\x15\x18Q]$\x89\x14\xf6\x17\xa9X\x9b\xe6\x12\xbft]De\xe0$\xb30?^!\xa3\xfeN\r\xa4d0"\x89\x01s\x10Q\x1f\xa78.I\xae\xce\x1e\xc0\xcaE\xbbb~v\xdd\xba\x07\xf9x\xb3G\xd8p\x8ciD\xe3\xda\xdb\xd1\xa8\x1aP\xdb\xb9\x83:O\x828Q\xbc;$so9\xcdt\xcd\x1b\x1a\x85?\xa3\xf0P\xd2\xe4\xffnBb\xd3\xf4\x81\x85\xc0\x95`I\x86\x07\xed\x0c7.\x95fnq\xca\xa3\xcf\xc0\x84\xe6 
\xfd\xa9h\x8f\x13|:)\xbc\xc5[h\x94\r\xfd\xb6\xa9\xf6\x89\x91jU\xec\xfc\xeb\xd5]\x06\xf2:\\\xdaV\xee\xcf9\x94\x1d\x18\x1b5\xf7\x92=%\xbf^G;E\x1fO\xd37\x9c\xf9\x8dS\x15\x14\x8c7&F\x1f{Sm\x91\xbe\xba\xf0\x10\xf9\xeb\xd5\xec$)\xd4K\xa3\x8e\x8eo\xba\xa6\xd6\xc9\xe8\xe6\x9e\xce\x855p1\xff\x1a:E\x95\rl\xe2\xa6\xe3\xb2vY\xbe9&\xe0\xcd\xa38\xd1\x16\xad\xd9\x86\xe5\xee\xd2\xb7AW\x86\x1ez\x8f\x93\xec\t\xc7\x0b\xea\x06\x9b\x87j(#mk\xbc\x03\x1d\xa4\xe8\xc7N\x04nOr\x0e\x936\x0c\xaf\x862\xbd\x82WWC\x04\x19\xd3;\x00\xc5N~\xdf\x0fO\\\x14\xbf\xbd}5\x83\xbf\x87\x8a.\xe0\x17\xfd7[!\x83\xbe7\x18\xfe\x99%\xe7z\xff\xb6\xc23\xffA\x93\xfc\x8c\xd7+p\xee\xd1\x83\xf9\xbd\xe2\xbc\xb9\x95\x85\xedJ\xfe@\xaf\x8d\xddv~\xc3\xce2\x92\xcb@\xf1\xe1\xcbq\x1d\x1d\xa5p\xa9z8\xa1\x86\x96gj\x7fv\xc3\xe9\x97W\xc3\xbe\xde\xf3\xa92y\xf0\xcb\x8aQ\xe6\xcbb\x8b\xf1\x8cx6\x92\xe2@F\xd5\xd6\x86\xc7\x90\x9b\x80\xd0OL\xe4\x1b\xe5\x1e\x0b\xfbA\xb0\x94\xf4\xfe\x99\xc8\x96\xdf\xfa%R.\x1b{\x9c.\xdf`\xc1p|\xe3o\xc9\xfe\xb7\xe7\xe3M@\xd1\xc1.t\x87\x97\xbc\xf6\x15\xa9{9WqH\xfa^E\xf3\xf8\x8d\xa4\x1a>\ty\x01\xf0\xa6\x85\xb5\xd5PA\x1e\x82\xf7\xd0\x8f\xa08\xa9%w\x13>\x8bA\xd5H\\$\xb67\xccE\x89\xd7Rj\xb3\\\x18\xe2X-\x93\xbc\xf2\xa7F\xb2\x8bL\x07G\xf7\x8a\xda*\xf6\xab[F\xe3k\xe9\x9aT\xf3\x82\x0e\xad\xa6\xe9T\x9d=b;\xff\xcb\x1f}\x97\x1c\xcb\xaa\xc9C\'!\x0e|\xb0f\x9d&\xc1\xed\xb0-\xb7\x11\xfa\x9d&c!\xa8\xcdL\xd5\x00^\xea\xf4\xc1\xcd\xb2D\x88o\xefOE\xc79\xf2\xf4\x9e\xdc\xb0\xb4\x81\xf8\xd0\xc75_\xef\xae\x99\x7f\xb6\'K\xde\xdf\xb38#\xc3\xfc*+\xa7\xdf9\xcb}\xf2\xaf\x11\x07\x00kG\'?a\xaaf\xcd]+\xeeR\xbcF\xd9\xc8\xfbu\xe9\xa8\x95\x03e\xcd\x8f\xdd\xcf\xc2\x9fX\r\n\xb1\xf6\xba\x86\x8a\xc3\x04\xfd<{\xf4\xca\x8f\xd7\x9f\xce\x10\xbcD&\xbe\x04\xdf`\x8e\xa1y\x08\xe9Z\x15\x05,\xabu\x99\xdd9o\x9a\xa5\xfd+\x1a_\\\x93TKS\x13ctc\x9ap}R\xf4nu\xe0\xb9\xcd\x1f\xbf\xcf\xae[\xaa\xae\xdc+%T-\x7fh\xcbX\x0f\x1d\xd0z\x00\xea\t\xb4\xea\x9d\x1f\x82\x1a!uW\xad*?\xaaU2/m\t,?\xdf<b\xe1\xde\xd8}k\xf7nx;\xdb\x7fP{\x1f\xe4\xbfb\xef\x97\x0c\xf6\x1e\x91\x0c\xc5@\xecg
 ]\x07\xa1\xe6\xc1!FnS\xe1\xd2\xc5\xa0;Fn\xe0\xdd4\x85\xdd\x07:\xa4\xa9\xcan\xbf\xd0@\xa7\xaf\x84{\x93\xb1\x1fx\xbbj\x11\x89\xfa![>\xb7\'\xfe\xee+<\xf1B\xb8\x927\x93Q\x97HW\xc4\xa0\xde\xfb\xddw\xb7\xc1\xb2\x04NYE\xddb\xaa\xf0\xf8\x17\x88\xcdj\x84\xc5*-g2RH\xe4\xd1\xab\xe8\xce\xa5\x95\xab\xa6 BZJP_\xf4\xb2\x9a\xb2\nq:\xa8\x1b\x1b\xf9\x9e;\x9d\x17\x0e\x1f!v0\x14\xbb\xeb3\xb3Lh.}\xe9N%\xe01\xd2\x8c\xf2\xe4Nu\xe1\x08\xd9+J\xdd\xad\xb3\x1e\xb8s>!\x8f&\xd3t\xc9\xbf\xf8\xed\x7f\x9a QM\x85\xc2\xb5+1E!X\x13\xc6u\xcd\x9a\xe1\xe0\xda\xd6\xb7L\xe0s\x9b\xd9\x03,\x8c\xb2\xaboo^\xa8\x85\xd4\x93M\xc4,\\\xa5\x86\x14%\xee\x03\x991\x10\xe5%\xd7JV\xdc\xb2]\xe6\xe1:\x04Q\xa7+\xfb\xe1\x0e1\xa0U}W\xf5\xb0\x03b\x9fl\xfdZo\xe8\x01\x91\xbb\xa4\xb4\x80.\x8d\xe3~\xbfw\n-@\xbf\xe8\xael\x1d\x1d}b\xe5\xae>\x97\xf2)pk\x1c\xeb^\xa1\xfb\xf6\x07h\xc2\xe3\x05wz\xe1t\xac\xef\xb5\x13cP\x83\xcd\xb5\xeb.\x7f\xb4ZQ\x0fua\x8b\x08\xb3\xe2\xbb\xa89m\x1d\x16\xe9\xdf^d\x06\xf3#\xf3\x1cR\xe1\x8b\x99Dn\x8dW\xbb\x06\x7f1\xdfMIW\xe6\x1a\xe1\xb3\xb4\xc2\x86\xa8\xfdR\xf3\xb9\xdd\x8a\'\x17Ri\r\x0f\x90^\xf3\xe5k\x96q\x0f\xf8\xee\xa2\xd6\xaa$\xe7\xec\x9d}\no\t\xe8\xc7\xb6\x04Q\xaf`de\xf5\x14v{\x99\xbc\x18.\xf3p\xc1\xc0\x0c\xed\x8b\r\x8b<)\x0e%\xf6\xcb\xefI\xad%\x99\xf2\x03h\x8c[-Z2\xcb\xf1D\xf5\xc4c5\x98\x9f\x1b-\xe9|\xc6\xc1\x91$[\xce\xf24\xb0\xc2z\x1d\xcb\x95s\xef\xbd\r&{\xff\xae\x03\x8a\xf2\xd9\xaab\xd5\xa3\xd0F\x7fB\xe7/\x01\x81\x1c\xc1\xf7\x05\x06\x9eV\xd8\xb81\x87"\xb0\x92Q\x1e1\x02\xca\xe9f\'d\x03\x8aEd`/\x9e\xb2\x88aH\xf3\xf4!&\x15\x8e\x1d\xf0_\xbf\xb3\x91k\xfe8\xc4J*\xad\x15\x1c\x16eN|G\x92\x91\xa3\x05\xcf\xb6Ei\xca$X^\xac\x8dY\x88Z\xd0\xb7\xc0|\x00\xdbB=\xaaBe\xc8\xe3\xb5Vww *\xa9\xd5-C\xfe\x9c%\xea\xf7IA\r\xdc\x11v|\xdb\xa9v\x9d>\x90\xd5\x1c\x18\xd4\xd3\xc3Z\xcd\xc35x\xaf\xd7\xdax\xb8\xaf\xa7\x13\x94\x1c\xab 
\x8a\x7f\xf4\x14\x89C6\xf2\xd5R\x19\x18[\xe6o\xd9\xd9\xf3\x8e\x80\x87\xcd\x9e\\\xb2/\xeb*\xd5\x91.\xc4V\'\x17\x81\xb5\xc2\xb7\x06w\xad\x80\x01\xa7\xb3\x86^\xc9\x1f-\xa9\xe8\x0b\xd2\xea\x9f\xc6,\xfd\x85r\x17\x12\xad\xfej\xb4\xccn\x1c\x18l\xb3A\xeb\x1c\x1d\x87n\x07\x96\x13\x92\xa8w$\xa3\xd78?\xce\xbf\xb5\x0f\x9b\xccRb\xafIT\x8d\x18\x9e\xb3\xc3C\xbd@\xee\xd7\xc5\xce+\xbf\x04\xf1r\xb3\x02n\x14\x1c8\x87\xeaZ\x8eo\xd7g\x18\xbe\xb9\xe5\x01\xc1\x8b\x87\x9a*t\x1c\xfd>\x1c\xa6H\xd9e\'\xa9\xf3{\xc1u\xc3\xb2\x82R8\xfa\xc8\x0c.V\x90w\x01\x18\xf5\xde\xe6\x0f\'\x1e\xc3\xff\xa8\xbfX?Ts}we\xb8 \xcd7\xaes\x8e\x06\xce\xf7}\xc2)\x18\x992*w\xfc\xedH\xd8\xe5a\xd17?\xe7.\x01\xf0JS\\7\xf7#\xbe.\x1b\xad\xc6\x82\xf1\xda\xab\x90{\x9a\xb8!A\x1f}K1\xfb\xacP\x17U\xa9\x11\x84\x81\xa3\x17%\xc3\xc6\xadLq\x8fe\xdcf\xd3\xf7\xbd\xeb\xb4v\xf8a\x01\xe7\xc0\xbc\xfe\x10\xfb&\xa7\nx\xb2*-d\x8a\xd2\xc6\xb8\x8a\xcd\x97\x8b\x8c\x84\xc5\xdc\x95\x93\xcf\x8e\xf5\x0bOs\xeel\x1a\xe5\xb4\x86\xfe\x85?F\\\x17\xef\xd9\r_f\x89\x02\x0b\x08\x8d\xc1\xeb"\x9e\xc7a\xd2k\x89\xc0\x83\xba\x97Y\x9e]\xaa\x14\xef\xe5\x8b\xeaW\x82J_\t~\xc3w\x10\xf0\xee\x1e\xf3\xa4\x8b\xe0C\xf4\xcf\x13k\xc0\n\x83\x87\xa6{J\x138~\xbd\x1c\xec7\xf9ay\xa4woi\x98\xcb\xab\xc9\xb8\xf5\x18\x95\xb4\xba\xfd\xa8`\xfaE\xb1\x84@\xf2\xd3=u\x92Z\t\xf0w\xb5\xacg\x16\xb2\n\x85x\xec\xf6\xdc\x8e.mv`\xd9\x9f\xf5Sg\xf3)&\xce2\xa1\x88*_\xdb\xb9@AM\xbf-<\x11\xfdmjR\xee\xd6\n\\"\xcd\xd9d\t\r\x04\xb9\xe7\xc7\xd2\xdd\x9e\x9dD\xab<!\\;R\xd5\r\xd86d\xf6O:\x0bU8n\x16\xdf\x18\x9b<\x0bZ\x16\xcaz7\x92\x8b?\xf9\x84\xbd\xcdgx\x06\x1a!\xf3\xcf\xc1\xd1w{\xdfA\x8f\x02k\t\x99\xc4\x95\x0c\xbem\x1e\xa8F\x98\xaa\xf6\x17\x9bYf\xe10\xc3\x08\x13}\xc6\x0b\xcc\xe3\x87\x93\x02\xc5NIwEk<\x85\xb1\xd7\x18\xff\x9c\xfc;\xdc\xb4D\x07/\xb3\xd2\'#\x1f\xbc\xe9\x17c=s\xb5\x16\xb3\xefp\\\xa7KR\xf0=r\xc7:\xfd5\xf7\xb7ej@\xff\x1a?w\x19[\xdda\xf9\xd4\xca\x95\x95\x0cL\x18mAs\t\xfd\x1d\x8e-\r\xbf\xf7\x17\xd2\x8bJ\x92n\xf4\xd3\xca2\x08\xff86\x83\xdd&\xe3\'\xb2\x9f-\xa9.\x1d\xbd\xef\x9f\x16\x9f\xac\x8f_7\x9f\x90
\x1b\xab\x19\xaf\x01\xdbY\xbe\xcf\xf2y0\xa7T\x82\xb7\xc6s\xee\x1f\xf4\x8eX~\xf1\x93q\xf5\xd1\x8c\x06\x9e\xb8o\x9d\xa2\xceN\xe8\xbf>\x95]6\x97H\xebK8\xee\xd9\xd1<\xb2!\x0c\x7fKo-1\x99\t=\xb4\xba\xda|I\x91\xb8\xab\xbd\xfb\x07P\xdb\x94\xc5\xe1<;\x95\'W\xcd\xb8@*D\x87\x0f*\xcf\x19y\xeb\xc1e\xcc\x9e\xc9\xb3\xcb?\xbfv\tRa\xcc\xdd\xc6o\xb6Xw\xf1\xd1\xd9x~\x85b6\xb7\xea\x82/\xe3otk\xfc\xa5\x83"\xc6\xd3\xb3O\x02\xb9\xd3\xc7\x04\x04\xbe\xcf\x8cDF\xb3O\x8a\xce/\x04\x0e\x11l\x92\x17}\x16\\\x83\xa3\x13\xd5\x06\tu_\xf1\x812\xb0x\xadtK4J\xb1{%\x0c\xd0\x97\x0e\x11\xedA_k\x91\x98qO\xcf\x12WR\xa8\t\xa3\xa4\xfc\xe3\x1b\xe00\x11\xc5\xae8Q$\xc7,\xaewP\xbb9\xdd\xa4\xc0G\xbc\xe7\xb7\x95\x1ei\xaf\x101\x7f\xd7DSkw\x9d\xbb\xe2\xd7\xc9\x90\xa9\x8bk\xee{\xff&s\xd6\x02\xe0-\xf4\xfc-\x19l\xc0\x8f\x95\xd4\xe8\x99\xa3\x05\xc0\xc8d\x04e\x8eS\xb9\x02h\xa0\xcd\xa9nkD\x99I\xb7Z^\x82g\x9d:\xf2\x1b\xd4\xd8\xd2+K\x7f\x90\x1d\xed?\x8b\x05\xf8\x94\xe0\x1f\x94%PC%7\x1d\x1a\xf3\x96~\xfa\xd6/\xaaw\xefk\xc9sPZ\xa82D3 \xab\x16\xe9\xf4\x12\x88k\xdb\x8b\xb3^dl\xf2\x14\x9d\xaa\xd2\xb0\xa3\xc1\x9f\x05W:\xb0\xe2\x0b\xe0q\x8a\xe7\\\x88lv4\x01L\x0e\xdd^\xd0n\xd0m\xd4<\xe8\x07\xc3\x04\xc3\x9d\xab\x871s\x9d4\x97M\xa7<\xbd}"a,\xd8y`D\xa8;\xf3\xce\x90F\xed\xee\xf4\xf6\xd0\x9f\xe1\xfesA\xb8\x0f\xfc\xc2`\x9c\xce\x89\x0b\\\xa8\xe3\xf3Kv\xba,\xecf\xa4\xd5\xb5/MH]\xe4\xbcL\x7f>\xd8=\xbfu?%\x81\x1b\xe68\xc0?\x85[(\x9bj/I\x05n\xa2/\xe5\xba\x894\x15E\x8f\x98O,\x04!\xa5}U\xde\xdd{\xae\xe9^\xd5\xd0\x9e\x1c\xc4\xea\xc3f\xa4\x05-2#&\x9e\x99d\xe7\xf9\x9bj\x8cs\xeb\x16\x81\xe4\xfe0\x13\xa7\x0c\x94l\x9c\x8a\x94\xd3\xd7d\x9a?\xee\xd7\xb0\x8a9\x97\x84W\xa9|\xdb\xe2\xb1\x8cC\xb0:\xe7Y\xfd_j\x19\x864\xfb6@\xacV\xff\xd3\xcf%\x84`1\xec\xa8\xdd\xe0\'t\xed\xbfx\xb9\xc6\x87\xbe2\xf8\x9b\xdd5\x93G9\x83\xf7D:\x87U\xd73\xac\xb7\x04\xc3\x9b\x18\xa8L\')\x8co{\xa6Vp\xe6\x1f\x1d\x9dM\x0b\xcb\x01\xba\xb7\x1f\x0fz\xc2.\xa8\x9c\x96\x89Q\xc4-\xac\xeey8\xba\x01i\x94=|\xedl\x1f\xbf\x9b\xd0\x9eeb\xc0k\xf8\x90N\x15\xf6\x9d\x88W\xab\xcdD\x97\xcd\xec4\xa4\xfd\xc
0[ \x17\xace\xd95\xd8\x1d\xa3\x9a\xe2\xedN\xf6\xe5\x00y\x1f\xff\x85\xec\xb7BVI)\xd7\xae\xc76\x15 e]\x1f\xa8\xd1\xa4y\xf5\x06\xff\x9eVKU\xb4s\xf7\xfc\x82.\xd8\x9a\x16\xf3\x1b\xa9\xba\xa5\xd2,\xb0\xd3\xd1\xb5w\xf8\x90\xc7a\xdd\x04\xbe%\xaf\x9f\xcd\x16"\x97\xb1%QU@m\xa3a\xa4\xbe\xf7M\xc6\xce\x93\x0e\xcd\xc8\x86\xb6\xcc\xdc\xf5\x0c\xb3\xc8\x9fE\x97kTDv\xa3\xb8\x14o\xe7\n\xc6M\xfb\xd2(\xf9Y\xff2\xd2\x1c\x0e{\xf9\xaf\xd4\x9cDWd\x93\xdcn\x81\x89W;\xb4\x919\x12\x1e\x9d\x8d\x12\xe08\x1b\xee\x94\xa7\x9b\xbba\x02\x1a4q\x8a\xe8n0\xb9\x18\x12\x1c\x830s\xa3C+R\xcb\x8d\xfe4\xe3,\x8a\x87\xa7/\x0c\xf6>-\xd3\xa8\xba\x14[x\xe2\x02\xf3\xfa\x8fSXR\x1bR\xef\x98,\xfa^v+\x84\xc8\xc6\xb2\xbe,\x89#\xc5X\xd0r\x9dT\xf4\xdd\xf4\x06\xf4=O\x88m\xf0\xb7*y\xe3\xfe3\rJ\xfb\x9c\xff\xf2\xdc~8\x81\x8e:\xdd\xc0d\xb9\xc0\x8f\xd8R9M\xb7\x8c\xc72@\xbb\xf1\x87c\xc1\xa1\xad\x17G\xa5%~6\x8f3\xc5\xa6\x9b~\xafz[L\x18\xd0\xd7\xfc\x85%;&H\x89P.\xd7(\x98zU\xf3A\n\xfeC\xc9\xe4l\x19RA\xb5\\\x94\x18\xc0t\xf6\xdc\x9d\xcf\xc0]\xb4\x89P\x80\tt\xbc\xc5]\xaf\xd0X\x8b\x92\xac\xe7#\x95\xbd\x9ee1~\x0e\xa1l\xea\xe7.4\xe2qi\xd6\x844n\x15\xb9\x16\xce\xb7\x9e(\xc5+\xa3\xa1"\xd3\xf9\xee= 
\x9a\x85"!\xb6\xe6\xbb\xe4\xf1\xde$3-\xaaj\x0e%;\x13\xdc\xec\xb2f\x82}\x12v\x98\xbd6\xbb\'\x1f\xb2\xba\xc5\x11\xfd\xe3\xfd\xad\x1d\x8fV\xa7\xda\x9d3{\xfe5\xa2\x95\xfd\x92\xb2\xa2\n\xa9\x86-A\x17\xfa\x8d\xd9y\x06\xf7Lcf\x17\x9a\t}\xc5\xde\x9b\xd9\x8aF\xe2H=\xe7\xfaG\xc7e\xed\xb4\xfc\xc0\xff^\xf6n\x1d\xb99\xe7\xce\xf5U\xad\xd6,\x8f\x0cN\x17\x07|\x86\xbe)\x1es\xce\xf5\xff\x94\xe1\xa2\xb1\xef\xb4\xf9\x19ao8v\xd0\x98\xfd_s\x96\xdfbG\x93\\\x90\xe9\xbb2%\x82\xe6\x0b\x0fNKA\x95N\xcc\x0f\x8b\xd7\xd8vy\xc7\x80\xaa\xd9\x8cI\xf7\x8c\x10ZH\xe3k\xf5\x84\xc9\x90\xa6\x92PwR?\xfeB\x93\x1de\xe3UC\nO\xa2\n8u\xf5y&,\xbe%9\xda\x1e\x86\xb0\xeb\xfe\xfb\xb8\xecp\xf9Uw\xd2\xf4\x8d\xda\x1f6\x17Z\xf6\x17W|PJ\x1eW\x8e\xef\x0be"n\x83/(\xe8\xc8w\x1b\xaax2\xfe\xdd&\x06\xcdY\xce\xc4\xd2\xe7\x1a\xd6$\xfb\xd5-\xeed\xda\xbd\xa7\xc1\x9e\x0bx[`%\xfa\xdb\xa4]\xbe-]Y\xab\xfe*\xa0\xff\x1d.9|#\xfc\xa0\xf0|\nv(\xbdY\xd0+\xe6Q\xd09T\x13Al\x00\x107u\x1b\x1a\xf6w\x98U\x1e\xd4\x1c\xa3\xa1\x05C\xd0\x16~\xfb)\x04\x9c<&\xc5*\x8eV\x9d\xb3\xfa\x88\xeaO\xca\x04k\xc1,\xd4\xf6\xffLG\xb5\xc2\xfc\x8ab\xbf\x04\x7f\x8b5\x93\xe1\xf2\xf8`H\xcf\xcdT\xed\xcdHm\xe9\xaa\x9bK\xae\xba\xb5Kvw\x9b\xb7!<\x9c\xe3Q\xac 
y-\xe7\xbd\xec\x17\'\x87\x07]}\\\x86\x8a\xf9\xf6$\x1c\xef\x9c\xb5\xce\x99`\xb4\x8e\xe6\xd5\xfdX\xd6&\xbe\']\xa0K\x0cU&\xe1^\xbc\xdcc,e("\x02\xbf\x9e\x80\x8a]\x85aY]\x9a\xd7$\x13\xda\xa7\xcd\xa2\xaf\x93g#\x80f_@)\xc8\xaadx\xb6=\xc3\x80D\x8f\x82]+l\xb6\x00\xc3\xe0\xcf\xea\\\tz\xfc\r\x96\xa5Q5\x11\xa8@\x11y\xebj@\xe7\xf7\xe9\x9f\xd7\xa9\xa7\xc4\xb0\xafv\xf0\x96\xf7gm\xb1\xa5;\xefGc}\xc3D\x92"\xbd\xd1\xa5m\xfe\xe9\r\xcf\x13%\\\x8c\xc8e\xc3I])\x18\xa6TP>\xa6\xac\xc7\xa8\x8cl\xdc\xfc/xH-\x88\x93K\xfe\x99t3.(\x81\xf4\xd6\x04\xb0\x1f\xe9\xde\x95\xfe\x81V\t;\xe9<V(a\xe8\xd1s\xe1\xe9\xe0\x8b\x94\xe7*\xca\x92\xce\x19\xb0\xb6\xc0\xef\xda\x14\xa2\xce\x11\x99\xbc`G\xc914\x93LK\xcf\x89ARD\xdf\xf3L\xf2aR\xb59-\xc5L\xb9&\xed\x14\xff\x01\xfb~\x8e;S.\xd0.\xcbD6@\x12\xa0i<\xeb^\x17\x88m\xaf\xbb\x02\xc1\x91RH\xad\xf7\xf8\xe3\xb9\xd8\xfbe\xa6LK\xe7\x9e\xde\xde\xe6\xb6\xed\x0ed\xba\xae\x10&u\x93_L\x14^Na\xeb\x0f\x8a\xf5\x94pY\x1aF\xa1"3:\xb3$\xff\xcc\x112x<\xd5fX\x88A/\x1a]\xb5\n0\xec?\\?:R\x86\xa8R\x9aq\xc1#\xb9\x9f\xc0\x93\x0e\xcf\xd8\xf1\xce\xf4}\xab\xea[\xb1\x99\x86+\xb0\x98[?\x9b\xdf\xc7\xc6\\Qi,r~\xfb\xab\xc0\xa1\x1al\rf\x1f\x88\\Wx\x8a\x1ay\xc8\xe1\xcf$\xf6.U\x97t\x0e\x93&:\xf9%j\xfckw$_+\xcf\x9f\x1c\xff\x88q\xbf&\x86\n&\x1f.\x9f\xae\xbb\n\x1bu\xfc\x8a\x83\xafSnd\x08\xaa\xab\xfe\xa3|%5\x04\xc9\xf9r\xd2f\x9fv\xdbT\xea\n\r\xa5tA\xa7\x97\xbf\x014f\xca\xaa/>(R\xcdq\x84\xfb\xb3\x9d\x88Z\r\xeb\x97:\xca|\x0f:/Sk\xfc\xe2dW\x88\x91\xf0\x98\x02w\x04Q6\xc5H\'n#\x19I\xa2j\xf6]\xd5\xdf\xf9\x17\xbb\xc7v\xae\xd3\xbf\xb3\xb6\x1c\x05W+\xf9\xc4\x15+\x8cA\xd9r\xc0\xda\xf7\xfes#&\xb9f\xce\xee[E\x0b9\x85]\xadKjH\xd1\xd0h\xcew\x82\xb1\xbe\xb4\xeaEA\xbc\x9a.\xa5i%gO\xcfN\xfe\x1a\x04\x1d\xa5\x7f\xb5\xf7\x84\x06>_\xc1\x17d?\x98\x10\x01\xb3\xc4\xf5\xb3\xdcRu\x9f\x17\x1fgO\xdf\xa1\x8d\x9d\xcb\x15mI\xb7-Ql\xb9:\x04\xc1\x1f\xee\xa3 
O\x8f\x84\xaa\x83)J\x02\xb0\x82K"\xf4\x98\xc0\xb4\xf7\x1b\xd7\xb0\x12\xcf\xb4\x7f\x07\xbd\xbboZ\x83`<\x98\xa9\xd3\x1c\xe3\x10\xa4\\tQ!\xff\xeaV\x80\x8a\xc6\xb9\xb4C\rw\xb2p\x04Ag2\xaf\x05\xd8\r\xeb\xf9\xcc\xb2\xaa\x12\x19e\xc6I\xa59\x82z\xdf\rxd\xf1\xd7\xe6\x84?\xf2\x15\xeco\xb64,\x18W\x0c\xcb\xe3q\xd1\xcd\xa1\'\x04F\xad\x18\xb3Q\xb5\xce\xa6D\x15\xcb\xc4\x1f\x97\xa6\xd2\x9c\x1f\xdf\xd7\xb5\xbaS\xe7CQ=\xfc\xd45\xa7N\x17\x00\x0e;\xaf\x0b\n\x16\xa1\xed\xb9\x7f\xe6?\x96\x0b\xcdE5}\xaa\xd7\xf7\\\xe9h\xc7\xe3\xb1\'\xf3.\xd9/\xf1\xc6\x8ca\r\r\xc8\xb8\xbbA\xe1w\xe9\x16\xb1\xafsO\x9a\x0b\xac\xc5\xf8\xec\x83\xe9s\xf0\x9e\xd3\xb2n\x9a_^8&\x1b\x90\x1ao\xa4vKS\xc5\x01\xd8\x82vu\xd4 \xed\x9b\xa3\x9cw\xd7\x8d\x0c\xaa"\xd4\xe0G$WQ\xa8\xc2r?j\xbb[S\xae\x81\xb2\xd6\xb5\xf9G)Qr\xe2]z\x10-\xe3\xae\xe3\xb6\x9b\xe0\xd0]jt\xb8\x0c\xfa\xc3G\xc5\xee\xf3.$$\xb7\xe755|Lus\xdd\xca\xf3\x00\x9d\xb2\xe2U\x9c\xd0%"\xddC\xce/&u\xdf\xf3\x9fL\xd8%Y^-\x19f\x9av\x87\xe0\x82\x1d\x84\xd6\xa0\x18\xc4~?x/\xc9HL#N\x83\x17)\xa5\xc3B<+\t\xfeh\xcf\xb6y\xf8\x9b\x83\x87t\x15\xb2F\xd2\xfc\xe5\xdf\xb30b\xe4\xd2-6\x84\r%8\xb5\xa0O\xeb\xaf\tV\xc80\xec*\xc1\xe2\x0fVKV\xeea\xd2\x0b\xfcb4|H\xdc\x99<\xf6\xcf\xe7t\x1d\x0f\xcf\xad\xab\xa4\xc1\x95\xdc\x87\x1cH?\x13ASY\xb2\x9d\xa3U\xd4n-\xbc\xf9\xfe|\x97Dx\xe4\x86f5\xd7\xf7\xaet\t,\x07b\xf6\x1e6y2\n\xa3\xf4\xc0\xa8\xbfH$0KTi\xc5QS1&\xf1\x18Ij\xc5;\xcb|\x1d\x8f\x18\xef4\x94\x08N#m\xac\x0et\xfd7\x8e^Q\xa2\x89KH6\xd7\x1eJ\xbex\x94\xef<\xf8}\x80^Av\x1bNZ\xb4o\xf0\xc85EO\xc8)x\x05\xc1\x1a\'f\xdat\xab\xe4T\x08:e\x7f\x0e\rr\x9aI\xc4J\xdb\x86BX&7F1\x1c0N\x1d\x8e\xd5e\xb0\xe6\xa8\x90\xb5\x0fJ\xe6_\x02\x9d\x08\xdb&\x14\xfa\xb3\xb9O&\xb3\x90mp>\xe3\xc5N\xc1\xa1J\xe4Ts\xf4au\xbb\r\n\xec\x12\xcd\x9e\xacG]\xb9\xee\xee\x08i\x99Ii%\x13\x944\x1c\x07\x95\x9a\xca\xb9\x83s\xcd\n-\xc7t@I\xccM\xf9\x88XU\xcd\x8f^\xf9\xdaV\xcf\x1b\xef\x0b\x19\xfb\xfcq\xf0#~\xf1\xa7r\xca\xb5X\x9dtN\xf7v\x86\x00\xeezN\xec\x82\x16\x88\xc9\x9d\x93\xf4\x7f\xcd\x8f\x08\xf2\xc7\xbc\xfe7\x87\x1c\xc3\xbf\x9d\xe8l\xf9I\x1e\
xc1c`%\x98\xdbU\x10\xbb\x19\xe1\xddK;\x07\xd5\xe5\xbel+kl;\x7f\x1er4d\x0b\xde\xf4RDw\x7f\xa2Z\xec\xba\x9e\x07\xd2\xed\xd8\r\xdd\x91\x93jbh}P\x05z\x9f\xef\xbd\x91\xb9\xf3\xafP\xee\xc8Z^\xfc\xd3y\xdc\x02Z\x8dz\xfeo\xa1=\xf9\x1a=\x97i\xbf\xa3\xb23\xf6*\xf2\xf1\xb1\xfaAs_}%\xbef\xe2\xce\x8c\x02[\xff\x1e7A\xf2\x91\xa7\xa8\x85\x8b\x1d[\xe8i\xd7\xef\t\xe2\xe1?\xb4\x8d\x7f\xf4(\xa7W\x1azU\xc0\xec\xc5-\xe0\xac`\xf5\x82\xbf\xeb3\x94\xe1Z\x9e;\x15\x0e{\x96\x7f_fv\x12n\x8b(\xb8\xa8p/\x1ce\x80\x1f\x05h\xf5\xaeN\xd7\t4\x05\xdc\xf4$(\x16\xc2=c\x9ed\xaf*\xbf\x07\xa0is\xea\x94\xb6\x17\xc4w8Qa\xbe\x9d\xfd\x14+d)\x1c\xfbF\xe5\xfb\xfe\x92\xb8\xec\x1fu\xd2\x03\xc6HRj$Qj\xbb\xd6\x89\xbbj\xe5\xcf\xea\x07&\xd5)\x86uF&\xdd\xb32N\xac\x82\xa4\x05\xf6\xfa\x1e\x1f*2\xae\x90\x11\xba\ria\xfb\x8c\x7f?\xacd]\x1d.\xb6ML\r\xcf"\x16!\x15\xcf\x81\xe6\xf4P\x01y\xdd\xf7i\xf6\x03\x88\xc8@%J\xd9\x1c\xa4\xa5?\xfa\xb8p\xff\xdd\xc6:@\x17\xdf\xcd8\xc7\xed\xc7\xd0\xe7\xf7\xb85\xd5\x8f,\x12\xcc\xfeQ4h%H\x92\xcb\xd0\xba\xee<\x98\xcb\xb8\xa3l\xf9\xf0\x8dg\xd9\xdb\xe9\x024c\xc9\xef79v\x8a_}\xed\xf3\xb9\x80\x81x\x98\x1e{\xac\xb13w\xe3X\xa9 \xdc\x9d\xb1\x08\xbde\x81\xeaV\xdbi\xba0\xf8\xed\xe7r\xee\x89<}\x90\xbb\xe4\xe7\x1d\xb5\x9d\xca\'2\xdb\xf2\xcaBD\xb3\x83\xf3\x95\xa5\x82VO\x98\xc5\xbfR\xc6\xa1\xf0\x02\xaa\xfa\x14\x1e6pz\x0f\x1a\x19\xe6qSc\x9ete\x9dZ\xbb\xf1\xb7\xc6\x01\x0b+\xe5\x18>\xa9\x99\x16\xa6\x1a X\x0f\x8a\xefNS[\xc4?*b\xbf"R{\xd4\xaez| V\xbdM\xd9\xf6\xff\xc3\x01\xca&\x97\xe8\xdcW\xe3\x9eM\'\xac\xf5\xa5-\x9d\xa54w>*$e,\x05\xe5\x81\x11[\x8b\xac\x03\xcc\xa1 
\xe4\xea\xbd\x92\xa2\x1e\xb0\x94\xc4\x9c\xf8\xb1\xa6\x10p\xbd_\xf7\x91^B\xa4\xaeHW\'\x02\xa7\xf6V%!\xa1\x90\x97\x96%\x08\xf4\x96T\xf3\xd8\xde[5\x05X-\xf9\xd6\xee\x8f\xcek\xaa\xc8\xa9\xe2C\xf9\x862\xf8z\xddr\xe7G7\x7f\x1d\\9,\t\xd6\xd8_\xfb\xa2q\xda\xcc\x102\xe8i#Z\xc8\xa7\x10\xa83r\xc8\x10\xa4\xbd\x1b\x02\xbb\xcf4-\xda>\xb2\xe0\xdf\x0f\xec0\xf8\x94\x1d\xc01\xc3\xe3&\x11?k\xbe\xd1PF6}\xf0\xe5\rQ\xcd\xceo?\x85\x10${\xd1^\xe6/#\xd5\x9f7\xc0\xac\x83@\xe0\x00\xe2\x82\x17-\xba\x12G+\x97\xe7\xb2c\xa7\xcbR\xfe\xa5\xb9\xeda\xda\xf9,=\xf6\x83n<\x154,\xfb\xc5j_\xf7L*?\xe9y\xd4\xf0U5.G\x9f\xdcs\xd3j\x8a\xf4\xfd\x88\x96,x}e\xc76RE\x1f]\xcbo\x1e\xa7Y\x1e6\x80KA/\x9e\x9d\x95K\x9c\x1b\x13H\xe5&-FRQ\xdb\xa2\xd8\x17\xca+\x92\x1d\xd3y\x81\xfbiR\xee=\xadM\xc5\x9a\xda\xa0\x17\x17\x91\xbe\xa7\xd7\x02\xbd\xe7o\x8f\xc9b\xd7\xeedD\xf6\xea)\xa6\xee\xc5\x8c-U\xb8\x04\xd0R\x83\xf3w\xce\x8372\x1d\xab#\xdc\xe3Y:\x05\x03\x8da\x84\xb2\rA\xee\xd1\xf0*Z\xdb\x1a\x9br\xc9RFbF\xc2M\xb8x\xd7%\x8f\xd3\xef8}y\xd2\x89{\xd6\x9d;\xc5\xde\xc0\xd7\xdc\xb0=\x95F\xeb\r+\xe3m\x9e\xf4\x91W\x9a\x1d\xf8\xcb\x10n\xa8EA\xe2\xa2\x1e\xd1\xc8\x9f\x8c\xd9\xa4\x98E\xe1\x8a\xc5\xabu\x17p\x10\xd4\x94\xda\xbeo\xd8\xad\x06\x0b\xabG}\x7f\xf2\xb6\x836X\xa6\xf32\xac\xa29\xfa\x10\xbc\xea\x18\\\x93\xbc\x92H.\x91\xe0\xef\xde\xff\\\xdc\x8f\xc83sm\x98\xe25\x7f\xb1:\xe2,\tU\xfd\x86[\x0e!N\xe7)\xff\xf8p\xae\xf99\x8d\x07\xbaaX\xec\x0b\xd5\x13\xfeXL\xad\xc0\xe5\xe9\x1c\'\xe3\x0b\x96w.1\x7f"\xcd\x06\xf4\xe0\x89\xbb\x19\x1cp_\xe1\xdaI\x11@\x92\xe52\xa2";\x1f|\r\x10\x16B\xd3k3\xbe(!Z"\xa9Sa\xa9\x11S\xfa\xb7F\xce\xac\xa61fwT=j\xdd\x84{h\xdf\xb6\xfc\xb7\x92hcGa\xb7\xeb\xc5X\x98\xcd\xb0\xd1Td\x1c\xf3\xb5G\xc9q\xd6$\xac\xea\xe3\xc3k\xf7\x96eM\xa1\xfe\xc6\xe49\xc7\xe2;\x9d\x95\xd0\xcf"\xf0,\xfbL\x88\xa3\xb1\xeb\xd6\xd1\xc7\x07\x8aE\x87s\x16\xa9,W\xfa\x1f\xda\xf7\xa5\xcc\xbb\xd4^\xd3\xc03\xec\x877\x9f\x8c\'i\xe01\xcd6\xef\x9c#\xcf\x8a\x9a\x9d,\tW-\xa7n\x0bO[\\\xd6\xb0\x86#\x94\x8c\xa3L\xaa\xfe\x85CA\xdf\xe1\xf6\x95i\x86\xd7q\xae\x83\'\xcb\xe9e*:\n
&\xc8\xed\x07\xd2f\xc5\xf1O0\xf4\r\x0e7\ro\xa9Ck\xf1\xc3\xd5D\x8f\x9e@\xf2@\x81\xe5*\x8fa\xf4\xc9\xd2\xf6\xf4N\x8e\xcbgUN\xc6\xc2px\xd7I)\xf1\xb9\xb7\x1b\xb2H\xd8<\xbf\x9b\x0f(\x0e~j*YG\x7f"\xc7nw\xaepQfX1s=\xbb\xaf\xe9\x13q<\xab!\xe0,\xe9\x02\xf0dl\x97\x94N{\xc8\xd9\'\xe8\xf4@\x96\xf4\xae?\xf6\xbbVX\x15\xef|\xe6:\xbc\x8c5qKA\xb3\xec;V\xd0\xb6\x17\xe2=hs\xdeB\x8e\\\x89|\x8dk"\xb0\xe8\x85`\xdf\xd7\n\xe0\'\x8do\x15F"\xb9Z2\xbdQ\x1b\xa3\x04\x1c%\x02i\x18\x03\x0eW\xf7\xa3\x07\x8aF\xde\x92\'u\x11{\xe6\x06\xf6\xa3\xb8B\x1a~\x89\xc0\x80\xb73\x0b\xe2\xc3\xdb2\xa4i\xb7\xa3\xf3\xef[:m=\xe4\xa2`5\xed_\x1a\xf8\x83>\xf3$gh\x8aTw%\x15\xe3\xdb\x1e^\xb7\x16^\x9a\xff*:\xfc\xf8\nd[\x19\xec\xb1L\xa6=\x0cif\xf7|7\xb8\xf0\xe6\x04\xbfb\xde\xa2v\x8b\xbb\x1e\xb7\xff\xd9\x0cee\xb07f\xccQ\xd9\x9d&\x00\x86g\x97\xca\xa4<\x83\xf6g\xee\xbf\xfd\xbc\xe6\xfe&\x92L\x9b\xfe]\xc4\x8fM\xb5\xad\r\x88\xf5\xca\x9f\xe2Pg\xdb!q\x06\xa5\xc4{\xfb\x08\xce\x9e\x99\xa5\t\xf0\xe3\x96qY\x9av\xcdz\xac\xd7\xa3\x0c\x8cW2V\x81\xab\xa0\xc8\x17d\xc7\x9a\x16a6\xd5\x86)\xafa\'\xf2\x05:\xf2N\x03_\xbc\xf2W\x14\x10\xb6\x8b\x8b\xfc\xa8\xa7I\xd5\xbb\x8bIf\x02\xba\xd2\xc2G\x8b\xa3_1\xe7\xf2O\x0e\xd8I.<\xe0\xb4\x12\x19\x11\xdc\xe8L\x1dY|V\xdf9n\xddMK\xf3+\xda\xf6\xe5Rf\xf5T\x1a\xd6+\xab\x90\xf0\xec\x81\xd8\xbc\xb3\xc1fr\x0c\x1c\xb4\x06\x1c\xe5\x1cI\x87\x94\x05\xa53\x93\x14\xcd\x1f\xc7\xb6\xd6w\xce,\xdd\xcb\xcb\xdf\xac\xbc\xadqN=6\xedf\x06[\x98\xc9\xf0X\x0b\x1b\x1dG\x8d[\x19\xa8W\xf3U\x06\xc4\xcf\xd7\xdc^\x0b\xd8\x82~M\xd9(\x19\x7f\xc3\xa5t>[\xbdV\xb2\x147\xad9J\xcc\xcf\xc5\xd9\x163\xdf\xfb9:B\xc2\x84^ 
\xa8\x974\xf3\xd9\x0f|\xe4\x1d\x953vw\xe1\xe2\x97\\\xa6L\x93\x8c\xc8\x8d!\x13\x97U\xfap\x92P9\xf6J\xf7\xf4\xb5\xe7GR\xb9\xfe\xb8\x9bo\x84\x8c\xb7\'\xde\xc8\xdf\xa3gm<\xd1m\x8e\x8c\xc2\xbb\x00K\xce\xaf\xd7\xea\x1a6\x8ds\xd6w\x87m|\x9cs\x9e\xe5\xdc/k;a\xab\xcdR\xff\x11\xe9]\xf4FR\xcd\xdc\x95L\xe0\xf6\x1b\x8c\xb5D\xc4\xbe\x8f\xa0d\x1e\xfa\x0eO>\xa7E\xee\xe6+\n\xe2T\xbe,=\xb9\xc0:\xebs~\xb6A\xbe\x9a\xdc\xeb\xd84\xea\x9bU=u\x1a\xbdz\xf2\xbaynip\xbf\xbe\xa4\xb8\x8bo\x88\xd5\xb23u\xcd<s{\xde\xe6\x8aeOj\xe0t\x85j\xa9\x0fp\xc2\xdf\xf3ow\x87\x92\xfdR\xa3\xdc\x8e@|\x86\xc3\xae\x15\x94\xb3P\x18\xd3\x9f\x06h\xb0\\\xde\xf9\xba\xc2\x8c\xbfH7#\x9f\x852\xdd\x08\xd0\x98rS\xc7c\x9d\xec@\xb9\x00\xa3\xb99\xa8ni\x0f\xed\xe6\x10j\xf7\xc5\xd7n~\x98\t\xb3\x9d\x0e\xce\xe0\xd1\'\xbc"\xcb\xd0\x1f;*\'a\xdb\x83\x9a\xd5sR\xc4\x07QG\xe1\xb6\x92\xf1F6\xfb\xcf)\x9f;\xca\x1e\xd4\x1f\xb6\xcco\x98D\xaa\x88\x9d$M\xb9Xg\x8a\x07*\xb9\xd8\xd2N\xa3?\xe8\x1d\xc5\xbb\x915\xaa_\xda\x86mJ\xc8\xcb\xddB\x08DY\x04?\xec\x8f\n\xaaW]\xb2\xf9m\xbadJ\xbe\xea\xac\xaf\x14\x026\xbb\x8aYB&\x8d\rD\xf1\x14\x1c-\\\x06X\x95%\xc8@\x19\xec\xc1\xf0\xb1\xf7\xae\x03\xf8.\xa0f\xf0=n9lY4\xc5\xb2\xf3\x1c\x00dY9\x9f#\xcbA=\xed\xf8WpW\xa5s\xae\x9f\xb4\xfc\xe8\xc5\xfc\xd5\x0f\x0f\x82\xba\x99\x02U\x8bi\xc7\xff%y\x1d\xf0\x8d\xe9\x08\x96\xe1q*\xc4\xcd\x8f\x86\xc2n3\xdc\x8f\xc3\x8d\x8b}\x92\x17i\xfc;\x7f\x04\xe0\x81\xfd\x1dm\xaa)\xa9\xe7\x07\xc5\xb0\xb9+vP\x96\x99\x97\x83\xe3\xbb\xaco\xd2\xe2\xa2\xf5 \xf1\x99\xe2\xd5\xf5\x97\x96@uE\x81\xa4,n\xd4\x02\x13\xd8\t\x9f\xe4\xbe\x10ca\x10 \xbbh\xc0=\xbfH\xd77dn\x88\xa0\n\x1a$\x1f\xe8>\x89}\xdb\xddEy\xe2a\x16\x17N\\\x84\xa0\xc0\x8f\x03-\x8a\xd3\xa7\x92\x02v\xc6\x04\x9b\x82\xcf\xca\xd5e\x0e\x0c\x93\xe7\x88a\xfdt\x17L\x9e\x12\x04\x9a\xf2\x89\xce\xa7\x00y\x05\xae\xd8\x94\xffJnPo\x15\xfd"\xb2\xd2\xe3\xb1"\xe0\x93\xcd\xdf\x18y\xed\xf8\x96z\xc1\x9f\xb2\xc5f\xcaIH\x7f\xa7\x1f\xad\x89[}\x07p\x96\x1eP\xe0 
Y}\xacVL\xc8s\xcf\x9f\xbd\xd0\xf7\xd7\x83\xf0\x90\x07\x0b\xa9\xc8\xd0\xb2,nNG\xb4\xa8}\x13\xb0\x9c-*\t\xeb\xaa"i\xd9\x9d=H[\xf9\xc4\xdbh\x99\xdc!\xaf6\xb3\xf1I\xfd7\x8c\x8e,\xc5\xe9\xe2g\xaa\x14<o//(#\xdf\x10\xb3\x8d\x0c\xefd\xb1\x07\xd8\x82\x80\xadWY\x17\r\x07\x1d\xf8\x85DJ\xc8\x86\xe6d\x812\xf7\x97\xd1+a\xd0\xa9\x0fo\xf8\xbb\x90\xae\xd8\xd9p\xffV\x03\xdcET\xd1\xe7\xdf\xdd\xef\xfa\xa3\xa7%\xf7\xf5[\xc0\xe2\xfe\x92\x1b<\xa0\xa6w\xb3\x10\x12\x15\xa4\xd4\x0e\xabk\x86\xff\x1aZZ1Z\xb2\xb5\xdd\xfb\x15,\xd0\xcb..\xea\xde\r\x99v\xfc%O!-\xc4\x0e\x19\x98e\x03\xda\xfe`z\xaa>\x02>s\xae\xc3\xfe\x84\xff\xccN\xee\xbf+m>\xe4}|\x85Y\x83vr\x9b>\x92\x1f\xe2J\rM\x844\xd94[\xd0\x84\xd1\xe7\xbe\xc6\x163\x03\xa7\xb2~K\xd1\x1d\x8a\x99\x9f\x02\xadJ\x95\xb5y9\x87\n_0\xd7\xf3\x82\x94\xc6o\x85\xd0\xbf\x18\xc89\xf6\x984`\x1b\xc2\xb3K\x03\x88\xf7$\x91\x87\x99Z\xbb\x08D\x8b\xb9\xa9\x13+\x05\x89\xbd\x02\x91\xe8}qa[\xfd\xc3\xa4l\xe3\xd8s\xcd\xa2\x80\xef\x96\xf0\x045\x84;\xaa\x14\x12\x14e\xc1\x16\x9c\xfc%xhw\xc5\x7f8\xd3J\xdb\xd4\xe4\xbb\xa6*<\xff\xbbF\xf8-\x98T\xdb\x9e\r\x98\xa3\xcec.\x957\x10\xe1IH\xdc\xf7\xf1\xb8"\xda\x84>\x1aV\xf5\xedO\xed\xb0\x1b\xcd$6\xfc\xea\x0e\x9b\xd5]\xc6Ye\xe7y\xfb\xcd\x08TR]\xde\'?*\xf7\x81\xf6D\x9b\xbfk4\x00q6\xf4)u=\xb9\xdd]\xf4*\xf3G\xff\xf3\x82\x90\xf7\xa7\xb73\xf7\xdbj\x92\xbd\xad#\xcbn\xa8\x038\xb5\x10\xacK\xc2\x1e\xc4\x01\x7fK\xe9\xedNJ-\xc6\xc8:\x1a\x17\xf4\xb7}\x17\x1b\xd0:5<\xf8\xf5n\xa0\xb2A,\xf1W\xa7\x9a2\xd4\x03ri\xc3\x99\xac[\xc4\xa9\xd2P\x06\xa3\xb5\x91R\xbfE\xca\xd7\x96\xf4#\xaa\xc0N\xdb\xcd\xd5\xe5U\xb8\x07\xe7Sy\x8c\x96\x99\xa2b\xeb!\xf2\xe4\xe7\xb5\x05\xe4\x0c\xddU\x0b\xb1:\xba\x17K7]r\x1d\xa5P\xecU\xf4\xee\x0eY\xde\xc1\xad\x9d\xfa\xa4\x8c!uxo\xc59\xce\x7f\x9f\xed\xb8\x07\x7fE5\xae\xe2P\xf2\x9b.\xe8\xa6o\xe9R\xc2F\xd2\x92[\x1blc\xa6I<L\xca\xdc\xfe\x0fK\x83:\xf8\xa2\xbb\xef!\x9b\xfb\xf3X"\x81\xd9\x87\xfe\xd4xk)\xbco\x0c\x1a\xcc,T\x0c\r\xc8\x13\x01\xf9\xb8\xa1Q\x9fR\xd5\xbc\xd6\x97/\xaa\x00\\\xcc\x07\xad\x0b\xab@\xe7\xf9B\xd3\xf4z\xc5J<4\xcd\x83\x0e#\xb
4,\xb3q\xdb\xa2\xcd\x9d\n\x91\xe8"\xa8\x17\xf9A\xfb\xe9\x1bcW\x193\xbfv\x06\xc44\xbe\xdfQ\xf8\xc7\x96\xdfm\xf13.\xe8\x1a\xf9\x9c\r\xa7\xb8#\xa7\xba\xd6o\xd3X\x14\xfc\x8fJ7\xb7\xf3\xce\xadB\xdb\xff\xc2\xc3\xcc\xc5\x9d\xfaX\xed\xe8\xbbp(\xc6\x17D\x9b\x92\xeb\xb4\xfe\xa6p5|\xa9?\x19\xa8\x10\xac\x1d\x08\xa6\xf3\x14Y\'\xfcl\x165\xf3k{\xaa^\xb6\xbe,|<\xca\x94\x90q\x0cr\xe6\xde{MG\x07\xfc\xf4\x99fpR\xda\xec\x83\xe55\x0c\x06\x85B\xca\xad\x95\xff\x8cH\xbag\xe6t\xa1\xd7\xf9\x97\x06\xb3k\xafo\x9e\xcc\xee\xe1\xac\x80\xff\x95\xc6\x069\xaa\xc1\x06K\xe7sc\x05*\xdf\xfeF\xd4\xd8<\xc4\x8f\x8d\xf1\x0eY\xec\x8fQ\x0c\xaa\xd2U\x94%Mc\xe0_!BK\xc8a\x98\xbc\x89\xc4\x03W\x07\xbf\x11\xc6\xbd\xb9\x05\xf5\x1b5\x02\xa7S\xad\xeb\x9aS\x92*\x98\xd9\x1c\xb1\x8f\xa1\x03_\xc7\x17xI\x98\x1d8V\x9e|p\x85\xda\xdf\xa2\x9d\xbf\xd1\xb2\x1du\x178\x15\xd8\x06\xf0\x9apd\r\x845\xec.:\x16\xa55+5\x97\\\xac9M\xdc\xbf\xa20\xa9\xf4\x87O\xa2\x1fp\xbfeL\xaa\x10$]x2\x8b\xbfW\xd3F\xb6$]-\x16[\xc7\x17ah\xc8R\t\x07\xbf\x1b\x9bW\x9aC\xd17\xa0g\xd7\xc7\xc2\xbe\x0f\x08\xa6\xb4\x07\xed_\xe8\xcfY\x8d290\x9b\xf0\x8c#\x95\xdd\x1b7\xed\x00\x84\xec\x1e\xcc\xbc\xd8\xf6\xde\xb5\xe4\x9f\x1f\x8cc\xcd\xe6e_\xcfj\x9f\n~~\x8fA\xea\x1c\x1f\xa0\x04\xd1\xdc0/\xaf\xeb\x1agq7\xe63\xa4P\xb6\x07\xea\x9bk\x95\x0b\x90\xb3\xb2\x17\x11\xaa\xbd\xed\xd7\x85\x1d(\xber\xfb7o\xcd#\xf1\x9b\xf2\x90\xde\xbc/\x011\xac\xfbe\xe3\xd1\xf3\x99X#\x15P\xb2fm\x1bu\x81n_\xf3$\xc2\x00\x1a{l-\x02\xd8o\x15\xa0\xf6\xe81\xcdptV\xe1ZqCU\x83\xdax\xa2B\x03\xe7\xb7\xad\xba\xe9\xa6\x02\xb8H\xa0\xb6\x06\xb73#\xa1\x91;\x1b\xaf\xbc\xe6\xfcx\xf0\xcd\x8b2qV\xe3\x12b\x9b\xee\xb7\xd2W\xf9\xf2\xb0\x10xZqhU\xde\xb4I\x02\x1c\x9d\xeb\xcb^\x1b\x872ztXB\xd6D\x0f\xcb!\xc1\x1a*|\xdb\xc1j9\xf0Y\xf3\x93\xde\xa9Wvu\xa44~\x9d\tj\xcaa\x8e\x83\x93B\xb8\xdd\xf2\xd6;7\xcc\x1c\xa2b\xaa\xd2\xdff\x97\xf2U\x9f,ln\xefT<\xc9\xceH\x84\xa9~F\xbc\x91\x9b%\xc8i\xa8{K\x16K\xba5\xad\xf0\x9b\xae0\xe2=\x18\xc7\xdf\xfb\xeaPV\xcd5\x88\xe6\x1d\xe8~\xfb\xc7m\xff]\xfd\xa2\xc2xB\x7f\xc1\x92z}\x00\xabi\x02\xae\x7f\xf4\x
c3J\x97\xb8\x9dZD\xec\xd6\xcb;\x1d\x07\xb3\xbc\xdb=\x9bI\xed\x99\x89\'\x99\xa5s\x8c\xf4e5\xd2\t6D^lw\xb5\x88^.\xac9I\xb5\xa5\xa4@\x015\xdf\x9f9f\xf1\\\xa1\xd7{K\xfb\xef{\x9e\x93\xdd3\xd9Z\xbb:\xad\xf5\x05\xa5\xdf\xc1\xceQ\x06\xab\xe3b1G\xe7\xe6\x05\xde\xb79\x0ck\xbe\xe6\xc3\x00\xf5\x103\x01\xd8\xbc\x94\x82y\xd9}@\x82\xd7]s{\xcb\xcc\xd8\x9f\x83\x88\xaf|\xa1\x07\xf9\x10\x11\xdb \x8dO\x1c\x15+\xaa?V>\xe3\x87y\x84\xb2\xd2:\xdc\x90D\x00\xc2\x8cG\xed\xbbol\x046\xe8\\\xb3\xeb\xf6\xd7K\xfb\xafGHs\xdbWi[\xc2#,e\xa2]s;${\xc4?\xe7\xe26\x95RI(\xc8O\x98\xe5X\x90\x06;\x1fX\x0fD\x1dC\x02\xcf\x18U\xf0I\x0c\xdb\x97\x9fT\xd9X\xd4\xcd\xec_\xdb\x108\x8b\xb7\xbe!*!\xb3?\'\ttL\xbf\nrE{\xb4\xce$(4o/bJ\x80Z\xce\x97\xe5\xac\x10%\x9a\xa2\x8fDWxp\xf9\xf6?u\xcb\x1d\x86\xebP\x11\xf7\xbf|R\x11\xdd\xebI\xce\xe0\x7f"\xa7\xd1\xfb\xceZ\r\x84\xae\xfd\xa3{\xdb\x88\x175\xfbrP\xb6\xbdmcYMiK\x93\xf2\xb4\x87\xfdg\x8c4T\x86\x9a\xf1\x1cy\xe5\x8f\rjhH\xc5~\xcd_m/\xac\x89(\x90\x1b\xf3\xb8|\xd2\xe6\x9c\xe1.\x9fU\x02r\xebG\xdakF\xd8\xafJ\xba\x80\xdc\x84\xe5\x87\x7f\xf9\xf5FIA1\x8aO\x9a\x99\xbf\xde\x0c\x96\x04Tx\x13@\xf5A>@\xcb+&\x0b@}#\x7f\xfc\xcf\xda2\x0bJe\xf7.Cz\xc1\xf2I)U\xf1\xe0\xb0\xd8V\x05b\xbb\xed\x0b\xae*\x00-\xa6\x81\xd5\xb1]O\xe5\xc1\x83\xf3\xe1\xf9\xf3\xf1\xc7\xe9Y\x9c\xfd#\x0e\\:/\xdf_<\xc9\xc7\xc9z\x95\xff\xad\xc6|!k\x03K==7f:\xfb\xd5\xa0"\r\xe4p\x8c\xb1I-\xabzQuI\xe8\x05*\xfd\xaff\xec\xf5\xbc\xebra{\x16u\xfc\xba\r\xca\x1bR&\xa7\xe9H%\xd5K\xde\xb3\x89\xd1\'}Y\x82\xbbN\x04\x80}IN\xedz)\x9dc\x8b\xe7\x05\xc0\x88\x0e\x95\xbdBt\xc3e\x068\xbdJ\xc4\x0e\x8a\x05v\xbb\x97~\x0fZ\x10\x9a/y\x00\xf2\x1f\xbaUE\xdf}\x11\\u\x8a\xfb\xb3z\xb0\xf7%\x84\xe5\x89\x8dB\xed4\xbd\x7f,U(q\xba@\xd4\xf2\x97b\xc0\x18\x8fij\xa0\xe6\xd2\xec\x05x\xbe4]~\x07\x9dH\xbc^\xd1w=\xf8CnDF\xb5\xd9\xda\x8dq\xc8\xe1\xbfT\x1da\xc9\xb5S\x8e\x8d\xf883i\xd6\xa2\xa3\x12\xcfB\xff\'\x85\xa6\xd9#\x82U6#\x08\xac\x18\xde\x9f\xdc\xbb%\xbd\xc0\'\x05x\x94\xae\xaf\x1cL\xb0\xe9\x99Q\x1d\xf3\xbc\xe7H\xd3\x8epS\xd28X\xa0gV_\x12/]\x9e\x99\x15\xdb\x18\xf8+z\x
85\x90\xb5S \xe9\x07$v[g\xfa\xd7\xdb\xf0\x0eiv\xa6\xcaR\x92\xb2\xd7\xa3\x17\xb5\x8d\xb2\x05J\x93V\x94\x03\xfc\x8c\xd3\x93f\x1d\x9f\xa1\x9b8]\xe6\xd5~J\xdc\xf4\xf78\'\xcbs\x10\xb7\x8f\xdc)\xdayK>\xf7\xce\xdfjj\xe8\x11\xadHi\r\xa3\x1e\x9f\xe2\x1dY\xec\xe5_V\xc0 98\x9eA\x88{\x9f C\xaf\x93\xc0\xc2\xbb/\xc8.@\x1dw5jJS\xa6<\x91r\xf1\xc6\xbb\xbd\xaa\xad\xd8W\xdcB\xb5\x9b\xfa\n\x99\xa9e?\x1a\xe65\xe4\xe9\xd3\xc8l}\x91}\xa3\xd8\xe9e/\xb8\xb1\xdd\xcb\xd5\xed\xce\xbe\xbc,b\xff\x8f^>i\xa8I\\_\x16\xac\x90\xe7\xcc\x94\x89!l6\x80SQ\xf5\xdb\xe4\xe0y\xc4>T\x91\xfeE\xcdB\xb4\xc1M4\x87\xa8{t\xf9\xcfyRE\xb2\x81,\xf7\xeaw~\xf4\xcc\xd0\xbc<\xf9\xab\xa4\xe7\xe5\x0b\xb42i\xef\xaf\xe5\xba\x8bT\x95&\x0c~\x91\x98\xcc\x0el\xffF\xac\xb84{\xba}\x979<\xc33\xf3th\xdb\xda\xb3\x80w\xc2hn\xed\xbc\xe3\x8b\x81zD7\xde\x08+\\\xfa\xc4{y\x9d\xbc)\xae\x94\xcdcx\xf2n[\x03P_\xd7-9\xb4]e\xef\x8e\xfeV?\r\xea\x81\xf4\xa5\x16\x0c\xa6Qo\xd0\xbch\xd8\xb0\x8a\x8a\xd8\xbeDH\xed\xb1\x05\x88#WT\xc7c\tz.\xa3$\x05k/\xfeD\xe46\x86\'\x81\xf5\x84\xfc|\xe4Qkx\x01\xc4\x03\xb6\xf1\x85\xae\x08\x00)\xee\xcb\x8e|\xa9\xd3.\xae\xeaN\xbf\x84\tl3W\xb7K\x9e\xa7<[\xf3\xd2_v\xa8\xab\'\xfc\xdb\x9c\xe5\xda+\xa0\xf8\xac\xf2\xcf\xf7`\x12\xcfgM\xd53\xaee\xfd\xb763\xc1\xe1\xb2\x82\xd7\x1b\xb2\xdf\x856E\xef(\xec\x8f\xd5\xf2\xbb,{D\x0c\xed\x14_\xb0,\xd1\x0e\xa9\xaf\xbf`G\xb5\x1c\x06|\xde\x98\n+\xc5\xa7}\x8c~\xb1\xc5\xbfm\xb0W\xeby)\xae\xfb\x19\xb9\xeb\r1`\xf7\xe1\xe9\xd8\x8c\xaa\x08\xd8\xf5\xff2\xa6\x14\x81\xbd\x80[\x83@\xabF\xae\xaa2\xf8\xea\xb6k \xa1\xea\xf9\xcf\r\x8b\x8e?\x90?)\x1d\x1a\x19\xe9)\x0bm4D\x9e%\x87\xe4\xf3\xeaWR{\xae\xa1\x1b\xd4\x7f\xd0/\xaag%\xb2\x1b\xcd\xb1\xa6\xdbZ\x96\xa0Q\xc4,\x01\xb2\x82n%\x7f\xb5\xaa\x8c\x08\xba\xf5\xaa1\xa7Ru\x82\xea\xdb\xd8\xb7\xa7\x86)R\xe1H\xb3\xae\xceOD\xb50\x9a\x83T\x89k\xb7\xba\x10c\\f\xd9>\xbc\xfd\xea\xf6\x08\xf5\x0b\x85 
u\x10\x8f\x89\x06\x13\x87\x954\x98\xf0\x7f\xe9\xe8\n\x1caF\x9f)\x11\xd7o\xa5\xbe\x0c\xeb\xbc\\\xbb=\x1a9R\xf770\xbdo=3\x19I\x9c\xc3B\xc6\xb9y\xfa\x84<\xe9K\xa9\x019\xb6)\x059\'\xc6\x1e\xca\xc3\xe6m\x94Y\xea5z\x80&\xf3\xe8\x89\xcb\xd5\xc5\xc68I\xeb\x8bl\x19\x05\xb9\xb0\x99{|\xbf^\r\x867\x0c\xc5\x1f\xf6\x147t\xec\xe4\xa4\xebW\xcf\xc1>\x8a\xa6\xebT\xbd\x9bS\xfe~\xf8\xb7^\xc1\xaf\xfe\xb8\x94\xf5\xbcJ\xd3z\x1e7\xb5\xf3\x92\xa7\xd1d\xd7\xc9RNwc\x00\xbe\t\xb8\'bF\x9aQ\xe5\xd8\x89\x9c\x98\xcfT\xd4\xd8l_\xc4\x0cH-\xa4j\x9a\xb9\x863\xfc\xff\xe5\xaf\xfd\xf43R\\\xf9{\xdd8\x03\xb3A\x8c\x174\xd9D\xeb\x91\x8d\x8c\xeaz\x91_A*e\xbft\xd9\x0f\x87\xf7\x1b/4\xb2Q6\x94:*`\xed\x15\xc9l\x8c\x0bE\x83\xe3\xc0y\xef\xe3\x9c\x00\xe8\xe8>\x86\xe0\xff\xc5\x7fx\xa0k\xe1]\x9c\xe4\x83T\x8e\x88\x9fEM\xd5\x9d\xc9\x80P\xaa\xafcA\xaa\xdc\xc8\xaf\x86ap?\xd4H\xfd\xe3h\xecu\xf9\xa0}4\xfc\x8e\x88X\xc5\xce\xad\xf4C\xfa\xc7\x020\xb4\xe9\xd5\xcb\xe1G%\x8eJ&\xbf\xb0\xf1\x016\xcf\xe19\x83\x89\x05\x14\x01_1\xc0o6\xe7\x06I9\xa8Fy\xf8j\xe0\xe4R\xc7y\xa0Dd\xe8\xb8a[\xc9{\tp\xbb&\xf3\x14\xf93%G\xa7\xdb\x004W\xbaM1\'\x86\xde\xb3\n\xfb/\xd0\xa1\xff\x99yT\x1f\xc8r\xf7\x94\x17\xfdq\xdeo\x16\xb5}\x0e\x14\xb9\x1b\x9e\x15^\xac\xc9\xd9y)-Ju%L\x04Aje\xdd7EFv\xee\xdf\x81\xda\xc7\xfe\x88\x0eK\xdd$|\xbe\xdd\xef#;\x0e]a\xb2\x00.\x01\xb4l\xe4\xfcA\xc9\xfc\xad##\x7fB\xc9`T%\x01\x9d\x18\xd4\xf8V\xf8\xe4\xeb\xe1w=`U\xaa\x080\xe1\x1f\'Z\xaa\x83\x9am\x07sl\x81\x10\x7f\x8d\xa3+<y\x15\xe5\x1c\x1c\xeawU\x91iF\xba\x0e\xf0\xc1\xed\xb5o\x7f"\x972\xf4\x11"\xab\x03yg\x91\xf4Jh\x8ayX\x0f\xde\x11\x92\xe0"?\xe5\x96T\xa0\xf3\x92\x9f\xe5\x13/\x15o2tv~\xf9G\xa0BE)xdu\xb2\xf8u\xe3\xc8$\x04\x8e \t]\xf3\x1f\x80A\x95\xf5\x08.\x10\x1bqQ\xe2\x1f\x17\x8d\xac\x15\xe8\x19\xca\x8d\x1a\xf3\xd6\x95\x0c\x13\x88\xf9.\xd1\xeaRe\xbeC\xb4\xcei\x1f\xdd\x03t*\xf1y\x0cqe\xffm\xfb\xd3\xc6\xccT\xe9br\xaduI\x9et\xf3Q\x07Zo\xd0\xa3E\xd5\x8d\xec\\:\xe2\xee\n\xbe;\x14G\xf9\xab 
\x7f\x12\xe2\x93\x01\x81\xed\x1c7V\x1c\xa7`\xa0RB\xf3\xe1\x86\x8fV\xd4\xc3\xa8\xff\xa1\r\xc7\x9fe%[\xf1\xe4=\x0e\x88\xb6\\l\x96\xe9j\x10\xad\xf1\xc8\xf7zf\xe8%q\xe3\x0b\xedXa\xcfL\xb9d\xfe\xd5k!\xe9%\x8b\x06>\xec\x84.6}:\xf1a_\xb8\xcd\x08\xbb\x0f;`\xedn.\xe5\x89\x9b\xda|;\xf3q\xe3\xc2\x0e\xc5-J\xd9\xf8\x96V\x1f\xa6o_\x06P\xee\xf7\x89\xdc\xbe*\x92\xdd\xa9\x16\x9e\x87\xee\xff$\x98\xf5\x8eR\xcc\xc6X\xa7\x0f\x84\xbe\xf2v\xbb:\xd6Xq\xf2\\\x1a\x03J\x80\x1a\xca\xc89rW\x9d\x86\xcc\xf9AAcF\xb0v\xd5x2o\x15\xe7\x9e{\xef\x88\xc7\xe6\xf6E\xb2\xaf(\xf7~LrV\x01\xd6\xf7\xae\x00^c\x88\xf2\x02\xa7\x11|kT\x89\xe2/\x97f\xd7\x9a\xb1\xa2@\x89\xba\x99]\xd4!\x14\x19`\xfc\x0c\xae\xe02i\xeb8\xcb\x1f<\x18\x95\xca!\xef\xcd\x1e+\x15\xb0\x1488;\x99\x91&\xb4\x1f\xbc\xf3#\x11`\x9c\xe3\xebn\x08_*\x99\x10\xee`\x0c\xbc\x8bA\xbf^\xf6\xd9\xd7\x02\x7f.\xc6\xe8\xef.0\x06\x19\xa7\x02^\xbbS\xfe+\xeet \xb2\xb4\rw?A^\xb6\x9bRL\x99\xab\\\xd3\x81\xf1\x1e\x07)L\xff\x16I\x13GpQ\x98\xc4\x18\\\x8b\xf0\x9aj_\x12X\xcc\x15\xa6p\xb0l\xfb\xeb\x08D=\xdd\xbe\x16\xe2\xe7\xb0+\xce\xbc\x0b\xc8;V\x93\x9f\xf7\x9aC\xfc]\x022O9J2M\x02\xde\xe2r\xa5\x8f\x13\xec#\x0fmP\xcb\xfc\xda\x98\xe2\x9d\xb7!\x86\xf2\xa2\x1d\xde\xa6\xa9\x18v\xd2\x13\xd8\x99t2\xee\xbcC\xcdtq|\xf5\xc3!;\xef;b\x96\x9c\xfbQ]\xd6\xdd\xe4N\xe0\xc3\x94\xfbe\xb1\x9a\xa8&o\x17nT\xf8\xe2\xd5\x7f\xa1Dd\x98\x8e\xb7\xc8\x9b\xf4\x89\x85\xc0\xfe;\xab\xa9\xe6N\x98rIYm\xd2\xedIA\xe5Z\xb4$|\n\xdc\x96j\xbd\xb5\xd8~%\x90\x8a\xc1\xea\xec;\x15\xaf\x83B\x9b\x82\xb3\x9b\xb6\x18\xa0\xbd\xb0\x9e\xd6\x9bg\xa9\x86\xf4pB|\x1c_\xd2D0|R\x0b\x99y>v\xe5Z\xec\xd8O|l\xa9\xce\x8c\xa6\x02\x11\x85\xa6w[vZ\xdc\xceR.\x82d,\x89\x94\x86\xc6\x0eS\xf7\x9c\x1c\xe8\xb2\x15\x9e\x9b\x8fs\x10\xb7S\xca\x7f\x89\xde\xa9\x03\xa4!\xdc1\xfc.o\xfbN-\xa2\x97\xf1\xec.s\xb4K\xdb\xfd]\x0e\xf2\xdd\xc5_\x86>3/4\xcb)\xb16\x15\xe78A\xcc\xfe\xb5\xec\xcc\xd0\x0f\n\xd4\xd2\x9a4\x88\nW\\6q0`\xe7\xac9\xf1\x97Um\x10\x869\xcb\xd44\xda\xdf.\x99\x0b<\xd0G\tA\xda\xc7R:\x16pM\x1f\x95[\n\x01\x0f#\x95^e^\xde\xc4+\xee\xf6z\xd4\x9f\xf0\
xda\xaf$\xa1\xf1\xdf\xb2\xfc\x87\xf0\xd20+\xa5\xb2\x87\x05k\x06.lM\xdc\x9e\xcb\x1ad\xf9\xcdMS\x9eT\x99\xd7l^\x1fl\x0c\xb5\x81\xffy\x9d\x10\xe63\x7f\x85oT\x93\x1fa\xc2\xb6\xf9\'\xb9\x11\x8e\xb5\xf8\x87\x1f.\xf1%\x9c\xff\x82\x12\xb8\xc1\xdb\x01\xc0]\x02^\xc5\x04\xa2\xe7?SP\xe3\x9d\xb9\xb6\x91\x02\xc2\xf5\x0cT\x07\xb9R\xb7\n\xbc\xb1{\xbbT\x8f Q\xd2\x10\x81\xbe\xac\xdf\xf0\xcb<,\xaa\xc2\x81#kr\xfc\xf5\xbe\xc8\xbcY\x91lOb\x07\xf3\xf1\xf1\x93V\x08\xf9Pfv\xc1\x9a\xea\x88\xe1\xa2x\xfce\x0f\x94\n\xab\xf0t\x0bg*6>^;T\xd8\xe7uJ\xbf\xf2\x9fHM\x7f\x8e\xfa\xaa\x1dZ3\x88s\xa6\xe9\x0c\x8bA\xd4\x1br\xa3\xb3^e\x8b\xd5:\xdf\x83\xde\xb2s\xe6$\x0cV\xdb\x98\x1f\xd7o]\x7f\xbd\xa4\x86\x16\x95\x81t\xaa\xceVAg\x1e*\x17\xb9\xba\x84-XF\xbe\xcd3\xc5\x87\xea\xf3\x944\x1c\xfb9\x11CD\xc6\xaan\xe1\r\xe3W\xe8\xa6C\x89\x9c\x89\x16\x88\xea\xff\x8e\xd74\xde\xf5)~\xc1}\xb7\xb4\xaf\x18\xc4<P\xcb=\x9bQ?\xf23\xe0m\x15\xfd\x89lY\x00\x19\x93\xf9\x90\xea\xad\xcc*\nr\x8f\xa7\x1fjT\x01p\xd0?\x8a\xa9\xfd\xa1/\xfcO\x80n_\x1bp\x1f"\x80\n\xefJ\xd9\x0cR\xfd\xf5\xd6l}\xa9\x9b\x9bq\xf0\xd9*\x81o0\xe9\x00\x90\xb2\xe4\xae~2}Q\xc3^\xd3AF\xe07\xdb\x9e\x04v\xfc)ha,IR\\}(G\x9bN\xa7\x18e\x16^V X\'\x7f\xb4\xd6\x8a~\xfa\x13\xa0\xb4\xe0~\x95\xbe\x85\xb74R\x13\xa3K\xdb\x9c\x14B\xaexS\xd7+\xfa\xcb2\xad*x\x9d\\T5\x96\xe3\x80\x0fX\xcah\x8dO\x0erC\x16\xd9\xa3\x88\x17\x01#y\xe2{\xd5\xaf=\x07\x13\xc5\xa8\xed}r\x80\xe7-\x9e\xe8\x1a\x89\x85\xc2O\xcds\x0c\x99p.\x1d\xaaK>\x98\xc3%\xea^H\xd9\xdc^[\xd43\xaag\x12\xd5O\xaa5sI\xe8!^\x8c\xcd)c\xc3\x80\xf7hn\xe1\xd2\xe4\x82\xf6 
\xf7\xea\xbdK\x04\x8b\x94\xd1\x19O\xa4\xab;9\r\xfc\x07\xec\x11\x8a\xda\xd6Y\xc0\xd2\xd7\xd5\xc2\xf1\'\x8f\xe6\xef\xe6\xe7\xa7\x064\xea\x97\xcbB\xe5\xa9\xd9\x1dV\x0cyue\xcf!\xae\x9e\xf4!"\xd7\xa3.u\x04[\xa6\xe6k9\xa3R\x93X\xa8\xc5R`\xab9\x99\x0b_\xa8\x0c\n\x96s,\xe0\x18d\xceDq\x0e6\xbe\xe4j\x8e\xb4z\xc0M\xfd\xfa\x11\'\xcb\x18\xd4^\xbe\xbd\xa6\x8d*0B\x1c\xe7\x80\xc3\x08\xcd\x1fx\x18\x13\xb2\x9e\xd9+~\xa5q\xa2\tD\x00\xfcB\x84\xecv\xcc7\xe8s\xa8\x15h!\x1cb5\xfb5\x02\x95\x88xj\xb5\x9d\xeb{eW%+\xe4\xe1L\xf3L\x84$\xef\x10\xf6\xaf+\xacr\xcc"\xbb\xcc\xb3En\x9a^\xf0\x19\xe8\xea\x9b\xdc5e\x92\xb9\x1f\x17\xdb\xa5\xeaWV\x7f\xa6Sw\xd9\x00\xd3\xde[\xd0v\xffs6\x1b\xf8`@\xea\x0c\xfc\xd9\xb1\xed\x99\xad\xf1\xe8@\x1bo\x8fy!ye\x01\xe9\xda\xf6cR\x9c\xebZJ\xf7\xc6J"B\xe4\x98\x92\x8d\x08J3efN\xb4#\x0e\xab\x86\n\xf0\xbf\xfa\x19Kn\xb7\xe69\x10\x1a\\\xd2t\x05\xe1\xca\x86\xc0$\xaa`\xe6\x11H\x93\x13\xae\xf9\xef\xe4\x90\xb8H\xa7\x9f\x1c\xd1\x0f\x98e\xf8\xac\xad\xech\xa3Zs9\r\x8d\xc6\r&\x93\x88\x83"\xfd\x10\x05\xfe\xbb\xe0{\xb0u\xd0\xf1\x96\xf5M\x95\xae\x13\xa6\xeb\xb8l\xb9h\x82\xba\xc0lz\xc1!\xfa\xe8\x80\xd9\xcc\xda\xf2@\xa8\x1d\x853\x8dX\x07\x14\x05D\xf1\xa5\x93m8k\xff\xe0\xf0\xc8\xcd\xf3\xa0\x8c\x902\x7f\xb6a\x8d\x04\xa3\x1f\xf9\xd3\xd3\x9bQ\'\x87\xae\xc7\xf8\x93\xd9`\r\xc4\x11yQ3\x1d\x84%7\x80&(\'J\'\x04\nh\x0eq\xcc\x04\x1ds?\x15\n&\xdd\xfd\xef\xd4\xa9S>\x1c\xff\x037[o\xc8G[W\x9f\xff\x1d@]\x9d\xec|\xd8\xff\'\xb8z\xd8~\x80\xf8\\\xf8\xdfVw\x07{\x0f7Oo\x07\x08\xe4\xee\xff6\xbe\xff\x7f\x80\xfc\xef\xc1\x13\x88\x83\xa7\xcf\xb9\xff\t\xean\x1e\x1f|]\x1d4N\xff\xef\xe6\xcc\xff\xe0\xc6\xe9\xbbg\xfe\xef\x17\xff\xaf\xf9\xffV\x9d\xfd_\xc3\xff\xba4\xfe\x7f\xb2\xf3\xff\x8f\xec\xe2\xe9\xff\x03\\\x9fN\xc5(\x04\x00\x00\x00t\x07\x00\x00\x00marshalt\x04\x00\x00\x00zlibt\x05\x00\x00\x00loadst\n\x00\x00\x00decompress(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<Asep>t\x08\x00\x00\x00<module>\t\x00\x00\x00s\x02\x00\x00\x00\x18\x01"\xe1\x85s'))')
12,529.5
124,973
0.997693
158
125,295
791.177215
0.987342
0
0
0
0
0
0
0
0
0
0
0.117239
0.000303
125,295
10
124,973
12,529.5
0.880757
0.002251
0
0
0
0.5
0.999552
0.999552
0
1
0
0
0
0
null
null
0
0.5
null
null
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
1
1
1
null
1
0
0
0
1
0
0
0
1
0
0
0
0
9
935b454ebdf8f56cbc288cb919bff80662eac926
6,732
py
Python
test/legacy/test_nova.py
mitsuhiko/nameko
6dccde0aec58a1c85efffcb1bdd38aca488c8874
[ "Apache-2.0" ]
3
2015-06-15T15:19:00.000Z
2017-09-04T07:43:40.000Z
test/legacy/test_nova.py
mitsuhiko/nameko
6dccde0aec58a1c85efffcb1bdd38aca488c8874
[ "Apache-2.0" ]
null
null
null
test/legacy/test_nova.py
mitsuhiko/nameko
6dccde0aec58a1c85efffcb1bdd38aca488c8874
[ "Apache-2.0" ]
null
null
null
import eventlet from eventlet.event import Event from kombu import Producer import mock import pytest from nameko.exceptions import RemoteError, UnknownService from nameko.legacy import context from nameko.legacy import nova from nameko.legacy.consuming import queue_iterator from nameko.legacy.responses import ifirst from nameko.testing.utils import assert_stops_raising def test_delegation_to_send_rpc(): conn = 'connection' ctx = 'context' topic = 'topic' method = 'foobar' args = 'args' msg = dict(method=method, args=args) timeout = 123 exchange = 'spam_exchange' options = {'CONTROL_EXCHANGE': exchange} with mock.patch('nameko.legacy.nova.send_rpc', autospec=True) as send_rpc: nova.call( connection=conn, context=ctx, topic=topic, msg=msg, timeout=timeout, options=options) send_rpc.assert_called_with( conn, context=ctx, exchange=exchange, topic=topic, method=method, args=args, timeout=timeout) def test_delegation_to_send_rpc_default_exchange(): conn = 'connection' ctx = 'context' topic = 'topic' method = 'foobar' args = 'args' msg = dict(method=method, args=args) timeout = 123 exchange = 'rpc' with mock.patch('nameko.legacy.nova.send_rpc', autospec=True) as send_rpc: nova.call( connection=conn, context=ctx, topic=topic, msg=msg, timeout=timeout) send_rpc.assert_called_with( conn, context=ctx, exchange=exchange, topic=topic, method=method, args=args, timeout=timeout) def test_send_rpc(get_connection): queue_declared = Event() def response_greenthread(): with get_connection() as conn: with conn.channel() as chan: queue = nova.get_topic_queue( 'test_rpc', 'test', channel=chan) queue.declare() queue_declared.send(True) msg = ifirst(queue_iterator(queue, no_ack=True, timeout=2)) msgid, _, _, args = nova.parse_message(msg.payload) exchange = nova.get_reply_exchange(msgid) producer = Producer(chan, exchange=exchange, routing_key=msgid) msg = {'result': args, 'failure': None, 'ending': False} producer.publish(msg) msg = {'result': None, 'failure': None, 'ending': True} 
producer.publish(msg) g = eventlet.spawn_n(response_greenthread) eventlet.sleep(0) with get_connection() as conn: ctx = context.get_admin_context() queue_declared.wait() resp = nova.send_rpc( conn, context=ctx, exchange='test_rpc', topic='test', method='test_method', args={'foo': 'bar', }, timeout=3) assert resp == {'foo': 'bar', } def check_greenthread_dead(): assert not g assert_stops_raising(check_greenthread_dead) def test_send_rpc_unknown_service(get_connection): with get_connection() as conn: ctx = context.get_admin_context() with pytest.raises(UnknownService): nova.send_rpc( conn, context=ctx, exchange='test_rpc', topic='test', method='test_method', args={'foo': 'bar', }, timeout=3) def test_send_rpc_errors(get_connection): queue_declared = Event() def response_greenthread(): with get_connection() as conn: with conn.channel() as chan: queue = nova.get_topic_queue( 'test_rpc', 'test', channel=chan) queue.declare() queue_declared.send(True) msg = ifirst(queue_iterator(queue, no_ack=True, timeout=2)) msgid, _, _, _ = nova.parse_message(msg.payload) exchange = nova.get_reply_exchange(msgid) producer = Producer(chan, exchange=exchange, routing_key=msgid) exc = Exception('error') failure = (type(exc).__name__, str(exc)) msg = {'result': None, 'failure': failure, 'ending': False} producer.publish(msg) msg = {'result': None, 'failure': None, 'ending': True} producer.publish(msg) g = eventlet.spawn_n(response_greenthread) eventlet.sleep(0) with get_connection() as conn: ctx = context.get_admin_context() with pytest.raises(RemoteError): queue_declared.wait() nova.send_rpc( conn, context=ctx, exchange='test_rpc', topic='test', method='test_method', args={'foo': 'bar', }, timeout=3) def check_greenthread_dead(): assert not g assert_stops_raising(check_greenthread_dead) def test_send_rpc_multi_message_reply_ignores_all_but_last(get_connection): queue_declared = Event() def response_greenthread(): with get_connection() as conn: with conn.channel() as chan: queue = 
nova.get_topic_queue( 'test_rpc', 'test', channel=chan) queue.declare() queue_declared.send(True) msg = ifirst(queue_iterator(queue, no_ack=True, timeout=2)) msgid, _, _, args = nova.parse_message(msg.payload) exchange = nova.get_reply_exchange(msgid) producer = Producer(chan, exchange=exchange, routing_key=msgid) for _ in range(3): msg = dict( result='should ignore this message', failure=None, ending=False) producer.publish(msg) eventlet.sleep(0.1) msg = dict(result=args, failure=None, ending=False) producer.publish(msg) msg = dict(result=None, failure=None, ending=True) producer.publish(msg) g = eventlet.spawn_n(response_greenthread) eventlet.sleep() with get_connection() as conn: ctx = context.get_admin_context() queue_declared.wait() resp = nova.send_rpc( conn, context=ctx, exchange='test_rpc', topic='test', method='test_method', args={'spam': 'shrub', }, timeout=3) assert resp == {'spam': 'shrub', } eventlet.sleep() def check_greenthread_dead(): assert not g assert_stops_raising(check_greenthread_dead)
30.6
79
0.575015
724
6,732
5.140884
0.157459
0.030091
0.030091
0.035733
0.803869
0.803869
0.791779
0.781032
0.781032
0.781032
0
0.003953
0.323529
6,732
219
80
30.739726
0.813351
0
0
0.769231
0
0
0.063428
0.008021
0
0
0
0
0.065089
1
0.071006
false
0
0.065089
0
0.136095
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
fa7dd712fb7dc2a527fa4f5c2ead57749a5b9a10
3,636
py
Python
rdmo/domain/tests/test_validator_parent.py
berkerY/rdmo
c0500f9b6caff9106a254a05e0d0e8018fc8db28
[ "Apache-2.0" ]
77
2016-08-09T11:40:20.000Z
2022-03-06T11:03:26.000Z
rdmo/domain/tests/test_validator_parent.py
MSpenger/rdmo
c0500f9b6caff9106a254a05e0d0e8018fc8db28
[ "Apache-2.0" ]
377
2016-07-01T13:59:36.000Z
2022-03-30T13:53:19.000Z
rdmo/domain/tests/test_validator_parent.py
MSpenger/rdmo
c0500f9b6caff9106a254a05e0d0e8018fc8db28
[ "Apache-2.0" ]
47
2016-06-23T11:32:19.000Z
2022-03-01T11:34:37.000Z
import pytest from django.conf import settings from django.core.exceptions import ValidationError from rest_framework.exceptions import \ ValidationError as RestFameworkValidationError from ..models import Attribute from ..serializers.v1 import AttributeSerializer from ..validators import AttributeParentValidator def test_create(db): AttributeParentValidator()({ 'uri_prefix': settings.DEFAULT_URI_PREFIX, 'key': 'test', 'parent': Attribute.objects.get(uri='http://example.com/terms/domain/individual/single') }) def test_update(db): attribute = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text') AttributeParentValidator(attribute)({ 'uri_prefix': attribute.uri_prefix, 'key': attribute.key, 'parent': attribute.parent }) def test_update_error(db): attribute = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text') with pytest.raises(ValidationError): AttributeParentValidator(attribute)({ 'uri_prefix': attribute.uri_prefix, 'key': attribute.key, 'parent': attribute # set self as parent }) def test_serializer_create(db): class MockedView(object): action = 'create' validator = AttributeParentValidator() validator.set_context(AttributeSerializer()) validator.serializer.context['view'] = MockedView() validator({ 'uri_prefix': settings.DEFAULT_URI_PREFIX, 'key': 'test', 'parent': Attribute.objects.get(uri='http://example.com/terms/domain/individual/single') }) def test_serializer_update(db): attribute = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text') validator = AttributeParentValidator() validator.set_context(AttributeSerializer(instance=attribute)) validator({ 'uri_prefix': attribute.uri_prefix, 'key': attribute.key, 'parent': attribute.parent }) def test_serializer_update_error(db): attribute = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text') validator = AttributeParentValidator() validator.set_context(AttributeSerializer(instance=attribute)) 
with pytest.raises(RestFameworkValidationError): validator({ 'uri_prefix': attribute.uri_prefix, 'key': attribute.key, 'parent': attribute # set self as parent }) def test_serializer_copy(db): attribute = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text') class MockedView(object): action = 'copy' def get_object(self): return attribute validator = AttributeParentValidator() validator.set_context(AttributeSerializer()) validator.serializer.context['view'] = MockedView() validator({ 'uri_prefix': attribute.uri_prefix, 'key': attribute.key, 'parent': attribute.parent }) def test_serializer_copy_error(db): attribute = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text') class MockedView(object): action = 'copy' def get_object(self): return attribute validator = AttributeParentValidator() validator.set_context(AttributeSerializer()) validator.serializer.context['view'] = MockedView() with pytest.raises(RestFameworkValidationError): validator({ 'uri_prefix': attribute.uri_prefix, 'key': attribute.key, 'parent': attribute # set self as parent })
30.813559
99
0.685644
365
3,636
6.715068
0.153425
0.058752
0.039168
0.071807
0.839249
0.835985
0.835985
0.835985
0.835985
0.835985
0
0.000343
0.19802
3,636
117
100
31.076923
0.840192
0.015402
0
0.784091
0
0
0.170022
0
0
0
0
0
0
1
0.113636
false
0
0.079545
0.022727
0.284091
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
fabc432ada9039c9f3308a8d74b5ecf49b88632d
22,003
py
Python
fhir/resources/STU3/tests/test_compartmentdefinition.py
mmabey/fhir.resources
cc73718e9762c04726cd7de240c8f2dd5313cbe1
[ "BSD-3-Clause" ]
null
null
null
fhir/resources/STU3/tests/test_compartmentdefinition.py
mmabey/fhir.resources
cc73718e9762c04726cd7de240c8f2dd5313cbe1
[ "BSD-3-Clause" ]
null
null
null
fhir/resources/STU3/tests/test_compartmentdefinition.py
mmabey/fhir.resources
cc73718e9762c04726cd7de240c8f2dd5313cbe1
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """ Profile: http://hl7.org/fhir/StructureDefinition/CompartmentDefinition Release: STU3 Version: 3.0.2 Revision: 11917 Last updated: 2019-10-24T11:53:00+11:00 """ import io import json import os import unittest import pytest from .. import compartmentdefinition from ..fhirdate import FHIRDate from .fixtures import force_bytes @pytest.mark.usefixtures("base_settings") class CompartmentDefinitionTests(unittest.TestCase): def instantiate_from(self, filename): datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or "" with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle: js = json.load(handle) self.assertEqual("CompartmentDefinition", js["resourceType"]) return compartmentdefinition.CompartmentDefinition(js) def testCompartmentDefinition1(self): inst = self.instantiate_from("compartmentdefinition-example.json") self.assertIsNotNone( inst, "Must have instantiated a CompartmentDefinition instance" ) self.implCompartmentDefinition1(inst) js = inst.as_json() self.assertEqual("CompartmentDefinition", js["resourceType"]) inst2 = compartmentdefinition.CompartmentDefinition(js) self.implCompartmentDefinition1(inst2) def implCompartmentDefinition1(self, inst): self.assertEqual(force_bytes(inst.code), force_bytes("Device")) self.assertEqual(force_bytes(inst.contact[0].name), force_bytes("[string]")) self.assertEqual( force_bytes(inst.contact[0].telecom[0].system), force_bytes("url") ) self.assertEqual( force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org/fhir"), ) self.assertEqual(inst.date.date, FHIRDate("2017-02-24").date) self.assertEqual(inst.date.as_json(), "2017-02-24") self.assertEqual( force_bytes(inst.description), force_bytes( "The set of resources associated with a particular Device (example with Communication and CommunicationRequest resourses only)." 
), ) self.assertTrue(inst.experimental) self.assertEqual(force_bytes(inst.id), force_bytes("example")) self.assertEqual( force_bytes(inst.jurisdiction[0].coding[0].code), force_bytes("US") ) self.assertEqual( force_bytes(inst.jurisdiction[0].coding[0].display), force_bytes("United States of America (the)"), ) self.assertEqual( force_bytes(inst.jurisdiction[0].coding[0].system), force_bytes("urn:iso:std:iso:3166"), ) self.assertEqual(force_bytes(inst.name), force_bytes("EXAMPLE")) self.assertEqual( force_bytes(inst.publisher), force_bytes("Health Level Seven International (FHIR Infrastructure)"), ) self.assertEqual( force_bytes(inst.purpose), force_bytes( "Provides an example of a FHIR compartment definition based on the Device resource type." ), ) self.assertEqual( force_bytes(inst.resource[0].code), force_bytes("Communication") ) self.assertEqual( force_bytes(inst.resource[0].documentation), force_bytes("The device used as the message sender and recipient"), ) self.assertEqual(force_bytes(inst.resource[0].param[0]), force_bytes("sender")) self.assertEqual( force_bytes(inst.resource[0].param[1]), force_bytes("recipient") ) self.assertEqual( force_bytes(inst.resource[1].code), force_bytes("CommunicationRequest") ) self.assertEqual( force_bytes(inst.resource[1].documentation), force_bytes("The device used as the message sender and recipient"), ) self.assertEqual(force_bytes(inst.resource[1].param[0]), force_bytes("sender")) self.assertEqual( force_bytes(inst.resource[1].param[1]), force_bytes("recipient") ) self.assertTrue(inst.search) self.assertEqual(force_bytes(inst.status), force_bytes("draft")) self.assertEqual(force_bytes(inst.text.status), force_bytes("generated")) self.assertEqual( force_bytes(inst.title), force_bytes("Base FHIR compartment definition for Device(example)"), ) self.assertEqual( force_bytes(inst.url), force_bytes("http://hl7.org/fhir/CompartmentDefinition/example"), ) self.assertEqual( force_bytes(inst.useContext[0].code.code), 
force_bytes("focus") ) self.assertEqual( force_bytes(inst.useContext[0].code.system), force_bytes("http://hl7.org/fhir/usage-context-type"), ) self.assertEqual( force_bytes(inst.useContext[0].valueCodeableConcept.coding[0].code), force_bytes("Device"), ) self.assertEqual( force_bytes(inst.useContext[0].valueCodeableConcept.coding[0].system), force_bytes("http://hl7.org/fhir/resource-types"), ) def testCompartmentDefinition2(self): inst = self.instantiate_from("compartmentdefinition-relatedperson.json") self.assertIsNotNone( inst, "Must have instantiated a CompartmentDefinition instance" ) self.implCompartmentDefinition2(inst) js = inst.as_json() self.assertEqual("CompartmentDefinition", js["resourceType"]) inst2 = compartmentdefinition.CompartmentDefinition(js) self.implCompartmentDefinition2(inst2) def implCompartmentDefinition2(self, inst): self.assertEqual(force_bytes(inst.code), force_bytes("RelatedPerson")) self.assertEqual( force_bytes(inst.contact[0].telecom[0].system), force_bytes("url") ) self.assertEqual( force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org/fhir"), ) self.assertEqual(inst.date.date, FHIRDate("2017-04-19T07:44:43+10:00").date) self.assertEqual(inst.date.as_json(), "2017-04-19T07:44:43+10:00") self.assertTrue(inst.experimental) self.assertEqual(force_bytes(inst.id), force_bytes("relatedPerson")) self.assertEqual( force_bytes(inst.name), force_bytes("Base FHIR compartment definition for RelatedPerson"), ) self.assertEqual(force_bytes(inst.publisher), force_bytes("FHIR Project Team")) self.assertEqual(force_bytes(inst.resource[0].code), force_bytes("Account")) self.assertEqual( force_bytes(inst.resource[1].code), force_bytes("ActivityDefinition") ) self.assertEqual( force_bytes(inst.resource[2].code), force_bytes("AdverseEvent") ) self.assertEqual( force_bytes(inst.resource[2].param[0]), force_bytes("recorder") ) self.assertEqual( force_bytes(inst.resource[3].code), force_bytes("AllergyIntolerance") ) self.assertEqual( 
force_bytes(inst.resource[3].param[0]), force_bytes("asserter") ) self.assertEqual(force_bytes(inst.resource[4].code), force_bytes("Appointment")) self.assertEqual(force_bytes(inst.resource[4].param[0]), force_bytes("actor")) self.assertEqual( force_bytes(inst.resource[5].code), force_bytes("AppointmentResponse") ) self.assertEqual(force_bytes(inst.resource[5].param[0]), force_bytes("actor")) self.assertEqual(force_bytes(inst.resource[6].code), force_bytes("AuditEvent")) self.assertEqual(force_bytes(inst.resource[7].code), force_bytes("Basic")) self.assertEqual(force_bytes(inst.resource[7].param[0]), force_bytes("author")) self.assertEqual(force_bytes(inst.resource[8].code), force_bytes("Binary")) self.assertEqual(force_bytes(inst.resource[9].code), force_bytes("BodySite")) self.assertTrue(inst.search) self.assertEqual(force_bytes(inst.status), force_bytes("draft")) self.assertEqual(force_bytes(inst.text.status), force_bytes("generated")) self.assertEqual( force_bytes(inst.url), force_bytes("http://hl7.org/fhir/CompartmentDefinition/relatedPerson"), ) def testCompartmentDefinition3(self): inst = self.instantiate_from("compartmentdefinition-patient.json") self.assertIsNotNone( inst, "Must have instantiated a CompartmentDefinition instance" ) self.implCompartmentDefinition3(inst) js = inst.as_json() self.assertEqual("CompartmentDefinition", js["resourceType"]) inst2 = compartmentdefinition.CompartmentDefinition(js) self.implCompartmentDefinition3(inst2) def implCompartmentDefinition3(self, inst): self.assertEqual(force_bytes(inst.code), force_bytes("Patient")) self.assertEqual( force_bytes(inst.contact[0].telecom[0].system), force_bytes("url") ) self.assertEqual( force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org/fhir"), ) self.assertEqual(inst.date.date, FHIRDate("2017-04-19T07:44:43+10:00").date) self.assertEqual(inst.date.as_json(), "2017-04-19T07:44:43+10:00") self.assertTrue(inst.experimental) self.assertEqual(force_bytes(inst.id), 
force_bytes("patient")) self.assertEqual( force_bytes(inst.name), force_bytes("Base FHIR compartment definition for Patient"), ) self.assertEqual(force_bytes(inst.publisher), force_bytes("FHIR Project Team")) self.assertEqual(force_bytes(inst.resource[0].code), force_bytes("Account")) self.assertEqual(force_bytes(inst.resource[0].param[0]), force_bytes("subject")) self.assertEqual( force_bytes(inst.resource[1].code), force_bytes("ActivityDefinition") ) self.assertEqual( force_bytes(inst.resource[2].code), force_bytes("AdverseEvent") ) self.assertEqual(force_bytes(inst.resource[2].param[0]), force_bytes("subject")) self.assertEqual( force_bytes(inst.resource[3].code), force_bytes("AllergyIntolerance") ) self.assertEqual(force_bytes(inst.resource[3].param[0]), force_bytes("patient")) self.assertEqual( force_bytes(inst.resource[3].param[1]), force_bytes("recorder") ) self.assertEqual( force_bytes(inst.resource[3].param[2]), force_bytes("asserter") ) self.assertEqual(force_bytes(inst.resource[4].code), force_bytes("Appointment")) self.assertEqual(force_bytes(inst.resource[4].param[0]), force_bytes("actor")) self.assertEqual( force_bytes(inst.resource[5].code), force_bytes("AppointmentResponse") ) self.assertEqual(force_bytes(inst.resource[5].param[0]), force_bytes("actor")) self.assertEqual(force_bytes(inst.resource[6].code), force_bytes("AuditEvent")) self.assertEqual(force_bytes(inst.resource[6].param[0]), force_bytes("patient")) self.assertEqual( force_bytes(inst.resource[6].param[1]), force_bytes("agent.patient") ) self.assertEqual( force_bytes(inst.resource[6].param[2]), force_bytes("entity.patient") ) self.assertEqual(force_bytes(inst.resource[7].code), force_bytes("Basic")) self.assertEqual(force_bytes(inst.resource[7].param[0]), force_bytes("patient")) self.assertEqual(force_bytes(inst.resource[7].param[1]), force_bytes("author")) self.assertEqual(force_bytes(inst.resource[8].code), force_bytes("Binary")) self.assertEqual(force_bytes(inst.resource[9].code), 
force_bytes("BodySite")) self.assertEqual(force_bytes(inst.resource[9].param[0]), force_bytes("patient")) self.assertTrue(inst.search) self.assertEqual(force_bytes(inst.status), force_bytes("draft")) self.assertEqual(force_bytes(inst.text.status), force_bytes("generated")) self.assertEqual( force_bytes(inst.url), force_bytes("http://hl7.org/fhir/CompartmentDefinition/patient"), ) def testCompartmentDefinition4(self): inst = self.instantiate_from("compartmentdefinition-practitioner.json") self.assertIsNotNone( inst, "Must have instantiated a CompartmentDefinition instance" ) self.implCompartmentDefinition4(inst) js = inst.as_json() self.assertEqual("CompartmentDefinition", js["resourceType"]) inst2 = compartmentdefinition.CompartmentDefinition(js) self.implCompartmentDefinition4(inst2) def implCompartmentDefinition4(self, inst): self.assertEqual(force_bytes(inst.code), force_bytes("Practitioner")) self.assertEqual( force_bytes(inst.contact[0].telecom[0].system), force_bytes("url") ) self.assertEqual( force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org/fhir"), ) self.assertEqual(inst.date.date, FHIRDate("2017-04-19T07:44:43+10:00").date) self.assertEqual(inst.date.as_json(), "2017-04-19T07:44:43+10:00") self.assertTrue(inst.experimental) self.assertEqual(force_bytes(inst.id), force_bytes("practitioner")) self.assertEqual( force_bytes(inst.name), force_bytes("Base FHIR compartment definition for Practitioner"), ) self.assertEqual(force_bytes(inst.publisher), force_bytes("FHIR Project Team")) self.assertEqual(force_bytes(inst.resource[0].code), force_bytes("Account")) self.assertEqual(force_bytes(inst.resource[0].param[0]), force_bytes("subject")) self.assertEqual( force_bytes(inst.resource[1].code), force_bytes("ActivityDefinition") ) self.assertEqual( force_bytes(inst.resource[2].code), force_bytes("AdverseEvent") ) self.assertEqual( force_bytes(inst.resource[2].param[0]), force_bytes("recorder") ) self.assertEqual( 
force_bytes(inst.resource[3].code), force_bytes("AllergyIntolerance") ) self.assertEqual( force_bytes(inst.resource[3].param[0]), force_bytes("recorder") ) self.assertEqual( force_bytes(inst.resource[3].param[1]), force_bytes("asserter") ) self.assertEqual(force_bytes(inst.resource[4].code), force_bytes("Appointment")) self.assertEqual(force_bytes(inst.resource[4].param[0]), force_bytes("actor")) self.assertEqual( force_bytes(inst.resource[5].code), force_bytes("AppointmentResponse") ) self.assertEqual(force_bytes(inst.resource[5].param[0]), force_bytes("actor")) self.assertEqual(force_bytes(inst.resource[6].code), force_bytes("AuditEvent")) self.assertEqual(force_bytes(inst.resource[6].param[0]), force_bytes("agent")) self.assertEqual(force_bytes(inst.resource[7].code), force_bytes("Basic")) self.assertEqual(force_bytes(inst.resource[7].param[0]), force_bytes("author")) self.assertEqual(force_bytes(inst.resource[8].code), force_bytes("Binary")) self.assertEqual(force_bytes(inst.resource[9].code), force_bytes("BodySite")) self.assertTrue(inst.search) self.assertEqual(force_bytes(inst.status), force_bytes("draft")) self.assertEqual(force_bytes(inst.text.status), force_bytes("generated")) self.assertEqual( force_bytes(inst.url), force_bytes("http://hl7.org/fhir/CompartmentDefinition/practitioner"), ) def testCompartmentDefinition5(self): inst = self.instantiate_from("compartmentdefinition-encounter.json") self.assertIsNotNone( inst, "Must have instantiated a CompartmentDefinition instance" ) self.implCompartmentDefinition5(inst) js = inst.as_json() self.assertEqual("CompartmentDefinition", js["resourceType"]) inst2 = compartmentdefinition.CompartmentDefinition(js) self.implCompartmentDefinition5(inst2) def implCompartmentDefinition5(self, inst): self.assertEqual(force_bytes(inst.code), force_bytes("Encounter")) self.assertEqual( force_bytes(inst.contact[0].telecom[0].system), force_bytes("url") ) self.assertEqual( force_bytes(inst.contact[0].telecom[0].value), 
force_bytes("http://hl7.org/fhir"), ) self.assertEqual(inst.date.date, FHIRDate("2017-04-19T07:44:43+10:00").date) self.assertEqual(inst.date.as_json(), "2017-04-19T07:44:43+10:00") self.assertTrue(inst.experimental) self.assertEqual(force_bytes(inst.id), force_bytes("encounter")) self.assertEqual( force_bytes(inst.name), force_bytes("Base FHIR compartment definition for Encounter"), ) self.assertEqual(force_bytes(inst.publisher), force_bytes("FHIR Project Team")) self.assertEqual(force_bytes(inst.resource[0].code), force_bytes("Account")) self.assertEqual( force_bytes(inst.resource[1].code), force_bytes("ActivityDefinition") ) self.assertEqual( force_bytes(inst.resource[2].code), force_bytes("AdverseEvent") ) self.assertEqual( force_bytes(inst.resource[3].code), force_bytes("AllergyIntolerance") ) self.assertEqual(force_bytes(inst.resource[4].code), force_bytes("Appointment")) self.assertEqual( force_bytes(inst.resource[5].code), force_bytes("AppointmentResponse") ) self.assertEqual(force_bytes(inst.resource[6].code), force_bytes("AuditEvent")) self.assertEqual(force_bytes(inst.resource[7].code), force_bytes("Basic")) self.assertEqual(force_bytes(inst.resource[8].code), force_bytes("Binary")) self.assertEqual(force_bytes(inst.resource[9].code), force_bytes("BodySite")) self.assertTrue(inst.search) self.assertEqual(force_bytes(inst.status), force_bytes("draft")) self.assertEqual(force_bytes(inst.text.status), force_bytes("generated")) self.assertEqual( force_bytes(inst.url), force_bytes("http://hl7.org/fhir/CompartmentDefinition/encounter"), ) def testCompartmentDefinition6(self): inst = self.instantiate_from("compartmentdefinition-device.json") self.assertIsNotNone( inst, "Must have instantiated a CompartmentDefinition instance" ) self.implCompartmentDefinition6(inst) js = inst.as_json() self.assertEqual("CompartmentDefinition", js["resourceType"]) inst2 = compartmentdefinition.CompartmentDefinition(js) self.implCompartmentDefinition6(inst2) def 
implCompartmentDefinition6(self, inst): self.assertEqual(force_bytes(inst.code), force_bytes("Device")) self.assertEqual( force_bytes(inst.contact[0].telecom[0].system), force_bytes("url") ) self.assertEqual( force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org/fhir"), ) self.assertEqual(inst.date.date, FHIRDate("2017-04-19T07:44:43+10:00").date) self.assertEqual(inst.date.as_json(), "2017-04-19T07:44:43+10:00") self.assertEqual( force_bytes(inst.description), force_bytes( "There is an instance of the practitioner compartment for each Device resource, and the identity of the compartment is the same as the Device. The set of resources associated with a particular device" ), ) self.assertTrue(inst.experimental) self.assertEqual(force_bytes(inst.id), force_bytes("device")) self.assertEqual( force_bytes(inst.name), force_bytes("Base FHIR compartment definition for Device"), ) self.assertEqual(force_bytes(inst.publisher), force_bytes("FHIR Project Team")) self.assertEqual(force_bytes(inst.resource[0].code), force_bytes("Account")) self.assertEqual(force_bytes(inst.resource[0].param[0]), force_bytes("subject")) self.assertEqual( force_bytes(inst.resource[1].code), force_bytes("ActivityDefinition") ) self.assertEqual( force_bytes(inst.resource[2].code), force_bytes("AdverseEvent") ) self.assertEqual( force_bytes(inst.resource[3].code), force_bytes("AllergyIntolerance") ) self.assertEqual(force_bytes(inst.resource[4].code), force_bytes("Appointment")) self.assertEqual(force_bytes(inst.resource[4].param[0]), force_bytes("actor")) self.assertEqual( force_bytes(inst.resource[5].code), force_bytes("AppointmentResponse") ) self.assertEqual(force_bytes(inst.resource[5].param[0]), force_bytes("actor")) self.assertEqual(force_bytes(inst.resource[6].code), force_bytes("AuditEvent")) self.assertEqual(force_bytes(inst.resource[6].param[0]), force_bytes("agent")) self.assertEqual(force_bytes(inst.resource[7].code), force_bytes("Basic")) 
self.assertEqual(force_bytes(inst.resource[8].code), force_bytes("Binary")) self.assertEqual(force_bytes(inst.resource[9].code), force_bytes("BodySite")) self.assertTrue(inst.search) self.assertEqual(force_bytes(inst.status), force_bytes("draft")) self.assertEqual(force_bytes(inst.text.status), force_bytes("generated")) self.assertEqual( force_bytes(inst.url), force_bytes("http://hl7.org/fhir/CompartmentDefinition/device"), )
47.115632
216
0.656138
2,423
22,003
5.821709
0.080891
0.219056
0.218347
0.272934
0.868992
0.861265
0.832554
0.820502
0.784843
0.762229
0
0.025338
0.212562
22,003
466
217
47.216738
0.788815
0.00809
0
0.570115
0
0.002299
0.166934
0.034835
0
0
0
0
0.445977
1
0.029885
false
0
0.018391
0
0.052874
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
10
8790022d1b92607ba6248af3c2d60609abf96716
222
py
Python
jobs/test_job.py
YoniSchirris/SimCLR
a99b7f7d0fdbc5a9747abf70a8b216b328608796
[ "MIT" ]
null
null
null
jobs/test_job.py
YoniSchirris/SimCLR
a99b7f7d0fdbc5a9747abf70a8b216b328608796
[ "MIT" ]
null
null
null
jobs/test_job.py
YoniSchirris/SimCLR
a99b7f7d0fdbc5a9747abf70a8b216b328608796
[ "MIT" ]
null
null
null
import torch print(f"Cuda is available: {torch.cuda.is_available()}") print(f"Number of GPUs: {torch.cuda.device_count()}") print(f"GPU names: {[torch.cuda.get_device_name(i) for i in range(torch.cuda.device_count())]}")
37
96
0.734234
38
222
4.157895
0.526316
0.227848
0.189873
0.253165
0
0
0
0
0
0
0
0
0.081081
222
5
97
44.4
0.77451
0
0
0
0
0.25
0.788288
0.536036
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0.75
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
1
0
0
0
0
1
0
9
355b20a5c14c89309c720fa01c76bba84a017d93
365
py
Python
glisten/__init__.py
Krytic/glisten
14fa2d9a490c55888c63bc9ff2001442447efce6
[ "MIT" ]
null
null
null
glisten/__init__.py
Krytic/glisten
14fa2d9a490c55888c63bc9ff2001442447efce6
[ "MIT" ]
null
null
null
glisten/__init__.py
Krytic/glisten
14fa2d9a490c55888c63bc9ff2001442447efce6
[ "MIT" ]
null
null
null
""" . . /^\ . /\ "V" /__\ I O o //..\\ I . \].`[/ I /l\/j\ (] . O /. ~~ ,\/I . \\L__j^\/I o \/--v} I o . | | I _________ | | I c(` ')o | l I \. ,/ _/j L l\_! _//^---^\\_ Here be wizard. """ import glisten.Logger as log
16.590909
28
0.216438
34
365
1.823529
0.411765
0.096774
0.096774
0
0
0
0
0
0
0
0
0
0.553425
365
22
28
16.590909
0.380368
0.838356
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
357a063d986b437569c2f6c738df9d63775dc3b6
50,205
py
Python
movie/tests/test_views.py
sinzee/hosting_movie
23d3860ba320a012be0b08bbea181557be17d00b
[ "MIT" ]
null
null
null
movie/tests/test_views.py
sinzee/hosting_movie
23d3860ba320a012be0b08bbea181557be17d00b
[ "MIT" ]
null
null
null
movie/tests/test_views.py
sinzee/hosting_movie
23d3860ba320a012be0b08bbea181557be17d00b
[ "MIT" ]
null
null
null
import os.path from time import sleep from unittest import mock from django.conf import settings from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.contrib.sites.shortcuts import get_current_site from django.core import mail from django.core.files import File from django.core.files.base import ContentFile from django.test import Client, TestCase from django.urls import reverse from django.utils.encoding import force_bytes, force_text from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode from movie.models import ( Movie, SiteUser, ) class IndexViewTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/' cls.url_name = 'index' def test_view_url_exists_at_desired_location(self): resp = self.client.get(self.url_path) self.assertEqual(resp.status_code, 200) def test_view_url_accessible_by_name(self): resp = self.client.get(reverse(self.url_name)) self.assertEqual(resp.status_code, 200) def test_view_url_uses_correct_template(self): resp = self.client.get(reverse(self.url_name)) self.assertTemplateUsed('blog/index.html') class SiteUserDetailTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/user/{user_id}' cls.url_name = 'user-detail' mail_address = 'test@example.com' test_user = User.objects.create_user( username=mail_address, password='12345', email=mail_address, first_name='Super', last_name='John', ) cls.site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) def setUp(self): self.resp = self.client.get( self.url_path.format( user_id=str(self.site_user.pk) ) ) def test_view_url_exists_at_desired_location(self): self.assertEqual(self.resp.status_code, 200) def test_view_url_accessible_by_name(self): resp = self.client.get( reverse( self.url_name, kwargs={ 'pk': self.site_user.pk, }, ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.assertTemplateUsed(self.resp, 
'movie/siteuser_detail.html') class SiteUserCreateTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/user/create' cls.url_name = 'create-user' def setUp(self): self.resp = self.client.get(self.url_path) def test_view_exists_at_desired_location(self): resp = self.client.get(self.url_path) self.assertEqual(resp.status_code, 200) def test_view_exists_at_desired_location(self): resp = self.client.get(reverse(self.url_name)) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.assertTemplateUsed(self.resp, 'movie/siteuser_form.html') def test_create_user_temporarily(self): email_address = 'test@example.com' password = '12345' user_data = { 'password': password, 'confirm_password': password, 'email': email_address, 'first_name': 'Super', 'last_name': 'John', } resp = self.client.post( self.url_path, user_data, follow=True, ) self.assertTrue(User.objects.filter(email=email_address).exists()) user_status = User.objects.get(email=email_address) self.assertFalse(user_status.is_active) def test_create_user_temporarily_without_value_in_password(self): email_address = 'test@example.com' user_data = { 'password': '', 'confirm_password': '', 'email': email_address, 'first_name': 'Super', 'last_name': 'John', } resp = self.client.post( self.url_path, user_data, follow=True, ) self.assertFalse(User.objects.filter(email=email_address).exists()) def test_redirect_after_create_user_temporarily(self): password = '12345' user_data = { 'password': password, 'confirm_password': password, 'email': 'test@example.com', 'first_name': 'Super', 'last_name': 'John', } resp = self.client.post( self.url_path, user_data, ) self.assertRedirects(resp, reverse('created-user-temporarily')) def test_send_email_to_suggest_to_complete_registration(self): email_address = 'test@example.com' password = '12345' user_data = { 'password': password, 'confirm_password': password, 'email': email_address, 'first_name': 'Super', 'last_name': 'John', } 
domain = 'www.example.com' resp = self.client.post( self.url_path, user_data, SERVER_NAME=domain ) self.assertEqual(len(mail.outbox), 1) sended_mail = mail.outbox[0] self.assertEqual(sended_mail.to, [user_data['email'], ]) self.assertEqual(sended_mail.from_email, settings.DEFAULT_FROM_EMAIL) self.assertEqual(sended_mail.subject, 'Registerd temporarily.') user = User.objects.get(email=email_address) uid = urlsafe_base64_encode(force_bytes(user.pk)) token = default_token_generator.make_token(user) message_body = "Your account has registered temporarily.\n" \ "Click below link, then registration complete\n" \ "{scheme}://{domain}{path}\n".format( scheme='https', domain=domain, path=reverse( 'created-user-completely', kwargs={ 'uidb64': uid, 'token': token, } ) ) self.assertEqual(sended_mail.body, message_body) class SiteUserCreateTemporarilyTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/user/create/temp' cls.url_name = 'created-user-temporarily' def test_view_exists_at_desired_location(self): resp = self.client.get(self.url_path) self.assertEqual(resp.status_code, 200) def test_view_exists_at_desired_location(self): resp = self.client.get(reverse(self.url_name)) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): resp = self.client.get(self.url_path) self.assertTemplateUsed(resp, 'movie/created_user_temporarily.html') class SiteUserCreateCompletelyTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/user/create' def test_registration_completes_when_confirmation_link_is_clicked(self): email_address = 'test@example.com' password = '12345' user_data = { 'password': password, 'confirm_password': password, 'email': email_address, 'first_name': 'Super', 'last_name': 'John', } domain = 'www.example.com' self.client.post( self.url_path, user_data, SERVER_NAME=domain ) user = User.objects.get(email=email_address) self.assertFalse(user.is_active) uid = urlsafe_base64_encode(force_bytes(user.pk)) 
token = default_token_generator.make_token(user) path = reverse( 'created-user-completely', kwargs={ 'uidb64': uid, 'token': token, } ) resp = self.client.get(path, follow=True) user = User.objects.get(email=email_address) self.assertTrue(user.is_active) self.assertTrue(SiteUser.objects.filter(user=user).exists()) self.assertRedirects(resp, reverse('login')) class SiteUserUpdateNameTest(TestCase): @classmethod def setUpTestData(cls): cls.username_url_path = '/movie/user/edit/name' cls.username_url_name = 'user-name-edit' cls.mail_address1 = 'test1@example.com' cls.password1 = '12345' cls.first_name1 = 'Super' cls.last_name1 = 'John' test_user1 = User.objects.create_user( username=cls.mail_address1, password=cls.password1, email=cls.mail_address1, first_name=cls.first_name1, last_name=cls.last_name1, ) cls.site_user1 = SiteUser.objects.create( user=test_user1, bio='user bio' ) cls.mail_address2 = 'test2@example.com' cls.password2 = '67890' cls.first_name2 = 'Super' cls.last_name2 = 'Bob' test_user2 = User.objects.create_user( username=cls.mail_address2, password=cls.password2, email=cls.mail_address2, first_name=cls.first_name2, last_name=cls.last_name2, ) cls.site_user2 = SiteUser.objects.create( user=test_user2, bio='user bio' ) def test_redirec_to_login_page_when_not_logined(self): resp = self.client.get(self.username_url_path) self.assertRedirects( resp, '{login_path}?next={next_path}'.format( login_path=reverse('login'), next_path=self.username_url_path ) ) self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get(self.username_url_path) self.assertEqual(resp.status_code, 200) def test_get_edit_page_of_yourself(self): self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get(self.username_url_path) self.assertEqual(resp.context['object'].username, self.mail_address1) def test_view_exists_at_desired_location(self): self.client.login(username=self.mail_address1, password=self.password1) 
resp = self.client.get(self.username_url_path) self.assertEqual(resp.status_code, 200) def test_view_accessible_by_name(self): self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get( reverse( self.username_url_name ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get(self.username_url_path) self.assertTemplateUsed(resp, 'movie/user_name_form.html') def test_input_form_has_current_value_in_default(self): self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get(self.username_url_path) initials = resp.context['form'].initial self.assertEqual(initials['first_name'], self.first_name1) self.assertEqual(initials['last_name'], self.last_name1) def test_redirect_if_update_comletes(self): self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.post( self.username_url_path, { 'first_name': self.first_name1 + '2', 'last_name': self.last_name1 + '2', } ) self.assertRedirects( resp, reverse( 'user-detail', kwargs={ 'pk': self.site_user1.pk, } ) ) def test_update_username(self): self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.post( self.username_url_path, { 'first_name': self.first_name1 + '2', 'last_name': self.last_name1 + '2', } ) self.assertRedirects( resp, reverse( 'user-detail', kwargs={ 'pk': self.site_user1.pk, } ) ) updated_siteuser = SiteUser.objects.get(pk=self.site_user1.pk) self.assertEqual(updated_siteuser.user.first_name, self.first_name1 + '2') self.assertEqual(updated_siteuser.user.last_name, self.last_name1 + '2') class SiteUserUpdateEmailTest(TestCase): @classmethod def setUpTestData(cls): cls.email_url_path = '/movie/user/edit/email' cls.email_url_name = 'user-email-edit' cls.change_email_temporarily_url_path = '/movie/user/edit/email/temp' cls.mail_address = 'test1@example.com' 
cls.password = '12345' cls.first_name = 'Super' cls.last_name = 'John' test_user = User.objects.create_user( username=cls.mail_address, password=cls.password, email=cls.mail_address, first_name=cls.first_name, last_name=cls.last_name, ) cls.site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) def test_redirec_to_login_page_when_not_logined(self): resp = self.client.get(self.email_url_path) self.assertRedirects( resp, '{login_path}?next={next_path}'.format( login_path=reverse('login'), next_path=self.email_url_path ) ) self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.email_url_path) self.assertEqual(resp.status_code, 200) def test_get_edit_page_of_yourself(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.email_url_path) self.assertEqual(resp.context['object'].username, self.mail_address) def test_view_url_exists_at_desired_location(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.email_url_path) self.assertEqual(resp.status_code, 200) def test_view_accessible_by_name(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get( reverse( self.email_url_name ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.email_url_path) self.assertTemplateUsed(resp, 'movie/user_email_form.html') def test_input_form_has_current_value_in_default(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.email_url_path) initials = resp.context['form'].initial self.assertEqual(initials['email'], self.mail_address) def test_change_email_address(self): self.client.login(username=self.mail_address, password=self.password) new_email = 'new@example.com' resp = self.client.post( self.email_url_path, { 
'email': new_email, } ) self.assertRedirects(resp, self.change_email_temporarily_url_path) def test_send_email_to_suggest_to_complete_changing_email_address(self): self.client.login(username=self.mail_address, password=self.password) new_email_address = 'new@example.com' domain = 'www.example.com' resp = self.client.post( self.email_url_path, { 'email': new_email_address, }, SERVER_NAME=domain ) self.assertEqual(len(mail.outbox), 1) sended_mail = mail.outbox[0] self.assertEqual(sended_mail.to, [new_email_address, ]) self.assertEqual(sended_mail.from_email, settings.DEFAULT_FROM_EMAIL) self.assertEqual(sended_mail.subject, 'Changed Email Address Temporarily.') user = User.objects.get(username=self.mail_address) token = default_token_generator.make_token(user) encoded_new_email_address = urlsafe_base64_encode(force_bytes(new_email_address)) message_body = "Your email address has changed temporarily.\n" \ "Click below link, then change completes\n" \ "{scheme}://{domain}{path}\n".format( scheme='https', domain=domain, path=reverse( 'user-email-edit-completely', kwargs={ 'token': token, 'new_email': encoded_new_email_address, } ) ) """print(sended_mail.body) print(message_body) """ self.assertEqual(sended_mail.body, message_body) def test_change_email_address_completely_when_click_link(self): self.client.login(username=self.mail_address, password=self.password) new_email_address = 'new@example.com' domain = 'www.example.com' resp = self.client.post( self.email_url_path, { 'email': new_email_address, }, SERVER_NAME=domain ) user = User.objects.get(username=self.mail_address) self.assertNotEqual(user.email, new_email_address) token = default_token_generator.make_token(user) encoded_new_email_address = urlsafe_base64_encode(force_bytes(new_email_address)) path = reverse( 'user-email-edit-completely', kwargs={ 'token': token, 'new_email': encoded_new_email_address, } ) resp = self.client.get(path) self.assertTemplateUsed(resp, 'movie/user_email_change_completely.html') 
user = User.objects.get(pk=user.pk) self.assertEqual(user.username, new_email_address) self.assertEqual(user.email, new_email_address) def test_cannot_change_when_another_user_clicks_change_completion_link(self): self.client.login(username=self.mail_address, password=self.password) new_email_address = 'new@example.com' domain = 'www.example.com' resp = self.client.post( self.email_url_path, { 'email': new_email_address, }, SERVER_NAME=domain ) user = User.objects.get(username=self.mail_address) token = default_token_generator.make_token(user) encoded_new_email_address = urlsafe_base64_encode(force_bytes(new_email_address)) path = reverse( 'user-email-edit-completely', kwargs={ 'token': token, 'new_email': encoded_new_email_address, } ) another_email = 'another@example.com' another_password = '12345' another_user = User.objects.create_user( username=another_email, password=another_password, first_name='another_first', last_name='another_last' ) SiteUser.objects.create(user=another_user, bio='another bio') another_client = Client() another_client.login(username=another_email, password=another_password) another_client.get(path) user = User.objects.get(username=self.mail_address) self.assertNotEqual(user.email, new_email_address) class SiteUserUpdateBioTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/user/edit/bio' cls.url_name = 'user-bio-edit' cls.mail_address = 'test1@example.com' cls.password = '12345' cls.first_name = 'Super' cls.last_name = 'John' test_user = User.objects.create_user( username=cls.mail_address, password=cls.password, email=cls.mail_address, first_name=cls.first_name, last_name=cls.last_name, ) cls.site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) def test_redirect_when_you_dont_login(self): resp = self.client.get(self.url_path) self.assertRedirects(resp, '/accounts/login/?next=' + self.url_path) def test_view_exists_at_desired_location(self): self.client.login(username=self.mail_address, 
password=self.password) resp = self.client.get(self.url_path) self.assertEqual(resp.status_code, 200) def test_view_accessible_by_name(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get( reverse( self.url_name ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.url_path) self.assertTemplateUsed(resp, 'movie/siteuser_form.html') class SiteUserUpdateIndexTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/user/edit' cls.url_name = 'user-edit-index' cls.mail_address = 'test1@example.com' cls.password = '12345' cls.first_name = 'Super' cls.last_name = 'John' test_user = User.objects.create_user( username=cls.mail_address, password=cls.password, email=cls.mail_address, first_name=cls.first_name, last_name=cls.last_name, ) cls.site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) def test_redirect_when_you_dont_login(self): resp = self.client.get(self.url_path) self.assertRedirects(resp, '/accounts/login/?next=' + self.url_path) def test_view_exists_at_desired_location(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.url_path) self.assertEqual(resp.status_code, 200) def test_view_accessible_by_name(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get( reverse( self.url_name ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.url_path) self.assertTemplateUsed(resp, 'movie/siteuser_edit_index.html') class SiteUserDeleteTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/user/delete' cls.url_name = 'user-delete' cls.mail_address = 'test1@example.com' cls.password = '12345' cls.first_name = 'Super' 
cls.last_name = 'John' test_user = User.objects.create_user( username=cls.mail_address, password=cls.password, email=cls.mail_address, first_name=cls.first_name, last_name=cls.last_name, ) cls.site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) def test_redirect_when_you_dont_login(self): resp = self.client.get(self.url_path) self.assertRedirects(resp, '/accounts/login/?next=' + self.url_path) def test_view_exists_at_desired_location(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.url_path) self.assertEqual(resp.status_code, 200) def test_view_accessible_by_name(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get( reverse( self.url_name ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.get(self.url_path) self.assertTemplateUsed(resp, 'movie/siteuser_confirm_delete.html') def test_delete_user(self): self.client.login(username=self.mail_address, password=self.password) resp = self.client.post(self.url_path) self.assertRedirects(resp, reverse('index')) self.assertFalse(SiteUser.objects.filter(user=self.site_user.user).exists()) self.assertFalse(User.objects.filter(pk=self.site_user.user.pk).exists()) class MovieSearchTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/movies' cls.url_name = 'movie-list' mail_address = 'test@example.com' test_user = User.objects.create_user( username=mail_address, password='12345', email=mail_address, first_name='Super', last_name='John', ) site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) upload_file = mock.MagicMock(spec=File, name='FileMock') upload_file.name = 'file_name.mp4' movie_count = 13 cls.movie_list = [] for num in range(movie_count): cls.movie_list.append( Movie.objects.create( uploader=site_user, description='movie description', 
movie_name='movie title ' + str(num), uploaded_file=upload_file, ) ) @classmethod def tearDownClass(cls): super().tearDownClass() for movie in cls.movie_list: movie.delete() def setUp(self): self.resp = self.client.get(self.url_path) def test_view_url_exists_at_desired_location(self): self.assertEqual(self.resp.status_code, 200) def test_view_url_accessible_by_name(self): resp = self.client.get( reverse( 'movie-list' ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.assertTemplateUsed(self.resp, 'movie/movie_list.html') def test_pagenation_is_valid(self): self.assertIn('is_paginated', self.resp.context) self.assertTrue(self.resp.context['is_paginated']) self.assertEqual(len(self.resp.context['movie_list']), 10) def test_pagination_is_valid_in_second_page(self): resp = self.client.get(self.url_path + '?page=2') self.assertEqual(len(resp.context['movie_list']), 3) def test_search_movie_name(self): resp = self.client.get(self.url_path + '?q=2') self.assertEqual(len(resp.context['movie_list']), 2) def test_search_movie_name_with_multiple_words(self): resp = self.client.get(self.url_path + '?q=2+e') self.assertEqual(len(resp.context['movie_list']), 2) class MovieDetailTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/{movie_id}' cls.url_name = 'movie-detail' mail_address = 'test@example.com' test_user = User.objects.create_user( username=mail_address, password='12345', email=mail_address, first_name='Super', last_name='John', ) site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) upload_file = mock.MagicMock(spec=File, name='FileMock') upload_file.name = 'file_name.mp4' cls.movie = Movie.objects.create( uploader=site_user, description='movie description', movie_name='movie title ', uploaded_file=upload_file, ) @classmethod def tearDownClass(cls): super().tearDownClass() cls.movie.delete() def setUp(self): self.resp = self.client.get( self.url_path.format( movie_id=str(self.movie.pk) ) ) 
def test_view_exists_at_desired_location(self): self.assertEqual(self.resp.status_code, 200) def test_view_accessible_by_name(self): resp = self.client.get( reverse( self.url_name, kwargs={'pk': self.movie.pk, } ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): self.assertTemplateUsed(self.resp, 'movie/movie_detail.html') class MovieCreateTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/create' cls.url_name = 'upload-movie' cls.login_path = '/accounts/login/' cls.movie_path = '/movie/{movie_id}' cls.mail_address1 = 'test1@example.com' cls.password1 = '12345' test_user1 = User.objects.create_user( username=cls.mail_address1, password=cls.password1, email=cls.mail_address1, first_name='Super', last_name='John', ) uploader = SiteUser.objects.create( user=test_user1, bio='user bio' ) def test_users_redirect_if_not_logged_in(self): resp = self.client.get(self.url_path) self.assertRedirects( resp, '{login}?next={source}'.format( login=self.login_path, source=self.url_path ) ) def test_view_url_exists_at_desired_location(self): login = self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get(self.url_path) self.assertEqual(resp.status_code, 200) def test_view_url_accessible_by_name(self): login = self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get(reverse(self.url_name)) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): login = self.client.login(username=self.mail_address1, password=self.password1) resp = self.client.get(reverse(self.url_name)) self.assertTemplateUsed(resp, 'movie/movie_form.html') def test_upload_movie(self): login = self.client.login(username=self.mail_address1, password=self.password1) upload_file = ContentFile(b'\x00\x00\x00 ftypisom\x00\x00\x02\x00') upload_file.name = 'test_movie.mp4' resp = self.client.post( self.url_path, { 'movie_name': 'Movie Title', 'description': 
'desc', 'uploaded_file': upload_file, }, follow=True ) movie_object = Movie.objects.filter().order_by('-post_date')[0] self.assertRedirects( resp, self.movie_path.format( movie_id=movie_object.pk ) ) movie_object.delete() def test_upload_movie_name_is_random(self): login = self.client.login(username=self.mail_address1, password=self.password1) upload_file = ContentFile(b'\x00\x00\x00 ftypisom\x00\x00\x02\x00') upload_file.name = 'test_movie.mp4' resp = self.client.post( self.url_path, { 'movie_name': 'Movie Title', 'description': 'desc', 'uploaded_file': upload_file, }, follow=True ) movie_object = Movie.objects.filter().order_by('-post_date')[0] uploaded_file_name, uploaded_file_ext = upload_file.name.rsplit('.', 1) saved_file_name = os.path.basename(movie_object.uploaded_file.name) self.assertNotEqual(saved_file_name.find(uploaded_file_name), 0) movie_object.delete() class MovieUpdateTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/{movie_id}/edit' cls.url_name = 'movie-edit' cls.email_address = 'test@example.com' cls.password = 'test@example.com' test_user = User.objects.create_user( username=cls.email_address, password=cls.password, email=cls.email_address, first_name='Super', last_name='John', ) site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) upload_file = mock.MagicMock(spec=File, name='FileMock') upload_file.name = 'file_name.mp4' cls.movie = Movie.objects.create( uploader=site_user, description='movie description', movie_name='movie title', uploaded_file=upload_file, ) @classmethod def tearDownClass(cls): super().tearDownClass() cls.movie.delete() def test_redirect_login_page_when_you_has_not_logined(self): request_path = self.url_path.format( movie_id=self.movie.pk ) resp = self.client.get(request_path) self.assertRedirects(resp, '/accounts/login/?next=' + request_path) def test_view_exists_at_desired_location(self): self.client.login(username=self.email_address, password=self.password) resp = 
self.client.get( self.url_path.format( movie_id=self.movie.pk ) ) self.assertEqual(resp.status_code, 200) def test_view_accessible_by_name(self): self.client.login(username=self.email_address, password=self.password) resp = self.client.get( reverse( self.url_name, kwargs={ 'pk': self.movie.pk, } ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_desired_template(self): self.client.login(username=self.email_address, password=self.password) resp = self.client.get( self.url_path.format( movie_id=self.movie.pk ) ) self.assertTemplateUsed(resp, 'movie/movie_form.html') def test_view_is_not_accessible_by_not_owner(self): user2_email_address = 'not-owner@example.com' user2_password = '67890' user2 = User.objects.create_user( username=user2_email_address, password=user2_password, email=user2_email_address, first_name='first2', last_name='last2' ) siteuser2 = SiteUser.objects.create(user=user2, bio='bio') self.client.login(username=user2_email_address, password=user2_password) resp = self.client.get( self.url_path.format( movie_id=self.movie.pk ) ) self.assertEqual(resp.status_code, 403) def test_update_movie_name(self): self.client.login(username=self.email_address, password=self.password) old_movie_name = self.movie.movie_name old_movie_description = self.movie.description resp = self.client.post( self.url_path.format( movie_id=self.movie.pk ), { 'movie_name': 'new_' + old_movie_name, 'description': old_movie_description, } ) new_movie = Movie.objects.get(pk=self.movie.pk) self.assertNotEqual(new_movie.movie_name, self.movie.movie_name) self.assertEqual(new_movie.description, self.movie.description) def test_update_movie_description(self): self.client.login(username=self.email_address, password=self.password) old_movie_name = self.movie.movie_name old_movie_description = self.movie.description resp = self.client.post( self.url_path.format( movie_id=self.movie.pk ), { 'movie_name': old_movie_name, 'description': 'new_' + old_movie_description, } ) new_movie = 
Movie.objects.get(pk=self.movie.pk) self.assertEqual(new_movie.movie_name, self.movie.movie_name) self.assertNotEqual(new_movie.description, self.movie.description) def test_redirect_to_movie_detail_page_when_update_suceeds(self): self.client.login(username=self.email_address, password=self.password) old_movie_name = self.movie.movie_name old_movie_description = self.movie.description resp = self.client.post( self.url_path.format( movie_id=self.movie.pk ), { 'movie_name': old_movie_name, 'description': 'new_' + old_movie_description, } ) self.assertRedirects(resp, '/movie/' + str(self.movie.pk)) class MovieDeleteTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/{movie_id}/delete' cls.url_name = 'movie-delete' cls.email_address = 'test@example.com' cls.password = 'test@example.com' test_user = User.objects.create_user( username=cls.email_address, password=cls.password, email=cls.email_address, first_name='Super', last_name='John', ) site_user = SiteUser.objects.create( user=test_user, bio='user bio' ) upload_file = mock.MagicMock(spec=File, name='FileMock') upload_file.name = 'file_name.mp4' cls.movie = Movie.objects.create( uploader=site_user, description='movie description', movie_name='movie title ', uploaded_file=upload_file, ) @classmethod def tearDownClass(cls): super().tearDownClass() cls.movie.delete() def test_redirect_login_page_when_you_has_not_logined(self): request_path = self.url_path.format( movie_id=self.movie.pk ) resp = self.client.get(request_path) self.assertRedirects(resp, '/accounts/login/?next=' + request_path) def test_view_exists_at_desired_location(self): self.client.login(username=self.email_address, password=self.password) resp = self.client.get( self.url_path.format( movie_id=self.movie.pk ) ) self.assertEqual(resp.status_code, 200) def test_view_accessible_by_name(self): self.client.login(username=self.email_address, password=self.password) resp = self.client.get( reverse( self.url_name, kwargs={ 'pk': 
self.movie.pk, } ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_desired_template(self): self.client.login(username=self.email_address, password=self.password) resp = self.client.get( self.url_path.format( movie_id=self.movie.pk ) ) self.assertTemplateUsed(resp, 'movie/movie_confirm_delete.html') def test_view_is_not_accessible_by_not_owner(self): user2_email_address = 'not-owner@example.com' user2_password = '67890' user2 = User.objects.create_user( username=user2_email_address, password=user2_password, email=user2_email_address, first_name='first2', last_name='last2' ) siteuser2 = SiteUser.objects.create(user=user2, bio='bio') self.client.login(username=user2_email_address, password=user2_password) resp = self.client.get( self.url_path.format( movie_id=self.movie.pk ) ) self.assertEqual(resp.status_code, 403) def test_complete_to_remove_movie(self): self.client.login(username=self.email_address, password=self.password) resp = self.client.post( self.url_path.format( movie_id=self.movie.pk ) ) self.assertRedirects(resp, reverse('user-edit-index')) self.assertFalse(Movie.objects.filter(pk=self.movie.pk).exists()) class MovieCommentCreateTest(TestCase): @classmethod def setUpTestData(cls): cls.url_path = '/movie/{movie_id}/comment' cls.url_name = 'create-comment' cls.url_redirectd = '/movie/{movie_id}' cls.login_path = '/accounts/login/' mail_address1 = 'test1@example.com' test_user1 = User.objects.create_user( username=mail_address1, password='12345', email=mail_address1, first_name='Super', last_name='John', ) uploader = SiteUser.objects.create( user=test_user1, bio='user bio' ) cls.mail_address2 = 'test2@example.com' cls.password2 = '12345' test_user2 = User.objects.create_user( username=cls.mail_address2, password=cls.password2, email=cls.mail_address2, first_name='Super', last_name='Bob', ) cls.commenter = SiteUser.objects.create( user=test_user2, bio='user bio' ) upload_file = mock.MagicMock(spec=File, name='FileMock') upload_file.name = 
'file_name.mp4' cls.movie = Movie.objects.create( uploader=uploader, description='movie description', movie_name='movie title ', uploaded_file=upload_file, ) @classmethod def tearDownClass(cls): super().tearDownClass() cls.movie.delete() def test_view_url_exists_at_desired_location(self): login = self.client.login(username=self.mail_address2, password=self.password2) resp = self.client.get( self.url_path.format( movie_id=str(self.movie.pk) ) ) self.assertEqual(resp.status_code, 200) def test_view_url_accessible_by_name(self): login = self.client.login(username=self.mail_address2, password=self.password2) resp = self.client.get( reverse( self.url_name, kwargs={'pk': self.movie.pk, } ) ) self.assertEqual(resp.status_code, 200) def test_view_uses_correct_template(self): login = self.client.login(username=self.mail_address2, password=self.password2) resp = self.client.get( self.url_path.format( movie_id=str(self.movie.pk) ) ) self.assertTemplateUsed(resp, 'movie/comment_form.html') def test_view_create_comment(self): login = self.client.login(username=self.mail_address2, password=self.password2) resp = self.client.post( self.url_path.format( movie_id=str(self.movie.pk) ), {'description': 'comment desc', } ) self.assertRedirects( resp, self.url_redirectd.format( movie_id=str(self.movie.pk) ) ) def test_redirect_if_not_logined(self): resp = self.client.get(self.url_path.format(movie_id=str(self.movie.pk))) self.assertRedirects( resp, '{login}?next={redirect}'.format( login=self.login_path, redirect=self.url_path.format( movie_id=self.movie.pk ) ) ) class AcountingTest(TestCase): @classmethod def setUpTestData(cls): cls.url_base_path = '/accounts/' cls.url_top = '/movie/' cls.mail_address1 = 'test1@example.com' cls.password1 = '12345' test_user1 = User.objects.create_user( username=cls.mail_address1, password=cls.password1, email=cls.mail_address1, first_name='Super', last_name='John', ) cls.siteuser = SiteUser.objects.create( user=test_user1, bio='user bio' ) def 
test_login_view_url_exists_at_desired_location(self): resp = self.client.get(self.url_base_path + 'login/') self.assertEqual(resp.status_code, 200) def test_redirect_after_login_succeeded_top_page(self): resp = self.client.post( reverse('login'), { 'username': self.mail_address1, 'password': self.password1, }, follow=True ) self.assertRedirects(resp, self.url_top) def test_logout_view_url_exists_at_desired_location(self): resp = self.client.get(self.url_base_path + 'logout/') self.assertEqual(resp.status_code, 200)
37.550486
89
0.547655
5,123
50,205
5.119656
0.05407
0.051472
0.045371
0.042779
0.849893
0.814473
0.795562
0.779625
0.761095
0.738638
0
0.012968
0.359526
50,205
1,336
90
37.578593
0.802706
0
0
0.653913
0
0
0.077325
0.022258
0
0
0
0
0.098261
1
0.097391
false
0.085217
0.013043
0
0.125217
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
ea4a0247f52979d37fb41ca48363939e05c016c4
3,676
py
Python
tests/unit/test_tokenizer.py
neuro-inc/neuro-flow
5f793112a63bd27a6cc608ed6bc13e403943d213
[ "Apache-2.0" ]
13
2020-09-29T17:07:01.000Z
2021-08-02T02:54:31.000Z
tests/unit/test_tokenizer.py
neuro-inc/neuro-flow
5f793112a63bd27a6cc608ed6bc13e403943d213
[ "Apache-2.0" ]
163
2020-09-30T08:50:06.000Z
2022-03-25T01:04:43.000Z
tests/unit/test_tokenizer.py
neuromation/neuro-flow
c29d61247a4afa3b341474d226e08e976f59345c
[ "Apache-2.0" ]
1
2021-04-14T05:44:34.000Z
2021-04-14T05:44:34.000Z
from neuro_flow.tokenizer import Pos, Token, tokenize from neuro_flow.types import LocalPath def test_empty() -> None: assert [] == list(tokenize("", Pos(0, 0, LocalPath("<test>")))) def test_non_template() -> None: assert [ Token( "TEXT", "abc def jik", Pos(0, 0, LocalPath("<test>")), Pos(0, 13, LocalPath("<test>")), ) ] == list(tokenize("abc def jik", Pos(0, 0, LocalPath("<test>")))) def test_non_template_with_unknown_chars() -> None: assert [ Token( "TEXT", "abc!jik", Pos(0, 0, LocalPath("<test>")), Pos(0, 7, LocalPath("<test>")), ) ] == list(tokenize("abc!jik", Pos(0, 0, LocalPath("<test>")))) def test_template_curly() -> None: assert [ Token( "TEXT", "abc ", Pos(0, 0, LocalPath("<test>")), Pos(0, 4, LocalPath("<test>")), ), Token( "LTMPL", "${{", Pos(0, 4, LocalPath("<test>")), Pos(0, 7, LocalPath("<test>")), ), Token( "NAME", "job", Pos(0, 8, LocalPath("<test>")), Pos(0, 11, LocalPath("<test>")), ), Token( "DOT", ".", Pos(0, 11, LocalPath("<test>")), Pos(0, 12, LocalPath("<test>")) ), Token( "NAME", "job_id", Pos(0, 12, LocalPath("<test>")), Pos(0, 18, LocalPath("<test>")), ), Token( "DOT", ".", Pos(0, 18, LocalPath("<test>")), Pos(0, 19, LocalPath("<test>")) ), Token( "NAME", "name", Pos(0, 19, LocalPath("<test>")), Pos(0, 23, LocalPath("<test>")), ), Token( "RTMPL", "}}", Pos(0, 24, LocalPath("<test>")), Pos(0, 26, LocalPath("<test>")), ), Token( "TEXT", "jik", Pos(0, 26, LocalPath("<test>")), Pos(0, 29, LocalPath("<test>")), ), ] == list(tokenize("abc ${{ job.job_id.name }}jik", Pos(0, 0, LocalPath("<test>")))) def test_template_square() -> None: assert [ Token( "TEXT", "abc ", Pos(0, 0, LocalPath("<test>")), Pos(0, 4, LocalPath("<test>")), ), Token( "LTMPL2", "$[[", Pos(0, 4, LocalPath("<test>")), Pos(0, 7, LocalPath("<test>")), ), Token( "NAME", "job", Pos(0, 8, LocalPath("<test>")), Pos(0, 11, LocalPath("<test>")), ), Token( "DOT", ".", Pos(0, 11, LocalPath("<test>")), Pos(0, 12, LocalPath("<test>")) ), Token( "NAME", "job_id", Pos(0, 12, 
LocalPath("<test>")), Pos(0, 18, LocalPath("<test>")), ), Token( "DOT", ".", Pos(0, 18, LocalPath("<test>")), Pos(0, 19, LocalPath("<test>")) ), Token( "NAME", "name", Pos(0, 19, LocalPath("<test>")), Pos(0, 23, LocalPath("<test>")), ), Token( "RTMPL2", "]]", Pos(0, 24, LocalPath("<test>")), Pos(0, 26, LocalPath("<test>")), ), Token( "TEXT", "jik", Pos(0, 26, LocalPath("<test>")), Pos(0, 29, LocalPath("<test>")), ), ] == list(tokenize("abc $[[ job.job_id.name ]]jik", Pos(0, 0, LocalPath("<test>"))))
27.22963
88
0.389282
355
3,676
3.980282
0.132394
0.127389
0.226469
0.240623
0.88535
0.832272
0.832272
0.818117
0.772116
0.685067
0
0.053871
0.399075
3,676
134
89
27.432836
0.585785
0
0
0.798387
0
0
0.137106
0
0
0
0
0
0.040323
1
0.040323
true
0
0.016129
0
0.056452
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
10
ea704109716504889dc7cdab96e26bbaabb75034
8,140
py
Python
tests/test_sync.py
RalphWalters/thredo
ea109c693036764dd192527f9b6bba18d3b18042
[ "MIT" ]
340
2018-07-23T18:21:56.000Z
2021-12-11T05:50:58.000Z
tests/test_sync.py
RalphWalters/thredo
ea109c693036764dd192527f9b6bba18d3b18042
[ "MIT" ]
6
2018-07-31T11:52:56.000Z
2019-11-25T19:52:32.000Z
tests/test_sync.py
RalphWalters/thredo
ea109c693036764dd192527f9b6bba18d3b18042
[ "MIT" ]
25
2018-07-27T06:09:05.000Z
2022-03-13T12:53:23.000Z
import thredo def test_event_wait(): evt = thredo.Event() result = [] def waiter(): evt.wait() result.append('waiter') def main(): t = thredo.spawn(waiter) result.append('start') evt.set() t.join() thredo.run(main) assert result == ['start', 'waiter'] evt.clear() assert not evt.is_set() def test_event_wait_cancel(): evt = thredo.Event() result = [] def waiter(): try: evt.wait() result.append('waiter') except thredo.ThreadCancelled: result.append('cancel') def main(): t = thredo.spawn(waiter) result.append('start') t.cancel() thredo.run(main) assert result == ['start', 'cancel'] def test_lock(): lock = thredo.Lock() result = [] def child(): with lock: result.append('child') def main(): lock.acquire() if lock.locked(): result.append('locked') try: t = thredo.spawn(child) result.append('parent') finally: lock.release() t.join() thredo.run(main) assert result == ['locked', 'parent', 'child'] def test_lock_cancel(): lock = thredo.Lock() result = [] def child(): try: with lock: result.append('child') except thredo.ThreadCancelled: result.append('cancel') def main(): lock.acquire() try: t = thredo.spawn(child) result.append('parent') t.cancel() finally: lock.release() thredo.run(main) assert result == ['parent', 'cancel'] def test_lock_race(): lock = thredo.Lock() evt = thredo.Event() n = 0 def incr(count): nonlocal n evt.wait() while count > 0: with lock: n += 1 count -=1 def decr(count): nonlocal n evt.wait() while count > 0: with lock: n -= 1 count -= 1 def main(): t1 = thredo.spawn(incr, 10000) t2 = thredo.spawn(decr, 10000) evt.set() t1.join() t2.join() thredo.run(main) assert n == 0 def test_semaphore(): lock = thredo.Semaphore() result = [] def child(): with lock: result.append('child') def main(): lock.acquire() result.append(lock.value) try: t = thredo.spawn(child) result.append('parent') finally: lock.release() t.join() thredo.run(main) assert result == [0, 'parent', 'child'] def test_semaphore_cancel(): lock = thredo.Semaphore() result = [] def child(): try: 
with lock: result.append('child') except thredo.ThreadCancelled: result.append('cancel') def main(): lock.acquire() try: t = thredo.spawn(child) result.append('parent') t.cancel() finally: lock.release() thredo.run(main) assert result == ['parent', 'cancel'] def test_semaphore_race(): lock = thredo.Semaphore() evt = thredo.Event() n = 0 def incr(count): nonlocal n evt.wait() while count > 0: with lock: n += 1 count -=1 def decr(count): nonlocal n evt.wait() while count > 0: with lock: n -= 1 count -= 1 def main(): t1 = thredo.spawn(incr, 10000) t2 = thredo.spawn(decr, 10000) evt.set() t1.join() t2.join() thredo.run(main) assert n == 0 def test_rlock(): lock = thredo.RLock() result = [] def child(): with lock: result.append('child') def main(): lock.acquire() if lock.locked(): result.append('locked') try: t = thredo.spawn(child) result.append('parent') finally: lock.release() t.join() thredo.run(main) assert result == ['locked', 'parent', 'child'] def test_rlock_cancel(): lock = thredo.RLock() result = [] def child(): try: with lock: result.append('child') except thredo.ThreadCancelled: result.append('cancel') def main(): lock.acquire() try: t = thredo.spawn(child) result.append('parent') t.cancel() finally: lock.release() thredo.run(main) assert result == ['parent', 'cancel'] def test_rlock_race(): lock = thredo.RLock() evt = thredo.Event() n = 0 def incr(count): nonlocal n evt.wait() while count > 0: with lock: n += 1 count -=1 def decr(count): nonlocal n evt.wait() while count > 0: with lock: n -= 1 count -= 1 def main(): t1 = thredo.spawn(incr, 10000) t2 = thredo.spawn(decr, 10000) evt.set() t1.join() t2.join() thredo.run(main) assert n == 0 def test_condition(): lock = thredo.Condition() result = [] def child(): with lock: result.append('child') def main(): lock.acquire() if lock.locked(): result.append('locked') try: t = thredo.spawn(child) result.append('parent') finally: lock.release() t.join() thredo.run(main) assert result == ['locked', 'parent', 'child'] 
def test_condition_cancel(): lock = thredo.Condition() result = [] def child(): try: with lock: result.append('child') except thredo.ThreadCancelled: result.append('cancel') def main(): lock.acquire() try: t = thredo.spawn(child) result.append('parent') t.cancel() finally: lock.release() thredo.run(main) assert result == ['parent', 'cancel'] def test_condition_race(): lock = thredo.Condition() evt = thredo.Event() n = 0 def incr(count): nonlocal n evt.wait() while count > 0: with lock: n += 1 count -=1 def decr(count): nonlocal n evt.wait() while count > 0: with lock: n -= 1 count -= 1 def main(): t1 = thredo.spawn(incr, 10000) t2 = thredo.spawn(decr, 10000) evt.set() t1.join() t2.join() thredo.run(main) assert n == 0 def test_condition_wait_notify(): n = 0 lock = thredo.Condition(thredo.Lock()) result = [] def waiter(): current = n while True: with lock: while current == n: lock.wait() result.append(('consume', n)) current = n if n >= 5: break def producer(): nonlocal n while n < 5: thredo.sleep(0.1) with lock: n += 1 result.append(('produce', n)) lock.notify() def main(): t1 = thredo.spawn(waiter) t2 = thredo.spawn(producer) t1.join() t2.join() thredo.run(main) assert result == [('produce',1), ('consume',1), ('produce',2), ('consume',2), ('produce',3), ('consume',3), ('produce',4), ('consume',4), ('produce',5), ('consume',5)]
21.364829
51
0.450737
815
8,140
4.466258
0.076074
0.102198
0.053571
0.078297
0.820055
0.800824
0.736813
0.728571
0.707692
0.684615
0
0.023236
0.42371
8,140
380
52
21.421053
0.752718
0
0
0.869841
0
0
0.045946
0
0
0
0
0
0.050794
1
0.15873
false
0
0.003175
0
0.161905
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
57953c7a8f05b1c0e16f7a9000cc7c2fb8f07252
90
py
Python
firstapp/tasks/__init__.py
mudong1991/DevOpsApi
8cc880c7c628f04492427fe73a1a684eadb94e84
[ "Apache-2.0" ]
1
2017-12-22T04:12:10.000Z
2017-12-22T04:12:10.000Z
firstapp/tasks/__init__.py
mudong1991/DevOpsApi
8cc880c7c628f04492427fe73a1a684eadb94e84
[ "Apache-2.0" ]
1
2020-01-08T01:49:03.000Z
2020-01-08T01:49:03.000Z
firstapp/tasks/__init__.py
mudong1991/DevOpsApi
8cc880c7c628f04492427fe73a1a684eadb94e84
[ "Apache-2.0" ]
null
null
null
from app_install_task import * from project_deploy_task import * from get_host_ps import *
30
33
0.844444
15
90
4.666667
0.666667
0.285714
0.4
0
0
0
0
0
0
0
0
0
0.122222
90
3
34
30
0.886076
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
57dcea89c33fd62468629ce59fbd837fd923d914
98
py
Python
HelloWorld/numbericoperators.py
BrandonP321/Python-masterclass
fac81fe4f8acfa37076820405d96132f9f23b311
[ "MIT" ]
null
null
null
HelloWorld/numbericoperators.py
BrandonP321/Python-masterclass
fac81fe4f8acfa37076820405d96132f9f23b311
[ "MIT" ]
null
null
null
HelloWorld/numbericoperators.py
BrandonP321/Python-masterclass
fac81fe4f8acfa37076820405d96132f9f23b311
[ "MIT" ]
null
null
null
a = 15 b = 4 print(a + b) print(a - b) print(a * b) print(a / b) print(a // b) print(a % b)
12.25
14
0.479592
22
98
2.136364
0.227273
0.765957
0.893617
1.276596
0.893617
0.893617
0.893617
0.893617
0.893617
0.893617
0
0.044118
0.306122
98
8
15
12.25
0.647059
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.75
1
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
12
17b0c2d0e22d26da08c700f5a954721a612d55ca
130
py
Python
django_any/__init__.py
lincolnloop/django-whatever
9009ff46308f9ddf28cd5e9656f47e0067dc5ad0
[ "MIT" ]
null
null
null
django_any/__init__.py
lincolnloop/django-whatever
9009ff46308f9ddf28cd5e9656f47e0067dc5ad0
[ "MIT" ]
null
null
null
django_any/__init__.py
lincolnloop/django-whatever
9009ff46308f9ddf28cd5e9656f47e0067dc5ad0
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from django_any.forms import any_form_field, any_form from django_any.models import any_field, any_model
26
53
0.776923
22
130
4.272727
0.545455
0.212766
0.276596
0
0
0
0
0
0
0
0
0.008772
0.123077
130
4
54
32.5
0.815789
0.161538
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
17bd882e2f54a7a36c0035b55c52ca06ccdb3b06
22,690
py
Python
Scripts/sims4communitylib/debug/traits/_auto_apply_traits.py
ColonolNutty/Sims4CommunityLibrary
684f28dc3c7deb4d9fd520e21e63942b65a91d31
[ "CC-BY-4.0" ]
118
2019-08-31T04:33:18.000Z
2022-03-28T21:12:14.000Z
Scripts/sims4communitylib/debug/traits/_auto_apply_traits.py
ColonolNutty/Sims4CommunityLibrary
684f28dc3c7deb4d9fd520e21e63942b65a91d31
[ "CC-BY-4.0" ]
15
2019-12-05T01:29:46.000Z
2022-02-18T17:13:46.000Z
Scripts/sims4communitylib/debug/traits/_auto_apply_traits.py
ColonolNutty/Sims4CommunityLibrary
684f28dc3c7deb4d9fd520e21e63942b65a91d31
[ "CC-BY-4.0" ]
28
2019-09-07T04:11:05.000Z
2022-02-07T18:31:40.000Z
""" The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0). https://creativecommons.org/licenses/by/4.0/ https://creativecommons.org/licenses/by/4.0/legalcode Copyright (c) COLONOLNUTTY """ from sims.sim_info import SimInfo from sims4communitylib.enums.traits_enum import CommonTraitId from sims4communitylib.events.event_handling.common_event_registry import CommonEventRegistry from sims4communitylib.events.sim.events.sim_spawned import S4CLSimSpawnedEvent from sims4communitylib.modinfo import ModInfo from sims4communitylib.utils.sims.common_gender_utils import CommonGenderUtils from sims4communitylib.utils.sims.common_species_utils import CommonSpeciesUtils from sims4communitylib.utils.sims.common_trait_utils import CommonTraitUtils class _S4CLAutoApplyTraits: """ Auto apply the S4CL main traits. """ def _try_apply_traits(self, sim_info: SimInfo): if CommonSpeciesUtils.is_human(sim_info): # Main Trait CommonTraitUtils.remove_trait( sim_info, CommonTraitId.S4CL_MAIN_TRAIT_LARGE_DOG, CommonTraitId.S4CL_MAIN_TRAIT_SMALL_DOG, CommonTraitId.S4CL_MAIN_TRAIT_CAT, CommonTraitId.S4CL_MAIN_TRAIT_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_HUMAN): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_HUMAN) elif CommonSpeciesUtils.is_large_dog(sim_info): # Main Trait CommonTraitUtils.remove_trait( sim_info, CommonTraitId.S4CL_MAIN_TRAIT_HUMAN, CommonTraitId.S4CL_MAIN_TRAIT_SMALL_DOG, CommonTraitId.S4CL_MAIN_TRAIT_CAT, CommonTraitId.S4CL_MAIN_TRAIT_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_LARGE_DOG): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_LARGE_DOG) # Toilet Standing/Sitting/Unknown if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_LARGE_DOG)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_LARGE_DOG)\ and 
not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_UNKNOWN): CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_TOILET_STANDING, CommonTraitId.GENDER_OPTIONS_TOILET_SITTING, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_FOX, ) if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_LARGE_DOG) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_LARGE_DOG) # Can Impregnate CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_LARGE_DOG)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_LARGE_DOG): if CommonGenderUtils.is_male(sim_info): if CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_LARGE_DOG) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_LARGE_DOG) else: 
CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_LARGE_DOG) # Can Be Impregnated CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_LARGE_DOG)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_LARGE_DOG): if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_LARGE_DOG) else: if CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_LARGE_DOG) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_LARGE_DOG) elif CommonSpeciesUtils.is_small_dog(sim_info): # Main Trait CommonTraitUtils.remove_trait( sim_info, CommonTraitId.S4CL_MAIN_TRAIT_HUMAN, CommonTraitId.S4CL_MAIN_TRAIT_LARGE_DOG, CommonTraitId.S4CL_MAIN_TRAIT_CAT, CommonTraitId.S4CL_MAIN_TRAIT_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_SMALL_DOG): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_SMALL_DOG) # Toilet Standing/Sitting/Unknown CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_TOILET_STANDING, 
CommonTraitId.GENDER_OPTIONS_TOILET_SITTING, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_FOX, ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_SMALL_DOG)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_SMALL_DOG)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_UNKNOWN): if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_SMALL_DOG) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_SMALL_DOG) # Can Impregnate CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_SMALL_DOG)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_SMALL_DOG): if CommonGenderUtils.is_male(sim_info): if CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_SMALL_DOG) else: 
CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_SMALL_DOG) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_SMALL_DOG) # Can Be Impregnated CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_SMALL_DOG)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_SMALL_DOG): if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_SMALL_DOG) else: if CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_SMALL_DOG) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_SMALL_DOG) elif CommonSpeciesUtils.is_cat(sim_info): # Main Trait CommonTraitUtils.remove_trait( sim_info, CommonTraitId.S4CL_MAIN_TRAIT_HUMAN, CommonTraitId.S4CL_MAIN_TRAIT_LARGE_DOG, CommonTraitId.S4CL_MAIN_TRAIT_SMALL_DOG, CommonTraitId.S4CL_MAIN_TRAIT_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_CAT): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_CAT) # Toilet 
Standing/Sitting/Unknown CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_TOILET_STANDING, CommonTraitId.GENDER_OPTIONS_TOILET_SITTING, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_FOX, ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_CAT)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_CAT)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_UNKNOWN): if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_CAT) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_CAT) # Can Impregnate CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_CAT)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_CAT): if CommonGenderUtils.is_male(sim_info): if CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): 
CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_CAT) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_CAT) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_CAT) # Can Be Impregnated CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_FOX, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_FOX ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_CAT)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_CAT): if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_CAT) else: if CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_CAT) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_CAT) elif CommonSpeciesUtils.is_fox(sim_info): # Main Trait CommonTraitUtils.remove_trait( sim_info, CommonTraitId.S4CL_MAIN_TRAIT_HUMAN, CommonTraitId.S4CL_MAIN_TRAIT_LARGE_DOG, CommonTraitId.S4CL_MAIN_TRAIT_SMALL_DOG, CommonTraitId.S4CL_MAIN_TRAIT_CAT ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_FOX): 
CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_MAIN_TRAIT_FOX) # Toilet Standing/Sitting/Unknown CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_TOILET_STANDING, CommonTraitId.GENDER_OPTIONS_TOILET_SITTING, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_CAT, ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_FOX)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_FOX)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_UNKNOWN): if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_STANDING_FOX) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_TOILET_SITTING_FOX) # Can Impregnate CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_CAT ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_FOX)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_FOX): if CommonGenderUtils.is_male(sim_info): if 
CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_IMPREGNATE_FOX) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_FOX) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_IMPREGNATE_FOX) # Can Be Impregnated CommonTraitUtils.remove_trait( sim_info, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED, CommonTraitId.GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_LARGE_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_SMALL_DOG, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_CAT, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_CAT ) if not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_FOX)\ and not CommonTraitUtils.has_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_FOX): if CommonGenderUtils.is_male(sim_info): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_FOX) else: if CommonTraitUtils.has_trait(sim_info, CommonTraitId.PREGNANCY_OPTIONS_PET_CAN_REPRODUCE): CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_BE_IMPREGNATED_FOX) else: CommonTraitUtils.add_trait(sim_info, CommonTraitId.S4CL_GENDER_OPTIONS_PREGNANCY_CAN_NOT_BE_IMPREGNATED_FOX) @CommonEventRegistry.handle_events(ModInfo.get_identity()) def _common_auto_apply_traits_on_sim_spawned(event_data: S4CLSimSpawnedEvent): _S4CLAutoApplyTraits()._try_apply_traits(event_data.sim_info)
69.388379
143
0.720141
2,389
22,690
6.254918
0.03851
0.1843
0.203172
0.265007
0.940039
0.930938
0.930938
0.930938
0.929465
0.916282
0
0.010607
0.239665
22,690
326
144
69.601227
0.855553
0.026708
0
0.701695
0
0
0
0
0
0
0
0
0
1
0.00678
false
0
0.027119
0
0.037288
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
aa44fdbb2788510df291a56c7a14ebd75acdb9a0
197,352
py
Python
clients/python/generated/swaggyjenkins/api/blue_ocean_api.py
cliffano/jenkins-api-clients-generator
522d02b3a130a29471df5ec1d3d22c822b3d0813
[ "MIT" ]
null
null
null
clients/python/generated/swaggyjenkins/api/blue_ocean_api.py
cliffano/jenkins-api-clients-generator
522d02b3a130a29471df5ec1d3d22c822b3d0813
[ "MIT" ]
null
null
null
clients/python/generated/swaggyjenkins/api/blue_ocean_api.py
cliffano/jenkins-api-clients-generator
522d02b3a130a29471df5ec1d3d22c822b3d0813
[ "MIT" ]
null
null
null
""" Swaggy Jenkins Jenkins API clients generated from Swagger / Open API specification # noqa: E501 The version of the OpenAPI document: 1.1.2-pre.0 Contact: blah@cliffano.com Generated by: https://openapi-generator.tech """ import re # noqa: F401 import sys # noqa: F401 from swaggyjenkins.api_client import ApiClient, Endpoint as _Endpoint from swaggyjenkins.model_utils import ( # noqa: F401 check_allowed_values, check_validations, date, datetime, file_type, none_type, validate_and_convert_types ) from swaggyjenkins.model.branch_impl import BranchImpl from swaggyjenkins.model.favorite_impl import FavoriteImpl from swaggyjenkins.model.github_scm import GithubScm from swaggyjenkins.model.multibranch_pipeline import MultibranchPipeline from swaggyjenkins.model.organisation import Organisation from swaggyjenkins.model.organisations import Organisations from swaggyjenkins.model.pipeline import Pipeline from swaggyjenkins.model.pipeline_activities import PipelineActivities from swaggyjenkins.model.pipeline_folder_impl import PipelineFolderImpl from swaggyjenkins.model.pipeline_impl import PipelineImpl from swaggyjenkins.model.pipeline_queue import PipelineQueue from swaggyjenkins.model.pipeline_run import PipelineRun from swaggyjenkins.model.pipeline_run_node import PipelineRunNode from swaggyjenkins.model.pipeline_run_node_steps import PipelineRunNodeSteps from swaggyjenkins.model.pipeline_run_nodes import PipelineRunNodes from swaggyjenkins.model.pipeline_runs import PipelineRuns from swaggyjenkins.model.pipeline_step_impl import PipelineStepImpl from swaggyjenkins.model.pipelines import Pipelines from swaggyjenkins.model.queue_item_impl import QueueItemImpl from swaggyjenkins.model.scm_organisations import ScmOrganisations from swaggyjenkins.model.user import User from swaggyjenkins.model.user_favorites import UserFavorites class BlueOceanApi(object): """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class 
manually. """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client self.delete_pipeline_queue_item_endpoint = _Endpoint( settings={ 'response_type': None, 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/queue/{queue}', 'operation_id': 'delete_pipeline_queue_item', 'http_method': 'DELETE', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'queue', ], 'required': [ 'organization', 'pipeline', 'queue', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'queue': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'queue': 'queue', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'queue': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [], 'content_type': [], }, api_client=api_client ) self.get_authenticated_user_endpoint = _Endpoint( settings={ 'response_type': (User,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/user/', 'operation_id': 'get_authenticated_user', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', ], 'required': [ 'organization', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), }, 'attribute_map': { 'organization': 'organization', }, 'location_map': { 'organization': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_classes_endpoint = _Endpoint( settings={ 'response_type': (str,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/classes/{class}', 'operation_id': 'get_classes', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ '_class', ], 
'required': [ '_class', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { '_class': (str,), }, 'attribute_map': { '_class': 'class', }, 'location_map': { '_class': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_json_web_key_endpoint = _Endpoint( settings={ 'response_type': (str,), 'auth': [], 'endpoint_path': '/jwt-auth/jwks/{key}', 'operation_id': 'get_json_web_key', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'key', ], 'required': [ 'key', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'key': (int,), }, 'attribute_map': { 'key': 'key', }, 'location_map': { 'key': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_json_web_token_endpoint = _Endpoint( settings={ 'response_type': (str,), 'auth': [], 'endpoint_path': '/jwt-auth/token', 'operation_id': 'get_json_web_token', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'expiry_time_in_mins', 'max_expiry_time_in_mins', ], 'required': [], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'expiry_time_in_mins': (int,), 'max_expiry_time_in_mins': (int,), }, 'attribute_map': { 'expiry_time_in_mins': 'expiryTimeInMins', 'max_expiry_time_in_mins': 'maxExpiryTimeInMins', }, 'location_map': { 'expiry_time_in_mins': 'query', 'max_expiry_time_in_mins': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_organisation_endpoint = _Endpoint( settings={ 'response_type': (Organisation,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}', 
'operation_id': 'get_organisation', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', ], 'required': [ 'organization', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), }, 'attribute_map': { 'organization': 'organization', }, 'location_map': { 'organization': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_organisations_endpoint = _Endpoint( settings={ 'response_type': (Organisations,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/', 'operation_id': 'get_organisations', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ ], 'required': [], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { }, 'attribute_map': { }, 'location_map': { }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_endpoint = _Endpoint( settings={ 'response_type': (Pipeline,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}', 'operation_id': 'get_pipeline', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', ], 'required': [ 'organization', 'pipeline', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_activities_endpoint = _Endpoint( settings={ 'response_type': 
(PipelineActivities,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/activities', 'operation_id': 'get_pipeline_activities', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', ], 'required': [ 'organization', 'pipeline', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_branch_endpoint = _Endpoint( settings={ 'response_type': (BranchImpl,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/branches/{branch}/', 'operation_id': 'get_pipeline_branch', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'branch', ], 'required': [ 'organization', 'pipeline', 'branch', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'branch': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'branch': 'branch', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'branch': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_branch_run_endpoint = _Endpoint( settings={ 'response_type': (PipelineRun,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/branches/{branch}/runs/{run}', 'operation_id': 'get_pipeline_branch_run', 'http_method': 'GET', 'servers': None, }, 
params_map={ 'all': [ 'organization', 'pipeline', 'branch', 'run', ], 'required': [ 'organization', 'pipeline', 'branch', 'run', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'branch': (str,), 'run': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'branch': 'branch', 'run': 'run', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'branch': 'path', 'run': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_branches_endpoint = _Endpoint( settings={ 'response_type': (MultibranchPipeline,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/branches', 'operation_id': 'get_pipeline_branches', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', ], 'required': [ 'organization', 'pipeline', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_folder_endpoint = _Endpoint( settings={ 'response_type': (PipelineFolderImpl,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{folder}/', 'operation_id': 'get_pipeline_folder', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'folder', ], 'required': [ 'organization', 'folder', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 
'openapi_types': { 'organization': (str,), 'folder': (str,), }, 'attribute_map': { 'organization': 'organization', 'folder': 'folder', }, 'location_map': { 'organization': 'path', 'folder': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_folder_pipeline_endpoint = _Endpoint( settings={ 'response_type': (PipelineImpl,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{folder}/pipelines/{pipeline}', 'operation_id': 'get_pipeline_folder_pipeline', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'folder', ], 'required': [ 'organization', 'pipeline', 'folder', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'folder': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'folder': 'folder', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'folder': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_queue_endpoint = _Endpoint( settings={ 'response_type': (PipelineQueue,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/queue', 'operation_id': 'get_pipeline_queue', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', ], 'required': [ 'organization', 'pipeline', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', }, 'collection_format_map': { } }, 
headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_run_endpoint = _Endpoint( settings={ 'response_type': (PipelineRun,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}', 'operation_id': 'get_pipeline_run', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', ], 'required': [ 'organization', 'pipeline', 'run', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_run_log_endpoint = _Endpoint( settings={ 'response_type': (str,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/log', 'operation_id': 'get_pipeline_run_log', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', 'start', 'download', ], 'required': [ 'organization', 'pipeline', 'run', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), 'start': (int,), 'download': (bool,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', 'start': 'start', 'download': 'download', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', 'start': 'query', 'download': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, 
api_client=api_client ) self.get_pipeline_run_node_endpoint = _Endpoint( settings={ 'response_type': (PipelineRunNode,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/nodes/{node}', 'operation_id': 'get_pipeline_run_node', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', 'node', ], 'required': [ 'organization', 'pipeline', 'run', 'node', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), 'node': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', 'node': 'node', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', 'node': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_run_node_step_endpoint = _Endpoint( settings={ 'response_type': (PipelineStepImpl,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/nodes/{node}/steps/{step}', 'operation_id': 'get_pipeline_run_node_step', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', 'node', 'step', ], 'required': [ 'organization', 'pipeline', 'run', 'node', 'step', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), 'node': (str,), 'step': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', 'node': 'node', 'step': 'step', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', 'node': 'path', 'step': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 
'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_run_node_step_log_endpoint = _Endpoint( settings={ 'response_type': (str,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/nodes/{node}/steps/{step}/log', 'operation_id': 'get_pipeline_run_node_step_log', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', 'node', 'step', ], 'required': [ 'organization', 'pipeline', 'run', 'node', 'step', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), 'node': (str,), 'step': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', 'node': 'node', 'step': 'step', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', 'node': 'path', 'step': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_run_node_steps_endpoint = _Endpoint( settings={ 'response_type': (PipelineRunNodeSteps,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/nodes/{node}/steps', 'operation_id': 'get_pipeline_run_node_steps', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', 'node', ], 'required': [ 'organization', 'pipeline', 'run', 'node', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), 'node': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', 'node': 'node', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', 'node': 'path', }, 
'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_run_nodes_endpoint = _Endpoint( settings={ 'response_type': (PipelineRunNodes,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/nodes', 'operation_id': 'get_pipeline_run_nodes', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', ], 'required': [ 'organization', 'pipeline', 'run', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipeline_runs_endpoint = _Endpoint( settings={ 'response_type': (PipelineRuns,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs', 'operation_id': 'get_pipeline_runs', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', ], 'required': [ 'organization', 'pipeline', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_pipelines_endpoint = _Endpoint( settings={ 'response_type': (Pipelines,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': 
'/blue/rest/organizations/{organization}/pipelines/', 'operation_id': 'get_pipelines', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', ], 'required': [ 'organization', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), }, 'attribute_map': { 'organization': 'organization', }, 'location_map': { 'organization': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_scm_endpoint = _Endpoint( settings={ 'response_type': (GithubScm,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/scm/{scm}', 'operation_id': 'get_scm', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'scm', ], 'required': [ 'organization', 'scm', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'scm': (str,), }, 'attribute_map': { 'organization': 'organization', 'scm': 'scm', }, 'location_map': { 'organization': 'path', 'scm': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_scm_organisation_repositories_endpoint = _Endpoint( settings={ 'response_type': (ScmOrganisations,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/scm/{scm}/organizations/{scmOrganisation}/repositories', 'operation_id': 'get_scm_organisation_repositories', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'scm', 'scm_organisation', 'credential_id', 'page_size', 'page_number', ], 'required': [ 'organization', 'scm', 'scm_organisation', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': 
(str,), 'scm': (str,), 'scm_organisation': (str,), 'credential_id': (str,), 'page_size': (int,), 'page_number': (int,), }, 'attribute_map': { 'organization': 'organization', 'scm': 'scm', 'scm_organisation': 'scmOrganisation', 'credential_id': 'credentialId', 'page_size': 'pageSize', 'page_number': 'pageNumber', }, 'location_map': { 'organization': 'path', 'scm': 'path', 'scm_organisation': 'path', 'credential_id': 'query', 'page_size': 'query', 'page_number': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_scm_organisation_repository_endpoint = _Endpoint( settings={ 'response_type': (ScmOrganisations,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/scm/{scm}/organizations/{scmOrganisation}/repositories/{repository}', 'operation_id': 'get_scm_organisation_repository', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'scm', 'scm_organisation', 'repository', 'credential_id', ], 'required': [ 'organization', 'scm', 'scm_organisation', 'repository', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'scm': (str,), 'scm_organisation': (str,), 'repository': (str,), 'credential_id': (str,), }, 'attribute_map': { 'organization': 'organization', 'scm': 'scm', 'scm_organisation': 'scmOrganisation', 'repository': 'repository', 'credential_id': 'credentialId', }, 'location_map': { 'organization': 'path', 'scm': 'path', 'scm_organisation': 'path', 'repository': 'path', 'credential_id': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_scm_organisations_endpoint = _Endpoint( settings={ 'response_type': (ScmOrganisations,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': 
'/blue/rest/organizations/{organization}/scm/{scm}/organizations', 'operation_id': 'get_scm_organisations', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'scm', 'credential_id', ], 'required': [ 'organization', 'scm', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'scm': (str,), 'credential_id': (str,), }, 'attribute_map': { 'organization': 'organization', 'scm': 'scm', 'credential_id': 'credentialId', }, 'location_map': { 'organization': 'path', 'scm': 'path', 'credential_id': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_user_endpoint = _Endpoint( settings={ 'response_type': (User,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/users/{user}', 'operation_id': 'get_user', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', 'user', ], 'required': [ 'organization', 'user', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'user': (str,), }, 'attribute_map': { 'organization': 'organization', 'user': 'user', }, 'location_map': { 'organization': 'path', 'user': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_user_favorites_endpoint = _Endpoint( settings={ 'response_type': (UserFavorites,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/users/{user}/favorites', 'operation_id': 'get_user_favorites', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'user', ], 'required': [ 'user', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'user': (str,), }, 'attribute_map': { 
'user': 'user', }, 'location_map': { 'user': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.get_users_endpoint = _Endpoint( settings={ 'response_type': (User,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/users/', 'operation_id': 'get_users', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'organization', ], 'required': [ 'organization', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), }, 'attribute_map': { 'organization': 'organization', }, 'location_map': { 'organization': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.post_pipeline_run_endpoint = _Endpoint( settings={ 'response_type': (QueueItemImpl,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/replay', 'operation_id': 'post_pipeline_run', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', ], 'required': [ 'organization', 'pipeline', 'run', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'run': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', 'run': 'run', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'run': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.post_pipeline_runs_endpoint = _Endpoint( settings={ 'response_type': (QueueItemImpl,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs', 'operation_id': 
'post_pipeline_runs', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', ], 'required': [ 'organization', 'pipeline', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.put_pipeline_favorite_endpoint = _Endpoint( settings={ 'response_type': (FavoriteImpl,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/favorite', 'operation_id': 'put_pipeline_favorite', 'http_method': 'PUT', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'body', ], 'required': [ 'organization', 'pipeline', 'body', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'organization': (str,), 'pipeline': (str,), 'body': (bool,), }, 'attribute_map': { 'organization': 'organization', 'pipeline': 'pipeline', }, 'location_map': { 'organization': 'path', 'pipeline': 'path', 'body': 'body', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [ 'application/json' ] }, api_client=api_client ) self.put_pipeline_run_endpoint = _Endpoint( settings={ 'response_type': (PipelineRun,), 'auth': [ 'jenkins_auth' ], 'endpoint_path': '/blue/rest/organizations/{organization}/pipelines/{pipeline}/runs/{run}/stop', 'operation_id': 'put_pipeline_run', 'http_method': 'PUT', 'servers': None, }, params_map={ 'all': [ 'organization', 'pipeline', 'run', 'blocking', 'time_out_in_secs', ], 'required': [ 'organization', 'pipeline', 'run', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, 
def delete_pipeline_queue_item(self, organization, pipeline, queue, **kwargs):
    """Delete a queue item from an organization pipeline queue.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``.get()`` yields the result.

    Args:
        organization (str): Name of the organization.
        pipeline (str): Name of the pipeline.
        queue (str): Name of the queue item.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): response data only, without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input names use the
            serialized (OpenAPI document) naming. Default False.
        _content_type (str/None): force body content-type. Default None.
        _host_index (int/None): server index; default read from the
            configuration.

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Normalize every request option into kwargs so the endpoint always
    # receives the full settings set (value unchanged when already given).
    _request_options = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    for _option, _default in _request_options:
        kwargs[_option] = kwargs.get(_option, _default)
    # Positional path parameters travel through kwargs as well.
    kwargs['organization'] = organization
    kwargs['pipeline'] = pipeline
    kwargs['queue'] = queue
    return self.delete_pipeline_queue_item_endpoint.call_with_http_info(**kwargs)
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['queue'] = \ queue return self.delete_pipeline_queue_item_endpoint.call_with_http_info(**kwargs) def get_authenticated_user( self, organization, **kwargs ): """get_authenticated_user # noqa: E501 Retrieve authenticated user details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_authenticated_user(organization, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. 
def get_classes(self, _class, **kwargs):
    """Get a list of class names supported by a given class.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``.get()`` yields the result.

    Args:
        _class (str): Name of the class.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): response data only, without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent. Default True.
        _check_return_type (bool): type-check data received. Default True.
        _spec_property_naming (bool): True if input names use the
            serialized (OpenAPI document) naming. Default False.
        _content_type (str/None): force body content-type. Default None.
        _host_index (int/None): server index; default read from the
            configuration.

    Returns:
        str, or the request thread when called asynchronously.
    """
    # Fill in every request option (value unchanged when already given).
    _request_options = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    for _option, _default in _request_options:
        kwargs[_option] = kwargs.get(_option, _default)
    kwargs['_class'] = _class
    return self.get_classes_endpoint.call_with_http_info(**kwargs)
def get_json_web_key(self, key, **kwargs):
    """Retrieve a JSON Web Key.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``.get()`` yields the result.

    Args:
        key (int): Key ID received as part of JWT header field kid.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): response data only, without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent. Default True.
        _check_return_type (bool): type-check data received. Default True.
        _spec_property_naming (bool): True if input names use the
            serialized (OpenAPI document) naming. Default False.
        _content_type (str/None): force body content-type. Default None.
        _host_index (int/None): server index; default read from the
            configuration.

    Returns:
        str, or the request thread when called asynchronously.
    """
    # Fill in every request option (value unchanged when already given).
    _request_options = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    for _option, _default in _request_options:
        kwargs[_option] = kwargs.get(_option, _default)
    kwargs['key'] = key
    return self.get_json_web_key_endpoint.call_with_http_info(**kwargs)
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['key'] = \ key return self.get_json_web_key_endpoint.call_with_http_info(**kwargs) def get_json_web_token( self, **kwargs ): """get_json_web_token # noqa: E501 Retrieve JSON Web Token # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_json_web_token(async_req=True) >>> result = thread.get() Keyword Args: expiry_time_in_mins (int): Token expiry time in minutes, default: 30 minutes. [optional] max_expiry_time_in_mins (int): Maximum token expiry time in minutes, default: 480 minutes. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. 
def get_organisation(self, organization, **kwargs):
    """Retrieve organization details.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``.get()`` yields the result.

    Args:
        organization (str): Name of the organization.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): response data only, without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent. Default True.
        _check_return_type (bool): type-check data received. Default True.
        _spec_property_naming (bool): True if input names use the
            serialized (OpenAPI document) naming. Default False.
        _content_type (str/None): force body content-type. Default None.
        _host_index (int/None): server index; default read from the
            configuration.

    Returns:
        Organisation, or the request thread when called asynchronously.
    """
    # Fill in every request option (value unchanged when already given).
    _request_options = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    for _option, _default in _request_options:
        kwargs[_option] = kwargs.get(_option, _default)
    kwargs['organization'] = organization
    return self.get_organisation_endpoint.call_with_http_info(**kwargs)
def get_organisations(self, **kwargs):
    """Retrieve all organizations details.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``.get()`` yields the result.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): response data only, without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent. Default True.
        _check_return_type (bool): type-check data received. Default True.
        _spec_property_naming (bool): True if input names use the
            serialized (OpenAPI document) naming. Default False.
        _content_type (str/None): force body content-type. Default None.
        _host_index (int/None): server index; default read from the
            configuration.

    Returns:
        Organisations, or the request thread when called asynchronously.
    """
    # Fill in every request option (value unchanged when already given).
    _request_options = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    for _option, _default in _request_options:
        kwargs[_option] = kwargs.get(_option, _default)
    return self.get_organisations_endpoint.call_with_http_info(**kwargs)
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') return self.get_organisations_endpoint.call_with_http_info(**kwargs) def get_pipeline( self, organization, pipeline, **kwargs ): """get_pipeline # noqa: E501 Retrieve pipeline details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline(organization, pipeline, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. 
def get_pipeline_activities(self, organization, pipeline, **kwargs):
    """Retrieve all activities details for an organization pipeline.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``.get()`` yields the result.

    Args:
        organization (str): Name of the organization.
        pipeline (str): Name of the pipeline.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): response data only, without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent. Default True.
        _check_return_type (bool): type-check data received. Default True.
        _spec_property_naming (bool): True if input names use the
            serialized (OpenAPI document) naming. Default False.
        _content_type (str/None): force body content-type. Default None.
        _host_index (int/None): server index; default read from the
            configuration.

    Returns:
        PipelineActivities, or the request thread when called
        asynchronously.
    """
    # Fill in every request option (value unchanged when already given).
    _request_options = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    )
    for _option, _default in _request_options:
        kwargs[_option] = kwargs.get(_option, _default)
    kwargs['organization'] = organization
    kwargs['pipeline'] = pipeline
    return self.get_pipeline_activities_endpoint.call_with_http_info(**kwargs)
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline return self.get_pipeline_activities_endpoint.call_with_http_info(**kwargs) def get_pipeline_branch( self, organization, pipeline, branch, **kwargs ): """get_pipeline_branch # noqa: E501 Retrieve branch details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_branch(organization, pipeline, branch, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline branch (str): Name of the branch Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. 
_spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: BranchImpl If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['branch'] = \ branch return self.get_pipeline_branch_endpoint.call_with_http_info(**kwargs) def get_pipeline_branch_run( self, organization, pipeline, branch, run, **kwargs ): """get_pipeline_branch_run # noqa: E501 Retrieve branch run details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_branch_run(organization, pipeline, branch, run, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline branch (str): Name of the branch run (str): Name of the run Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineRun If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['branch'] = \ branch kwargs['run'] = \ run return self.get_pipeline_branch_run_endpoint.call_with_http_info(**kwargs) def get_pipeline_branches( self, organization, pipeline, **kwargs ): """get_pipeline_branches # noqa: E501 Retrieve all branches details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_branches(organization, pipeline, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. 
_spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: MultibranchPipeline If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline return self.get_pipeline_branches_endpoint.call_with_http_info(**kwargs) def get_pipeline_folder( self, organization, folder, **kwargs ): """get_pipeline_folder # noqa: E501 Retrieve pipeline folder for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_folder(organization, folder, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization folder (str): Name of the folder Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. 
_preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineFolderImpl If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['folder'] = \ folder return self.get_pipeline_folder_endpoint.call_with_http_info(**kwargs) def get_pipeline_folder_pipeline( self, organization, pipeline, folder, **kwargs ): """get_pipeline_folder_pipeline # noqa: E501 Retrieve pipeline details for an organization folder # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_folder_pipeline(organization, pipeline, folder, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline folder (str): Name of the folder Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. 
Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineImpl If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['folder'] = \ folder return self.get_pipeline_folder_pipeline_endpoint.call_with_http_info(**kwargs) def get_pipeline_queue( self, organization, pipeline, **kwargs ): """get_pipeline_queue # noqa: E501 Retrieve queue details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_queue(organization, pipeline, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline Keyword Args: _return_http_data_only (bool): response data without head status code and headers. 
Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineQueue If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline return self.get_pipeline_queue_endpoint.call_with_http_info(**kwargs) def get_pipeline_run( self, organization, pipeline, run, **kwargs ): """get_pipeline_run # noqa: E501 Retrieve run details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_run(organization, pipeline, run, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. 
_spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineRun If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run return self.get_pipeline_run_endpoint.call_with_http_info(**kwargs) def get_pipeline_run_log( self, organization, pipeline, run, **kwargs ): """get_pipeline_run_log # noqa: E501 Get log for a pipeline run # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_run_log(organization, pipeline, run, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run Keyword Args: start (int): Start position of the log. 
[optional] download (bool): Set to true in order to download the file, otherwise it's passed as a response body. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: str If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run return self.get_pipeline_run_log_endpoint.call_with_http_info(**kwargs) def get_pipeline_run_node( self, organization, pipeline, run, node, **kwargs ): """get_pipeline_run_node # noqa: E501 Retrieve run node details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_run_node(organization, pipeline, run, node, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run node (str): Name of the node Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. 
_check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineRunNode If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run kwargs['node'] = \ node return self.get_pipeline_run_node_endpoint.call_with_http_info(**kwargs) def get_pipeline_run_node_step( self, organization, pipeline, run, node, step, **kwargs ): """get_pipeline_run_node_step # noqa: E501 Retrieve run node details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_run_node_step(organization, pipeline, run, node, step, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run node (str): Name of the node step (str): Name of the step Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineStepImpl If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run kwargs['node'] = \ node kwargs['step'] = \ step return self.get_pipeline_run_node_step_endpoint.call_with_http_info(**kwargs) def get_pipeline_run_node_step_log( self, organization, pipeline, run, node, step, **kwargs ): """get_pipeline_run_node_step_log # noqa: E501 Get log for a pipeline run node step # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_run_node_step_log(organization, pipeline, run, node, step, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run node (str): Name of the node step (str): Name of the step Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. 
_check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: str If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run kwargs['node'] = \ node kwargs['step'] = \ step return self.get_pipeline_run_node_step_log_endpoint.call_with_http_info(**kwargs) def get_pipeline_run_node_steps( self, organization, pipeline, run, node, **kwargs ): """get_pipeline_run_node_steps # noqa: E501 Retrieve run node steps details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_run_node_steps(organization, pipeline, run, node, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run node (str): Name of the node Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineRunNodeSteps If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run kwargs['node'] = \ node return self.get_pipeline_run_node_steps_endpoint.call_with_http_info(**kwargs) def get_pipeline_run_nodes( self, organization, pipeline, run, **kwargs ): """get_pipeline_run_nodes # noqa: E501 Retrieve run nodes details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_run_nodes(organization, pipeline, run, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. 
_check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineRunNodes If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run return self.get_pipeline_run_nodes_endpoint.call_with_http_info(**kwargs) def get_pipeline_runs( self, organization, pipeline, **kwargs ): """get_pipeline_runs # noqa: E501 Retrieve all runs details for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipeline_runs(organization, pipeline, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineRuns If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline return self.get_pipeline_runs_endpoint.call_with_http_info(**kwargs) def get_pipelines( self, organization, **kwargs ): """get_pipelines # noqa: E501 Retrieve all pipelines details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pipelines(organization, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. 
False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: Pipelines If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization return self.get_pipelines_endpoint.call_with_http_info(**kwargs) def get_scm( self, organization, scm, **kwargs ): """get_scm # noqa: E501 Retrieve SCM details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_scm(organization, scm, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization scm (str): Name of SCM Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: GithubScm If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['scm'] = \ scm return self.get_scm_endpoint.call_with_http_info(**kwargs) def get_scm_organisation_repositories( self, organization, scm, scm_organisation, **kwargs ): """get_scm_organisation_repositories # noqa: E501 Retrieve SCM organization repositories details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_scm_organisation_repositories(organization, scm, scm_organisation, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization scm (str): Name of SCM scm_organisation (str): Name of the SCM organization Keyword Args: credential_id (str): Credential ID. [optional] page_size (int): Number of items in a page. [optional] page_number (int): Page number. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: ScmOrganisations If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['scm'] = \ scm kwargs['scm_organisation'] = \ scm_organisation return self.get_scm_organisation_repositories_endpoint.call_with_http_info(**kwargs) def get_scm_organisation_repository( self, organization, scm, scm_organisation, repository, **kwargs ): """get_scm_organisation_repository # noqa: E501 Retrieve SCM organization repository details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_scm_organisation_repository(organization, scm, scm_organisation, repository, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization scm (str): Name of SCM scm_organisation (str): Name of the SCM organization repository (str): Name of the SCM repository Keyword Args: credential_id (str): Credential ID. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. 
_check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: ScmOrganisations If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['scm'] = \ scm kwargs['scm_organisation'] = \ scm_organisation kwargs['repository'] = \ repository return self.get_scm_organisation_repository_endpoint.call_with_http_info(**kwargs) def get_scm_organisations( self, organization, scm, **kwargs ): """get_scm_organisations # noqa: E501 Retrieve SCM organizations details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_scm_organisations(organization, scm, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization scm (str): Name of SCM Keyword Args: credential_id (str): Credential ID. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: ScmOrganisations If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['scm'] = \ scm return self.get_scm_organisations_endpoint.call_with_http_info(**kwargs) def get_user( self, organization, user, **kwargs ): """get_user # noqa: E501 Retrieve user details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user(organization, user, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization user (str): Name of the user Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. 
False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: User If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['user'] = \ user return self.get_user_endpoint.call_with_http_info(**kwargs) def get_user_favorites( self, user, **kwargs ): """get_user_favorites # noqa: E501 Retrieve user favorites details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user_favorites(user, async_req=True) >>> result = thread.get() Args: user (str): Name of the user Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: UserFavorites If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['user'] = \ user return self.get_user_favorites_endpoint.call_with_http_info(**kwargs) def get_users( self, organization, **kwargs ): """get_users # noqa: E501 Retrieve users details for an organization # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_users(organization, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: User If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization return self.get_users_endpoint.call_with_http_info(**kwargs) def post_pipeline_run( self, organization, pipeline, run, **kwargs ): """post_pipeline_run # noqa: E501 Replay an organization pipeline run # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.post_pipeline_run(organization, pipeline, run, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. 
_spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: QueueItemImpl If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run return self.post_pipeline_run_endpoint.call_with_http_info(**kwargs) def post_pipeline_runs( self, organization, pipeline, **kwargs ): """post_pipeline_runs # noqa: E501 Start a build for an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.post_pipeline_runs(organization, pipeline, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. 
_preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: QueueItemImpl If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline return self.post_pipeline_runs_endpoint.call_with_http_info(**kwargs) def put_pipeline_favorite( self, organization, pipeline, body, **kwargs ): """put_pipeline_favorite # noqa: E501 Favorite/unfavorite a pipeline # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.put_pipeline_favorite(organization, pipeline, body, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline body (bool): Set JSON string body to {\"favorite\": true} to favorite, set value to false to unfavorite Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. 
_check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: FavoriteImpl If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['body'] = \ body return self.put_pipeline_favorite_endpoint.call_with_http_info(**kwargs) def put_pipeline_run( self, organization, pipeline, run, **kwargs ): """put_pipeline_run # noqa: E501 Stop a build of an organization pipeline # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.put_pipeline_run(organization, pipeline, run, async_req=True) >>> result = thread.get() Args: organization (str): Name of the organization pipeline (str): Name of the pipeline run (str): Name of the run Keyword Args: blocking (str): Set to true to make blocking stop, default: false. [optional] time_out_in_secs (int): Timeout in seconds, default: 10 seconds. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PipelineRun If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['organization'] = \ organization kwargs['pipeline'] = \ pipeline kwargs['run'] = \ run return self.put_pipeline_run_endpoint.call_with_http_info(**kwargs) def search( self, q, **kwargs ): """search # noqa: E501 Search for any resource details # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search(q, async_req=True) >>> result = thread.get() Args: q (str): Query string Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. 
snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: str If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['q'] = \ q return self.search_endpoint.call_with_http_info(**kwargs) def search_classes( self, q, **kwargs ): """search_classes # noqa: E501 Get classes details # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_classes(q, async_req=True) >>> result = thread.get() Args: q (str): Query string containing an array of class names Keyword Args: _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. 
_check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: str If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_spec_property_naming'] = kwargs.get( '_spec_property_naming', False ) kwargs['_content_type'] = kwargs.get( '_content_type') kwargs['_host_index'] = kwargs.get('_host_index') kwargs['q'] = \ q return self.search_classes_endpoint.call_with_http_info(**kwargs)
36.743996
143
0.490844
17,808
197,352
5.215353
0.016959
0.035176
0.020716
0.021513
0.931629
0.913292
0.900415
0.892231
0.880818
0.86597
0
0.002484
0.424637
197,352
5,370
144
36.750838
0.815445
0.370404
0
0.718384
1
0.001671
0.247734
0.059377
0
0
0
0
0
1
0.010585
false
0
0.007242
0
0.028412
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
aa47a157f997786204c42ab20a02853c963f2711
1,454
py
Python
ex008.py
vinibispo/projecteuler
a8e67e7cfda663958bd59884c3d9c72a833cf81e
[ "MIT" ]
null
null
null
ex008.py
vinibispo/projecteuler
a8e67e7cfda663958bd59884c3d9c72a833cf81e
[ "MIT" ]
null
null
null
ex008.py
vinibispo/projecteuler
a8e67e7cfda663958bd59884c3d9c72a833cf81e
[ "MIT" ]
null
null
null
numbers='7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450' def getBiggestProductFromHorizontalLines(numbers, digitNum): largest = 0 import functools for i in range(0, len(numbers) - digitNum + 1): x = numbers[i:i+digitNum] x = list(x) x = map(int, x) multiply = functools.reduce(lambda j,y: j * y, x) if multiply > largest: largest = multiply return largest biggest = getBiggestProductFromHorizontalLines(numbers, 13) print(biggest)
90.875
1,010
0.887208
53
1,454
24.339623
0.54717
0.066667
0
0
0
0
0
0
0
0
0
0.752809
0.081843
1,454
15
1,011
96.933333
0.213483
0
0
0
0
0
0.687758
0.687758
0
1
0
0
0
1
0.071429
false
0
0.071429
0
0.214286
0.071429
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
7
a4aeb071e21dc4517715fa6007aad8819c42a9bd
208,395
py
Python
tests/wasp1/AllAnswerSets/3_COL_l103_COL_l10.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
19
2015-12-03T08:53:45.000Z
2022-03-31T02:09:43.000Z
tests/wasp1/AllAnswerSets/3_COL_l103_COL_l10.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
80
2017-11-25T07:57:32.000Z
2018-06-10T19:03:30.000Z
tests/wasp1/AllAnswerSets/3_COL_l103_COL_l10.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
6
2015-01-15T07:51:48.000Z
2020-06-18T14:47:48.000Z
input = """ node(X) :- edge(X,Y). node(Y) :- edge(X,Y). colored(X,r) v colored(X,g) v colored(X,b) :- node(X). :- edge(X,Y), colored(X,C), colored(Y,C). edge( n2, n4). edge( n2, n3). edge( n3, n5). edge( n4, n6). edge( n4, n5). edge( n5, n7). edge( n6, n8). edge( n6, n7). edge( n7, n9). edge( n8,n10). edge( n8, n9). edge( n9,n11). edge(n10,n11). """ output = """ {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), 
colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), 
colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), 
colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), 
colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), 
colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), 
colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), 
colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,b), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), 
colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), 
colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), 
colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), 
colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), 
colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), 
colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), 
colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), 
colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), 
colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), 
colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), 
colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,r), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,r), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,g), colored(n11,r), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), 
colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), 
colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,r), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), 
colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), 
colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,g), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,b), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), 
colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,g), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,b), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), 
colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,b), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), 
colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,g), colored(n3,r), colored(n4,r), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,g), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), 
colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,b), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,g), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,b), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), 
colored(n4,g), colored(n5,b), colored(n6,b), colored(n7,r), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,b), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), 
node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,b), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,b), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,b), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,b), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), 
edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} {colored(n10,r), colored(n11,g), colored(n2,r), colored(n3,g), colored(n4,g), colored(n5,r), colored(n6,r), colored(n7,g), colored(n8,g), colored(n9,r), edge(n10,n11), edge(n2,n3), edge(n2,n4), edge(n3,n5), edge(n4,n5), edge(n4,n6), edge(n5,n7), edge(n6,n7), edge(n6,n8), edge(n7,n9), edge(n8,n10), edge(n8,n9), edge(n9,n11), node(n10), node(n11), node(n2), node(n3), node(n4), node(n5), node(n6), node(n7), node(n8), node(n9)} """
414.304175
427
0.635289
43,327
208,395
3.055624
0.000508
0.088103
0.058796
0.044142
0.999388
0.998754
0.998754
0.998754
0.998754
0.998754
0
0.136645
0.077334
208,395
502
428
415.129482
0.551891
0
0
0.004008
0
0.975952
0.999846
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
a4c7d1133cceb5ef55e5cd70a219d988e6f8dae8
121
py
Python
conjure-python-verifier/python/test/client/test_import_all.py
gabeboning/conjure-python
f5747b62197703bc898432435407eca419983f17
[ "Apache-2.0" ]
null
null
null
conjure-python-verifier/python/test/client/test_import_all.py
gabeboning/conjure-python
f5747b62197703bc898432435407eca419983f17
[ "Apache-2.0" ]
null
null
null
conjure-python-verifier/python/test/client/test_import_all.py
gabeboning/conjure-python
f5747b62197703bc898432435407eca419983f17
[ "Apache-2.0" ]
null
null
null
from ..generated_integration import * def test(): pass # we just need to confirm that the above import didn't fail
24.2
69
0.727273
19
121
4.578947
0.947368
0
0
0
0
0
0
0
0
0
0
0
0.206612
121
4
70
30.25
0.90625
0.471074
0
0
1
0
0
0
0
0
0
0
0
1
0.333333
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
1
1
1
0
0
0
0
7
354b6a4742e341bac2940ac0730882f3357f09d6
88
py
Python
ws/src/san_francisco/src/san_francisco/nodes.py
DroneEmployee/ETHSanFranciscoDemo
924ee9a99d64ae9762b359afca849340a537b6f9
[ "BSD-3-Clause" ]
null
null
null
ws/src/san_francisco/src/san_francisco/nodes.py
DroneEmployee/ETHSanFranciscoDemo
924ee9a99d64ae9762b359afca849340a537b6f9
[ "BSD-3-Clause" ]
null
null
null
ws/src/san_francisco/src/san_francisco/nodes.py
DroneEmployee/ETHSanFranciscoDemo
924ee9a99d64ae9762b359afca849340a537b6f9
[ "BSD-3-Clause" ]
null
null
null
from . import san_francisco def worker_node(): san_francisco.SanFrancisco().spin()
17.6
39
0.75
11
88
5.727273
0.818182
0.380952
0
0
0
0
0
0
0
0
0
0
0.136364
88
4
40
22
0.828947
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7
10226fd899cba2734e1adeb02ac0328886406f62
31,975
py
Python
django_google_adwords/tests/models.py
MOHAMEDELADIB/django-google-adwords
32aab8ce9c1d0538388a346e8fae743167538269
[ "MIT" ]
27
2015-08-17T19:54:49.000Z
2021-11-28T19:55:33.000Z
django_google_adwords/tests/models.py
MOHAMEDELADIB/django-google-adwords
32aab8ce9c1d0538388a346e8fae743167538269
[ "MIT" ]
3
2015-06-25T05:01:04.000Z
2021-11-13T00:50:29.000Z
django_google_adwords/tests/models.py
MOHAMEDELADIB/django-google-adwords
32aab8ce9c1d0538388a346e8fae743167538269
[ "MIT" ]
12
2015-08-16T16:11:18.000Z
2021-11-04T23:52:37.000Z
from __future__ import absolute_import from datetime import date, datetime from decimal import Decimal import os from django_google_adwords.models import ReportFile, Account, Campaign, AdGroup, \ DailyAccountMetrics, DailyCampaignMetrics, DailyAdGroupMetrics, Ad, \ DailyAdMetrics from django.test.testcases import TestCase, TransactionTestCase def _get_test_media_file_path(name): return os.path.join(os.path.dirname(os.path.realpath(__file__)), 'media', name) def _get_report_file(name): report_file = ReportFile.objects.create() #: :type report_file: ReportFile report_file.save_path(_get_test_media_file_path(name)) return report_file class DjangoGoogleAdwordsTestCase(TransactionTestCase): fixtures = [ 'django_google_adwords.yaml' ] def test_account_update_from_initial(self): report_file = _get_report_file('account_report.gz') # Check that initial values are Null account = Account.objects.get(pk=1) self.assertEqual(account.account, None) self.assertEqual(account.currency, None) self.assertEqual(account.account_last_synced, None) # Run the Account report populate account.start_sync() account.sync_account(report_file=report_file) account.finish_account_sync() account.finish_sync() # Check that the Account fields have been updated account = Account.objects.get(pk=1) self.assertEqual(account.account, 'example.com.au') self.assertEqual(account.currency, 'AUD') def test_campaign_create(self): report_file = _get_report_file('campaign_report.gz') # Run the Campaign report populate account = Account.objects.get(pk=1) account.start_sync() account.sync_campaign(report_file=report_file) account.finish_campaign_sync() account.finish_sync() # Check that the Campaigns have been populated account = Account.objects.get(pk=1) campaigns = account.campaigns.all() self.assertEqual(campaigns.count(), 5) # Check campaign attributes populated correctly c1 = Campaign.objects.get(campaign_id=7679201) self.assertEqual(c1.account, account) self.assertEqual(c1.campaign, 'Campaign #1') 
self.assertEqual(c1.campaign_state, Campaign.STATE_PAUSED) self.assertEqual(c1.budget.amount, Decimal('10.0')) c2 = Campaign.objects.get(campaign_id=7679441) self.assertEqual(c2.account, account) self.assertEqual(c2.campaign, 'Campaign #2') self.assertEqual(c2.campaign_state, Campaign.STATE_PAUSED) self.assertEqual(c2.budget.amount, Decimal('3.0')) c3 = Campaign.objects.get(campaign_id=7679621) self.assertEqual(c3.account, account) self.assertEqual(c3.campaign, 'Campaign #3') self.assertEqual(c3.campaign_state, Campaign.STATE_PAUSED) self.assertEqual(c3.budget.amount, Decimal('4.0')) c4 = Campaign.objects.get(campaign_id=7756901) self.assertEqual(c4.account, account) self.assertEqual(c4.campaign, 'Campaign #4') self.assertEqual(c4.campaign_state, Campaign.STATE_REMOVED) self.assertEqual(c4.budget.amount, Decimal('15.0')) c5 = Campaign.objects.get(campaign_id=7783061) self.assertEqual(c5.account, account) self.assertEqual(c5.campaign, 'Campaign #5') self.assertEqual(c5.campaign_state, Campaign.STATE_PAUSED) self.assertEqual(c5.budget.amount, Decimal('8.0')) def test_campaign_update(self): # Run the Campaign report populate report_file = _get_report_file('campaign_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_campaign(report_file=report_file) account.finish_campaign_sync() account.finish_sync() # Check campaign attributes populated correctly c = Campaign.objects.get(campaign_id=7679201) self.assertEqual(c.campaign, 'Campaign #1') self.assertEqual(c.campaign_state, Campaign.STATE_PAUSED) self.assertEqual(c.budget.amount, Decimal('10.0')) # Run the updated Campaign report populate report_file = _get_report_file('campaign_report_update.gz') account.start_sync() account.sync_campaign(report_file=report_file) account.finish_account_sync() account.finish_sync() # Check campaign attributes updated correctly uc = Campaign.objects.get(campaign_id=7679201) self.assertEqual(uc.campaign, 'Campaign #1') self.assertEqual(uc.campaign_state, 
Campaign.STATE_ENABLED) self.assertEqual(uc.budget.amount, Decimal('100.0')) def test_ad_group_create(self): report_file = _get_report_file('adgroup_report.gz') # Run the Ad Group report populate account = Account.objects.get(pk=1) account.start_sync() account.sync_ad_group(report_file=report_file) account.finish_ad_group_sync() account.finish_sync() # Check that the Ad Groups have been populated account = Account.objects.get(pk=1) ad_groups = AdGroup.objects.filter(campaign__account=account) self.assertEqual(ad_groups.count(), 5) # Check Ad Group attributes populated correctly ad_group = AdGroup.objects.get(ad_group_id=323809001) self.assertEqual(ad_group.campaign.campaign_id, 7679201) self.assertEqual(ad_group.ad_group, 'AdGroup #1') self.assertEqual(ad_group.ad_group_state, AdGroup.STATE_PAUSED) def test_ad_group_update(self): # Run the Ad Group report populate report_file = _get_report_file('adgroup_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad_group(report_file=report_file) account.finish_ad_group_sync() account.finish_sync() # Run the Ad Group report populate to update rows report_file = _get_report_file('adgroup_report_update.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad_group(report_file=report_file) account.finish_ad_group_sync() account.finish_sync() # Check Ad Group attributes updated correctly ad_group = AdGroup.objects.get(ad_group_id=323809001) self.assertEqual(ad_group.campaign.campaign_id, 7679201) self.assertEqual(ad_group.ad_group, 'AdGroup #1') self.assertEqual(ad_group.ad_group_state, AdGroup.STATE_ENABLED) def test_ad_create(self): # Run the Ad report populate report_file = _get_report_file('ad_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad(report_file=report_file) account.finish_ad_sync() account.finish_sync() # Check that the Ads have been populated account = Account.objects.get(pk=1) ads = 
Ad.objects.filter(ad_group__campaign__account=account) self.assertEqual(ads.count(), 44) # Check Ad attributes populated correctly ad = Ad.objects.get(ad_id=40564055441) self.assertEqual(ad.ad_state, Ad.STATE_ENABLED) self.assertEqual(ad.ad_type, Ad.TYPE_TEXT_AD) self.assertEqual(ad.destination_url, 'http://example.net.au/Home.php') self.assertEqual(ad.display_url, 'example.net.au') self.assertEqual(ad.ad, 'Doncaster Real Estate') self.assertEqual(ad.description_line_1, 'Trusted by Victorian Home Owners.') self.assertEqual(ad.description_line_2, '90 Years Exp Request Free Appraisal') def test_ad_update(self): # Run the Ad report populate report_file = _get_report_file('ad_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad(report_file=report_file) account.finish_ad_sync() account.finish_sync() # Run the Ad report populate update report_file = _get_report_file('ad_report_update.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad(report_file=report_file) account.finish_ad_sync() account.finish_sync() # Check that the Ads have been updated and there are still only 9 account = Account.objects.get(pk=1) ads = Ad.objects.filter(ad_group__campaign__account=account) self.assertEqual(ads.count(), 44) # Check Ad attributes updated correctly ad = Ad.objects.get(ad_id=40564055441) self.assertEqual(ad.ad_state, Ad.STATE_ENABLED) self.assertEqual(ad.ad_type, Ad.TYPE_TEXT_AD) self.assertEqual(ad.destination_url, 'http://example.net.au/Home2.php') self.assertEqual(ad.display_url, 'example.net.au/test') self.assertEqual(ad.ad, 'Doncaster Real Estate') self.assertEqual(ad.description_line_1, 'Trusted by Victorian Home Owners. 
FTW') self.assertEqual(ad.description_line_2, '90 Years Exp Request Free Appraisal FTW') def test_daily_account_metrics_create(self): report_file = _get_report_file('account_report.gz') # Run the Account report populate account = Account.objects.get(pk=1) account.start_sync() account.sync_account(report_file=report_file) account.finish_account_sync() account.finish_sync() # Check that the Daily Account Metrics have been populated account_metrics = DailyAccountMetrics.objects.filter(account=account) self.assertEqual(account_metrics.count(), 30) # Check the fields of one of them account_metric = DailyAccountMetrics.objects.get(account=account, device=DailyAccountMetrics.DEVICE_DESKTOP, day=date(2014, 7, 28)) self.assertEqual(account_metric.avg_cpc.amount, Decimal('1.91')) self.assertEqual(account_metric.avg_cpm.amount, Decimal('1.85')) self.assertEqual(account_metric.avg_position, Decimal('1.0')) self.assertEqual(account_metric.clicks, 5) self.assertEqual(account_metric.content_lost_is_budget, Decimal('23.39')) self.assertEqual(account_metric.content_impr_share, Decimal('10.00')) self.assertEqual(account_metric.content_lost_is_rank, Decimal('73.41')) self.assertEqual(account_metric.click_conversion_rate, Decimal('0.0')) self.assertEqual(account_metric.conv_rate, Decimal('0.0')) self.assertEqual(account_metric.converted_clicks, 0) self.assertEqual(account_metric.converted_clicks, 0) self.assertEqual(account_metric.cost.amount, Decimal('9.57')) self.assertEqual(account_metric.cost_converted_click.amount, Decimal('0.00')) self.assertEqual(account_metric.cost_conv.amount, Decimal('0.00')) self.assertEqual(account_metric.cost_est_total_conv.amount, Decimal('0.00')) self.assertEqual(account_metric.ctr, Decimal('0.10')) self.assertEqual(account_metric.est_cross_device_conv, None) self.assertEqual(account_metric.est_total_conv_rate, Decimal('0.00')) self.assertEqual(account_metric.est_total_conv_value, Decimal('0.00')) 
self.assertEqual(account_metric.est_total_conv_value_click, Decimal('0.00')) self.assertEqual(account_metric.est_total_conv_value_cost, Decimal('0.00')) self.assertEqual(account_metric.est_total_conv, 0) self.assertEqual(account_metric.impressions, 5183) self.assertEqual(account_metric.invalid_click_rate, Decimal('0.00')) self.assertEqual(account_metric.invalid_clicks, 0) self.assertEqual(account_metric.search_lost_is_budget, Decimal('23.81')) self.assertEqual(account_metric.search_exact_match_is, Decimal('76.19')) self.assertEqual(account_metric.search_impr_share, Decimal('76.19')) self.assertEqual(account_metric.search_lost_is_rank, Decimal('0.00')) def test_daily_account_metrics_update(self): # Run the Account report populate report_file = _get_report_file('account_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_account(report_file=report_file) account.finish_account_sync() account.finish_sync() # Run the Account report populate with data to update report_file = _get_report_file('account_report_update.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_account(report_file=report_file) account.finish_account_sync() account.finish_sync() # Check that the Daily Account Metrics have been populated account_metrics = DailyAccountMetrics.objects.filter(account=account) self.assertEqual(account_metrics.count(), 30) # Check the fields of one of them to ensure the update occurred correctly account_metric = DailyAccountMetrics.objects.get(account=account, device=DailyAccountMetrics.DEVICE_DESKTOP, day=date(2014, 7, 28)) self.assertEqual(account_metric.avg_cpc.amount, Decimal('1.81')) self.assertEqual(account_metric.avg_cpm.amount, Decimal('1.85')) self.assertEqual(account_metric.avg_position, Decimal('1.0')) self.assertEqual(account_metric.clicks, 5) self.assertEqual(account_metric.content_lost_is_budget, Decimal('23.39')) self.assertEqual(account_metric.content_impr_share, Decimal('10.00')) 
self.assertEqual(account_metric.content_lost_is_rank, Decimal('75.41')) self.assertEqual(account_metric.click_conversion_rate, Decimal('0.0')) self.assertEqual(account_metric.conv_rate, Decimal('0.0')) self.assertEqual(account_metric.converted_clicks, 0) self.assertEqual(account_metric.converted_clicks, 0) self.assertEqual(account_metric.cost.amount, Decimal('9.57')) self.assertEqual(account_metric.cost_converted_click.amount, Decimal('0.00')) self.assertEqual(account_metric.cost_conv.amount, Decimal('0.00')) self.assertEqual(account_metric.cost_est_total_conv.amount, Decimal('0.00')) self.assertEqual(account_metric.ctr, Decimal('0.10')) self.assertEqual(account_metric.est_cross_device_conv, None) self.assertEqual(account_metric.est_total_conv_rate, Decimal('0.00')) self.assertEqual(account_metric.est_total_conv_value, Decimal('0.00')) self.assertEqual(account_metric.est_total_conv_value_click, Decimal('0.00')) self.assertEqual(account_metric.est_total_conv_value_cost, Decimal('0.00')) self.assertEqual(account_metric.est_total_conv, 0) self.assertEqual(account_metric.impressions, 5183) self.assertEqual(account_metric.invalid_click_rate, Decimal('0.00')) self.assertEqual(account_metric.invalid_clicks, 0) self.assertEqual(account_metric.search_lost_is_budget, Decimal('23.81')) self.assertEqual(account_metric.search_exact_match_is, Decimal('76.19')) self.assertEqual(account_metric.search_impr_share, Decimal('76.19')) self.assertEqual(account_metric.search_lost_is_rank, Decimal('10.00')) def test_daily_campaign_metrics_create(self): # Run the Campaign report populate report_file = _get_report_file('campaign_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_campaign(report_file=report_file) account.finish_campaign_sync() account.finish_sync() # Check campaign metrics were created campaign_metrics = DailyCampaignMetrics.objects.filter(campaign__account=account) self.assertEqual(campaign_metrics.count(), 15) # Check a row to ensure fields 
are correct campaign_metric = DailyCampaignMetrics.objects.get(campaign__campaign_id=7679201, day=date(2014, 8, 4)) self.assertEqual(campaign_metric.avg_cpc.amount, Decimal('0.00')) self.assertEqual(campaign_metric.avg_cpm.amount, Decimal('0.00')) self.assertEqual(campaign_metric.avg_position, Decimal('0.00')) self.assertEqual(campaign_metric.bid_strategy_id, 0) self.assertEqual(campaign_metric.bid_strategy_name, '') self.assertEqual(campaign_metric.bid_strategy_type, 'cpc') self.assertEqual(campaign_metric.clicks, 0) self.assertEqual(campaign_metric.content_lost_is_budget, None) self.assertEqual(campaign_metric.content_impr_share, None) self.assertEqual(campaign_metric.content_lost_is_rank, None) self.assertEqual(campaign_metric.click_conversion_rate, Decimal('0.00')) self.assertEqual(campaign_metric.conv_rate, Decimal('0.00')) self.assertEqual(campaign_metric.converted_clicks, 0) self.assertEqual(campaign_metric.converted_clicks, 0) self.assertEqual(campaign_metric.cost.amount, Decimal('0.00')) self.assertEqual(campaign_metric.cost_converted_click.amount, Decimal('0.00')) self.assertEqual(campaign_metric.cost_conv.amount, Decimal('0.00')) self.assertEqual(campaign_metric.cost_est_total_conv.amount, Decimal('0.00')) self.assertEqual(campaign_metric.ctr, Decimal('0.00')) self.assertEqual(campaign_metric.est_cross_device_conv, None) self.assertEqual(campaign_metric.est_total_conv_rate, Decimal('0.00')) self.assertEqual(campaign_metric.est_total_conv, 0) self.assertEqual(campaign_metric.est_total_conv_value, Decimal('0.00')) self.assertEqual(campaign_metric.est_total_conv_value_click, Decimal('0.00')) self.assertEqual(campaign_metric.est_total_conv_value_cost, Decimal('0.00')) self.assertEqual(campaign_metric.impressions, 0) self.assertEqual(campaign_metric.invalid_click_rate, Decimal('0.00')) self.assertEqual(campaign_metric.invalid_clicks, 0) self.assertEqual(campaign_metric.search_lost_is_budget, None) self.assertEqual(campaign_metric.search_exact_match_is, None) 
self.assertEqual(campaign_metric.search_impr_share, None) self.assertEqual(campaign_metric.search_lost_is_rank, None) def test_daily_campaign_metrics_update(self): # Run the Campaign report populate report_file = _get_report_file('campaign_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_campaign(report_file=report_file) account.finish_campaign_sync() account.finish_sync() # Run the Campaign report populate to update rows report_file = _get_report_file('campaign_report_update.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_campaign(report_file=report_file) account.finish_campaign_sync() account.finish_sync() campaign_metrics = DailyCampaignMetrics.objects.filter(campaign__account=account) self.assertEqual(campaign_metrics.count(), 15) # Check a row to ensure fields are correct campaign_metric = DailyCampaignMetrics.objects.get(campaign__campaign_id=7679201, day=date(2014, 8, 4)) self.assertEqual(campaign_metric.avg_cpc.amount, Decimal('1.00')) self.assertEqual(campaign_metric.avg_cpm.amount, Decimal('0.00')) self.assertEqual(campaign_metric.avg_position, Decimal('0.00')) self.assertEqual(campaign_metric.bid_strategy_id, 0) self.assertEqual(campaign_metric.bid_strategy_name, '') self.assertEqual(campaign_metric.bid_strategy_type, 'cpc') self.assertEqual(campaign_metric.clicks, 0) self.assertEqual(campaign_metric.content_lost_is_budget, None) self.assertEqual(campaign_metric.content_impr_share, None) self.assertEqual(campaign_metric.content_lost_is_rank, None) self.assertEqual(campaign_metric.click_conversion_rate, Decimal('0.00')) self.assertEqual(campaign_metric.conv_rate, Decimal('0.00')) self.assertEqual(campaign_metric.converted_clicks, 0) self.assertEqual(campaign_metric.converted_clicks, 0) self.assertEqual(campaign_metric.cost.amount, Decimal('0.00')) self.assertEqual(campaign_metric.cost_converted_click.amount, Decimal('0.00')) self.assertEqual(campaign_metric.cost_conv.amount, Decimal('0.00')) 
self.assertEqual(campaign_metric.cost_est_total_conv.amount, Decimal('0.00')) self.assertEqual(campaign_metric.ctr, Decimal('0.00')) self.assertEqual(campaign_metric.est_cross_device_conv, None) self.assertEqual(campaign_metric.est_total_conv_rate, Decimal('0.00')) self.assertEqual(campaign_metric.est_total_conv, 0) self.assertEqual(campaign_metric.est_total_conv_value, Decimal('0.00')) self.assertEqual(campaign_metric.est_total_conv_value_click, Decimal('0.00')) self.assertEqual(campaign_metric.est_total_conv_value_cost, Decimal('0.00')) self.assertEqual(campaign_metric.impressions, 50) self.assertEqual(campaign_metric.invalid_click_rate, Decimal('0.00')) self.assertEqual(campaign_metric.invalid_clicks, 0) self.assertEqual(campaign_metric.search_lost_is_budget, None) self.assertEqual(campaign_metric.search_exact_match_is, None) self.assertEqual(campaign_metric.search_impr_share, None) self.assertEqual(campaign_metric.search_lost_is_rank, None) def test_daily_ad_group_metrics_create(self): # Run the Ad Group report populate report_file = _get_report_file('adgroup_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad_group(report_file=report_file) account.finish_ad_group_sync() account.finish_sync() # Check Ad Group metrics were created ad_group_metrics = DailyAdGroupMetrics.objects.filter(ad_group__campaign__account=account) self.assertEqual(ad_group_metrics.count(), 50) # Check a row to ensure fields are correct ad_group_metric = DailyAdGroupMetrics.objects.get(ad_group__ad_group_id=323809001, day=date(2014, 7, 28)) self.assertEqual(ad_group_metric.max_cpa_converted_clicks, None) self.assertEqual(ad_group_metric.value_est_total_conv, Decimal('0.0')) self.assertEqual(ad_group_metric.bid_strategy_id, 0) self.assertEqual(ad_group_metric.bid_strategy_name, '') self.assertEqual(ad_group_metric.bid_strategy_type, DailyAdGroupMetrics.BID_STRATEGY_TYPE_MANUAL_CPC) self.assertEqual(ad_group_metric.content_impr_share, None) 
self.assertEqual(ad_group_metric.content_lost_is_rank, None) self.assertEqual(ad_group_metric.cost_est_total_conv.amount, Decimal('0')) self.assertEqual(ad_group_metric.est_cross_device_conv, None) self.assertEqual(ad_group_metric.est_total_conv_rate, Decimal('0.00')) self.assertEqual(ad_group_metric.est_total_conv_value, Decimal('0.0')) self.assertEqual(ad_group_metric.est_total_conv_value_click, Decimal('0.0')) self.assertEqual(ad_group_metric.est_total_conv_value_cost, Decimal('0.0')) self.assertEqual(ad_group_metric.est_total_conv, 0) self.assertEqual(ad_group_metric.search_exact_match_is, None) self.assertEqual(ad_group_metric.search_impr_share, None) self.assertEqual(ad_group_metric.search_lost_is_rank, None) self.assertEqual(ad_group_metric.value_converted_click, Decimal('0.0')) self.assertEqual(ad_group_metric.value_conv, Decimal('0.0')) self.assertEqual(ad_group_metric.view_through_conv, None) self.assertEqual(ad_group_metric.avg_cpc.amount, Decimal('0')) self.assertEqual(ad_group_metric.avg_cpm.amount, Decimal('0')) self.assertEqual(ad_group_metric.avg_position, Decimal('0.0')) self.assertEqual(ad_group_metric.clicks, 0) self.assertEqual(ad_group_metric.click_conversion_rate, Decimal('0.00')) self.assertEqual(ad_group_metric.conv_rate, Decimal('0.00')) self.assertEqual(ad_group_metric.converted_clicks, 0) self.assertEqual(ad_group_metric.converted_clicks, 0) self.assertEqual(ad_group_metric.cost.amount, Decimal('0.0')) self.assertEqual(ad_group_metric.cost_converted_click.amount, Decimal('0.0')) self.assertEqual(ad_group_metric.cost_conv.amount, Decimal('0')) self.assertEqual(ad_group_metric.ctr, Decimal('0.00')) self.assertEqual(ad_group_metric.impressions, 0) def test_daily_ad_group_metrics_update(self): # Run the Ad Group report populate report_file = _get_report_file('adgroup_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad_group(report_file=report_file) account.finish_ad_group_sync() account.finish_sync() # Run the 
Ad Group report populate update report_file = _get_report_file('adgroup_report_update.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad_group(report_file=report_file) account.finish_ad_group_sync() account.finish_sync() # Check Ad Group metrics were created ad_group_metrics = DailyAdGroupMetrics.objects.filter(ad_group__campaign__account=account) self.assertEqual(ad_group_metrics.count(), 50) # Check a row to ensure fields are correct ad_group_metric = DailyAdGroupMetrics.objects.get(ad_group__ad_group_id=323809001, day=date(2014, 7, 28)) self.assertEqual(ad_group_metric.max_cpa_converted_clicks, None) self.assertEqual(ad_group_metric.value_est_total_conv, Decimal('0.0')) self.assertEqual(ad_group_metric.bid_strategy_id, 0) self.assertEqual(ad_group_metric.bid_strategy_name, '') self.assertEqual(ad_group_metric.bid_strategy_type, DailyAdGroupMetrics.BID_STRATEGY_TYPE_MANUAL_CPC) self.assertEqual(ad_group_metric.content_impr_share, None) self.assertEqual(ad_group_metric.content_lost_is_rank, None) self.assertEqual(ad_group_metric.cost_est_total_conv.amount, Decimal('0')) self.assertEqual(ad_group_metric.est_cross_device_conv, None) self.assertEqual(ad_group_metric.est_total_conv_rate, Decimal('0.00')) self.assertEqual(ad_group_metric.est_total_conv_value, Decimal('0.0')) self.assertEqual(ad_group_metric.est_total_conv_value_click, Decimal('0.0')) self.assertEqual(ad_group_metric.est_total_conv_value_cost, Decimal('0.0')) self.assertEqual(ad_group_metric.est_total_conv, 0) self.assertEqual(ad_group_metric.search_exact_match_is, None) self.assertEqual(ad_group_metric.search_impr_share, None) self.assertEqual(ad_group_metric.search_lost_is_rank, None) self.assertEqual(ad_group_metric.value_converted_click, Decimal('0.0')) self.assertEqual(ad_group_metric.value_conv, Decimal('0.0')) self.assertEqual(ad_group_metric.view_through_conv, None) self.assertEqual(ad_group_metric.avg_cpc.amount, Decimal('0')) 
self.assertEqual(ad_group_metric.avg_cpm.amount, Decimal('0')) self.assertEqual(ad_group_metric.avg_position, Decimal('0.0')) self.assertEqual(ad_group_metric.clicks, 0) self.assertEqual(ad_group_metric.click_conversion_rate, Decimal('0.00')) self.assertEqual(ad_group_metric.conv_rate, Decimal('0.00')) self.assertEqual(ad_group_metric.converted_clicks, 0) self.assertEqual(ad_group_metric.converted_clicks, 0) self.assertEqual(ad_group_metric.cost.amount, Decimal('0.0')) self.assertEqual(ad_group_metric.cost_converted_click.amount, Decimal('0.0')) self.assertEqual(ad_group_metric.cost_conv.amount, Decimal('0')) self.assertEqual(ad_group_metric.ctr, Decimal('0.00')) self.assertEqual(ad_group_metric.impressions, 100) def test_daily_ad_metrics_create(self): # Run the Ad report populate report_file = _get_report_file('ad_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad(report_file=report_file) account.finish_ad_sync() account.finish_sync() # Check Ad Group metrics were created ad_metrics = DailyAdMetrics.objects.filter(ad__ad_group__campaign__account=account) self.assertEqual(ad_metrics.count(), 44) # Check a row to ensure fields are correct ad_metric = DailyAdMetrics.objects.get(ad__ad_id=40564055441, day=date(2014, 8, 6)) self.assertEqual(ad_metric.avg_cpc.amount, Decimal('0.00')) self.assertEqual(ad_metric.avg_cpm.amount, Decimal('0.00')) self.assertEqual(ad_metric.avg_position, Decimal('1.2')) self.assertEqual(ad_metric.clicks, 0) self.assertEqual(ad_metric.click_conversion_rate, Decimal('0.00')) self.assertEqual(ad_metric.conv_rate, Decimal('0.00')) self.assertEqual(ad_metric.converted_clicks, 0) self.assertEqual(ad_metric.converted_clicks, 0) self.assertEqual(ad_metric.cost.amount, Decimal('0.00')) self.assertEqual(ad_metric.cost_converted_click.amount, Decimal('0.00')) self.assertEqual(ad_metric.cost_conv.amount, Decimal('0.00')) self.assertEqual(ad_metric.ctr, Decimal('0.00')) self.assertEqual(ad_metric.impressions, 20) 
self.assertEqual(ad_metric.value_converted_click, Decimal('0.00')) self.assertEqual(ad_metric.value_conv, Decimal('0.0')) self.assertEqual(ad_metric.view_through_conv, None) def test_daily_ad_metrics_update(self): # Run the Ad report populate report_file = _get_report_file('ad_report.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad(report_file=report_file) account.finish_ad_sync() account.finish_sync() # Run the Ad report update report_file = _get_report_file('ad_report_update.gz') account = Account.objects.get(pk=1) account.start_sync() account.sync_ad(report_file=report_file) account.finish_ad_sync() account.finish_sync() # Check Ad Group metrics were created and not duplicated ad_metrics = DailyAdMetrics.objects.filter(ad__ad_group__campaign__account=account) self.assertEqual(ad_metrics.count(), 44) # Check a row to ensure fields are correct ad_metric = DailyAdMetrics.objects.get(ad__ad_id=40564055441, day=date(2014, 8, 6)) self.assertEqual(ad_metric.avg_cpc.amount, Decimal('1.00')) self.assertEqual(ad_metric.avg_cpm.amount, Decimal('1.00')) self.assertEqual(ad_metric.avg_position, Decimal('1.3')) self.assertEqual(ad_metric.clicks, 0) self.assertEqual(ad_metric.click_conversion_rate, Decimal('0.00')) self.assertEqual(ad_metric.conv_rate, Decimal('0.00')) self.assertEqual(ad_metric.converted_clicks, 0) self.assertEqual(ad_metric.converted_clicks, 0) self.assertEqual(ad_metric.cost.amount, Decimal('0.00')) self.assertEqual(ad_metric.cost_converted_click.amount, Decimal('0.00')) self.assertEqual(ad_metric.cost_conv.amount, Decimal('0.00')) self.assertEqual(ad_metric.ctr, Decimal('0.00')) self.assertEqual(ad_metric.impressions, 30) self.assertEqual(ad_metric.value_converted_click, Decimal('0.00')) self.assertEqual(ad_metric.value_conv, Decimal('0.0')) self.assertEqual(ad_metric.view_through_conv, None) def test_auto_now(self): account = Account.objects.create(account_id=1234) #: :type account: Account 
self.assertIsInstance(account.created, datetime) self.assertIsInstance(account.updated, datetime) created = account.created updated = account.updated account.account_id = 4321 account.save() self.assertEqual(account.created, created) self.assertNotEqual(account.updated, updated)
51.655897
139
0.718718
4,148
31,975
5.246384
0.054725
0.195754
0.096085
0.074809
0.903272
0.88025
0.874138
0.853139
0.840686
0.820145
0
0.028329
0.17864
31,975
618
140
51.739482
0.800289
0.064238
0
0.764706
0
0
0.044762
0.004955
0
0
0
0
0.58215
1
0.036511
false
0
0.01217
0.002028
0.056795
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
1050ae6ce349c5dd14fec055aa01b05603fef263
12,480
py
Python
sdk/python/pulumi_grafana/team_preferences.py
jorgeperezc/pulumi-grafana
bbfd904e1bf5554ce47690e71ef172904d8787b5
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_grafana/team_preferences.py
jorgeperezc/pulumi-grafana
bbfd904e1bf5554ce47690e71ef172904d8787b5
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_grafana/team_preferences.py
jorgeperezc/pulumi-grafana
bbfd904e1bf5554ce47690e71ef172904d8787b5
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from . import _utilities __all__ = ['TeamPreferencesArgs', 'TeamPreferences'] @pulumi.input_type class TeamPreferencesArgs: def __init__(__self__, *, team_id: pulumi.Input[int], home_dashboard_id: Optional[pulumi.Input[int]] = None, theme: Optional[pulumi.Input[str]] = None, timezone: Optional[pulumi.Input[str]] = None): """ The set of arguments for constructing a TeamPreferences resource. :param pulumi.Input[int] team_id: The numeric team ID. :param pulumi.Input[int] home_dashboard_id: The numeric ID of the dashboard to display when a team member logs in. :param pulumi.Input[str] theme: The theme for the specified team. Available themes are `light`, `dark`, or an empty string for the default theme. :param pulumi.Input[str] timezone: The timezone for the specified team. Available values are `utc`, `browser`, or an empty string for the default. """ pulumi.set(__self__, "team_id", team_id) if home_dashboard_id is not None: pulumi.set(__self__, "home_dashboard_id", home_dashboard_id) if theme is not None: pulumi.set(__self__, "theme", theme) if timezone is not None: pulumi.set(__self__, "timezone", timezone) @property @pulumi.getter(name="teamId") def team_id(self) -> pulumi.Input[int]: """ The numeric team ID. """ return pulumi.get(self, "team_id") @team_id.setter def team_id(self, value: pulumi.Input[int]): pulumi.set(self, "team_id", value) @property @pulumi.getter(name="homeDashboardId") def home_dashboard_id(self) -> Optional[pulumi.Input[int]]: """ The numeric ID of the dashboard to display when a team member logs in. 
""" return pulumi.get(self, "home_dashboard_id") @home_dashboard_id.setter def home_dashboard_id(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "home_dashboard_id", value) @property @pulumi.getter def theme(self) -> Optional[pulumi.Input[str]]: """ The theme for the specified team. Available themes are `light`, `dark`, or an empty string for the default theme. """ return pulumi.get(self, "theme") @theme.setter def theme(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "theme", value) @property @pulumi.getter def timezone(self) -> Optional[pulumi.Input[str]]: """ The timezone for the specified team. Available values are `utc`, `browser`, or an empty string for the default. """ return pulumi.get(self, "timezone") @timezone.setter def timezone(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "timezone", value) @pulumi.input_type class _TeamPreferencesState: def __init__(__self__, *, home_dashboard_id: Optional[pulumi.Input[int]] = None, team_id: Optional[pulumi.Input[int]] = None, theme: Optional[pulumi.Input[str]] = None, timezone: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering TeamPreferences resources. :param pulumi.Input[int] home_dashboard_id: The numeric ID of the dashboard to display when a team member logs in. :param pulumi.Input[int] team_id: The numeric team ID. :param pulumi.Input[str] theme: The theme for the specified team. Available themes are `light`, `dark`, or an empty string for the default theme. :param pulumi.Input[str] timezone: The timezone for the specified team. Available values are `utc`, `browser`, or an empty string for the default. 
""" if home_dashboard_id is not None: pulumi.set(__self__, "home_dashboard_id", home_dashboard_id) if team_id is not None: pulumi.set(__self__, "team_id", team_id) if theme is not None: pulumi.set(__self__, "theme", theme) if timezone is not None: pulumi.set(__self__, "timezone", timezone) @property @pulumi.getter(name="homeDashboardId") def home_dashboard_id(self) -> Optional[pulumi.Input[int]]: """ The numeric ID of the dashboard to display when a team member logs in. """ return pulumi.get(self, "home_dashboard_id") @home_dashboard_id.setter def home_dashboard_id(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "home_dashboard_id", value) @property @pulumi.getter(name="teamId") def team_id(self) -> Optional[pulumi.Input[int]]: """ The numeric team ID. """ return pulumi.get(self, "team_id") @team_id.setter def team_id(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "team_id", value) @property @pulumi.getter def theme(self) -> Optional[pulumi.Input[str]]: """ The theme for the specified team. Available themes are `light`, `dark`, or an empty string for the default theme. """ return pulumi.get(self, "theme") @theme.setter def theme(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "theme", value) @property @pulumi.getter def timezone(self) -> Optional[pulumi.Input[str]]: """ The timezone for the specified team. Available values are `utc`, `browser`, or an empty string for the default. 
""" return pulumi.get(self, "timezone") @timezone.setter def timezone(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "timezone", value) class TeamPreferences(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, home_dashboard_id: Optional[pulumi.Input[int]] = None, team_id: Optional[pulumi.Input[int]] = None, theme: Optional[pulumi.Input[str]] = None, timezone: Optional[pulumi.Input[str]] = None, __props__=None): """ Create a TeamPreferences resource with the given unique name, props, and options. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[int] home_dashboard_id: The numeric ID of the dashboard to display when a team member logs in. :param pulumi.Input[int] team_id: The numeric team ID. :param pulumi.Input[str] theme: The theme for the specified team. Available themes are `light`, `dark`, or an empty string for the default theme. :param pulumi.Input[str] timezone: The timezone for the specified team. Available values are `utc`, `browser`, or an empty string for the default. """ ... @overload def __init__(__self__, resource_name: str, args: TeamPreferencesArgs, opts: Optional[pulumi.ResourceOptions] = None): """ Create a TeamPreferences resource with the given unique name, props, and options. :param str resource_name: The name of the resource. :param TeamPreferencesArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TeamPreferencesArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, home_dashboard_id: Optional[pulumi.Input[int]] = None, team_id: Optional[pulumi.Input[int]] = None, theme: Optional[pulumi.Input[str]] = None, timezone: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = TeamPreferencesArgs.__new__(TeamPreferencesArgs) __props__.__dict__["home_dashboard_id"] = home_dashboard_id if team_id is None and not opts.urn: raise TypeError("Missing required property 'team_id'") __props__.__dict__["team_id"] = team_id __props__.__dict__["theme"] = theme __props__.__dict__["timezone"] = timezone super(TeamPreferences, __self__).__init__( 'grafana:index/teamPreferences:TeamPreferences', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, home_dashboard_id: Optional[pulumi.Input[int]] = None, team_id: Optional[pulumi.Input[int]] = None, theme: Optional[pulumi.Input[str]] = None, timezone: Optional[pulumi.Input[str]] = None) -> 'TeamPreferences': """ Get an existing TeamPreferences resource's state with the given name, id, and optional extra properties used to qualify the lookup. 
:param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[int] home_dashboard_id: The numeric ID of the dashboard to display when a team member logs in. :param pulumi.Input[int] team_id: The numeric team ID. :param pulumi.Input[str] theme: The theme for the specified team. Available themes are `light`, `dark`, or an empty string for the default theme. :param pulumi.Input[str] timezone: The timezone for the specified team. Available values are `utc`, `browser`, or an empty string for the default. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _TeamPreferencesState.__new__(_TeamPreferencesState) __props__.__dict__["home_dashboard_id"] = home_dashboard_id __props__.__dict__["team_id"] = team_id __props__.__dict__["theme"] = theme __props__.__dict__["timezone"] = timezone return TeamPreferences(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="homeDashboardId") def home_dashboard_id(self) -> pulumi.Output[Optional[int]]: """ The numeric ID of the dashboard to display when a team member logs in. """ return pulumi.get(self, "home_dashboard_id") @property @pulumi.getter(name="teamId") def team_id(self) -> pulumi.Output[int]: """ The numeric team ID. """ return pulumi.get(self, "team_id") @property @pulumi.getter def theme(self) -> pulumi.Output[Optional[str]]: """ The theme for the specified team. Available themes are `light`, `dark`, or an empty string for the default theme. """ return pulumi.get(self, "theme") @property @pulumi.getter def timezone(self) -> pulumi.Output[Optional[str]]: """ The timezone for the specified team. Available values are `utc`, `browser`, or an empty string for the default. """ return pulumi.get(self, "timezone")
43.333333
154
0.641186
1,525
12,480
5.027541
0.097705
0.080344
0.081779
0.05165
0.770575
0.754924
0.739142
0.727925
0.706274
0.703274
0
0.000108
0.257612
12,480
287
155
43.484321
0.827415
0.317067
0
0.701149
1
0
0.086024
0.005676
0
0
0
0
0
1
0.155172
false
0.005747
0.028736
0
0.275862
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5e0312f3a3b5f21044a0cd49b0f03eb3d866ca18
5,197
py
Python
comments/consumers.py
Isaacli0520/msnmatch
228c6d546e16bd54dc8c7e0803f0f8c408cb0219
[ "MIT" ]
null
null
null
comments/consumers.py
Isaacli0520/msnmatch
228c6d546e16bd54dc8c7e0803f0f8c408cb0219
[ "MIT" ]
18
2020-03-11T18:57:27.000Z
2022-02-26T11:14:38.000Z
comments/consumers.py
Isaacli0520/msnmatch
228c6d546e16bd54dc8c7e0803f0f8c408cb0219
[ "MIT" ]
null
null
null
from channels.generic.websocket import AsyncWebsocketConsumer
from users.models import User
from django.db.models import Q
from channels.db import database_sync_to_async
import asyncio
import json
import os


class FilterConsumer(AsyncWebsocketConsumer):
    """Relays raw ("unfiltered") slide comments between connected clients.

    Protocol: a client first sends ``{"command": "join", "slide_pk": ...}``
    to enter the broadcast group for one slide, then ``{"command": "send",
    ...}`` to fan a comment out to every member of that group.
    """

    # Payload keys copied verbatim from the sender onto the broadcast message.
    FIELDS = ('text', 'color', 'size', 'time', 'mode', 'message_type')

    async def connect(self):
        self.event = asyncio.Event()
        self.user = self.scope["user"]
        # Placeholder group; replaced by a slide-specific one on "join".
        self.group_name = "filter_group"
        await self.accept()

    async def disconnect(self, close_code):
        # Leave whatever group this channel last joined.
        await self.channel_layer.group_discard(self.group_name, self.channel_name)

    async def receive(self, text_data):
        """Dispatch an incoming WebSocket frame by its "command" field."""
        payload = json.loads(text_data)
        command = payload['command']

        if command == 'join':
            self.group_name = f"filter-{payload['slide_pk']}"
            await self.channel_layer.group_add(self.group_name, self.channel_name)
            await self.send(text_data=json.dumps({
                'type': 'join',
                'slide_pk': payload['slide_pk'],
            }))
        elif command == 'send':
            message = {'type': "comment_unfiltered"}
            message.update((key, payload[key]) for key in self.FIELDS)
            await self.channel_layer.group_send(self.group_name, message)

    async def comment_unfiltered(self, event):
        """Group handler: forward a broadcast comment down this socket."""
        relayed = {'type': "comment_unfiltered"}
        relayed.update((key, event[key]) for key in self.FIELDS)
        await self.send(text_data=json.dumps(relayed))


class CommentsConsumer(AsyncWebsocketConsumer):
    """Relays filtered slide comments plus question-page navigation events.

    Same join/send protocol as ``FilterConsumer``, with an extra
    ``question_page`` command that broadcasts a page-turn direction.
    """

    # Payload keys copied verbatim from the sender onto the broadcast message.
    FIELDS = ('text', 'color', 'size', 'time', 'question_id', 'mode', 'message_type')

    async def connect(self):
        self.event = asyncio.Event()
        self.user = self.scope["user"]
        # Placeholder group; replaced by a slide-specific one on "join".
        self.group_name = "comments_group"
        await self.accept()

    async def disconnect(self, close_code):
        # Leave whatever group this channel last joined.
        await self.channel_layer.group_discard(self.group_name, self.channel_name)

    async def receive(self, text_data):
        """Dispatch an incoming WebSocket frame by its "command" field."""
        payload = json.loads(text_data)
        command = payload['command']

        if command == 'join':
            self.group_name = f"comments-{payload['slide_pk']}"
            await self.channel_layer.group_add(self.group_name, self.channel_name)
            await self.send(text_data=json.dumps({
                'type': 'join',
                'slide_pk': payload['slide_pk'],
            }))
        elif command == 'send':
            message = {'type': "comment_filtered"}
            message.update((key, payload[key]) for key in self.FIELDS)
            await self.channel_layer.group_send(self.group_name, message)
        elif command == 'question_page':
            await self.channel_layer.group_send(self.group_name, {
                'type': "question_command",
                'direction': payload['direction'],
            })

    async def question_command(self, event):
        """Group handler: forward a page-navigation event down this socket."""
        await self.send(text_data=json.dumps({
            'type': "question_command",
            'direction': event['direction'],
        }))

    async def comment_filtered(self, event):
        """Group handler: forward a broadcast comment down this socket."""
        relayed = {'type': "comment_filtered"}
        relayed.update((key, event[key]) for key in self.FIELDS)
        await self.send(text_data=json.dumps(relayed))
34.417219
83
0.542236
548
5,197
4.861314
0.133212
0.147147
0.126126
0.070946
0.852477
0.831456
0.774399
0.774399
0.774399
0.774399
0
0
0.341351
5,197
150
84
34.646667
0.778265
0.100827
0
0.706897
0
0
0.134293
0
0
0
0
0
0
1
0
false
0
0.060345
0
0.077586
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d81eeac3f97117bf225586af84956cec510c3493
133
py
Python
frappe_notification/frappe_notification/doctype/notification_client/__init__.py
leam-tech/frappe_notification
79e40f2c541d86d714a0b8d48b87f32b2f85076a
[ "MIT" ]
null
null
null
frappe_notification/frappe_notification/doctype/notification_client/__init__.py
leam-tech/frappe_notification
79e40f2c541d86d714a0b8d48b87f32b2f85076a
[ "MIT" ]
null
null
null
frappe_notification/frappe_notification/doctype/notification_client/__init__.py
leam-tech/frappe_notification
79e40f2c541d86d714a0b8d48b87f32b2f85076a
[ "MIT" ]
null
null
null
from .notification_client import NotificationClient # noqa from .test_notification_client import NotificationClientFixtures # noqa
44.333333
72
0.864662
13
133
8.615385
0.615385
0.321429
0.428571
0
0
0
0
0
0
0
0
0
0.105263
133
2
73
66.5
0.941176
0.067669
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
d8317b4dbc2ada7e037c37dd957f26a6d8e54e8f
144
py
Python
weathersplier/main.py
KamTing/spider
a69f4d82becd4dad2305e03d3c4c19d55682275c
[ "MIT" ]
null
null
null
weathersplier/main.py
KamTing/spider
a69f4d82becd4dad2305e03d3c4c19d55682275c
[ "MIT" ]
null
null
null
weathersplier/main.py
KamTing/spider
a69f4d82becd4dad2305e03d3c4c19d55682275c
[ "MIT" ]
null
null
null
from scrapy import cmdline # cmdline.execute("scrapy crawl weather_spider".split()) cmdline.execute("scrapy crawl temperature_spider".split())
28.8
58
0.798611
18
144
6.277778
0.555556
0.247788
0.353982
0.442478
0
0
0
0
0
0
0
0
0.083333
144
5
58
28.8
0.856061
0.375
0
0
0
0
0.348315
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
dc418014f44b2e40c6b80f08271c94b04e8238c0
17,052
py
Python
src/extrucal/extrusion.py
johnwslee/extrucal
77296a2fd1cd8ee7129b9c5a8111d72dad164162
[ "MIT" ]
null
null
null
src/extrucal/extrusion.py
johnwslee/extrucal
77296a2fd1cd8ee7129b9c5a8111d72dad164162
[ "MIT" ]
null
null
null
src/extrucal/extrusion.py
johnwslee/extrucal
77296a2fd1cd8ee7129b9c5a8111d72dad164162
[ "MIT" ]
null
null
null
import pandas as pd
import numpy as np

# altair is only needed by throughput_plot(); import it defensively so the
# purely numeric API (throughput_cal / throughput_table) works without it.
try:
    import altair as alt
except ImportError:  # pragma: no cover - plotting is optional
    alt = None


def _check_number(value, name):
    """Raise TypeError unless *value* is an int or a float."""
    if not isinstance(value, (int, float)):
        raise TypeError(f"'{name}' should be either integer or float")


def _check_depth_range(shallow, deep, size):
    """Validate that the depth range [shallow, deep] is usable for *size*."""
    if shallow < size * 0.01:
        raise ValueError("Channel depth is too shallow(<1% of screw size) to be used for extrusion screw")
    if deep > size * 0.3:
        raise ValueError("Channel depth is too deep(>30% of screw size) to be used for extrusion screw")


def _check_screw_values(size, density, pitch, w_flight, n_flight):
    """Value-check the common screw geometry/material inputs (original order)."""
    if size < 5:
        raise ValueError("Screw size is too small!!")
    if size > 500:
        raise ValueError("Screw size is too big!!")
    if density < 300:
        raise ValueError("This is not melt density for polymers. Too low!!")
    if density > 3000:
        raise ValueError("This is not melt density for polymers. Too high!!")
    if pitch < size * 0.2:
        raise ValueError("Screw pitch is too small")
    if pitch > size * 2.5:
        raise ValueError("Screw pitch is too big")
    if w_flight < size * 0.01:
        raise ValueError("Flight width is too small")
    if w_flight > size * 0.7:
        raise ValueError("Flight width is too big")
    if n_flight not in (1, 2):
        raise ValueError("You chose wrong value for n_flight. It should be either 1 or 2")


def _check_table_inputs(size, density, pitch, w_flight, n_flight,
                        min_depth, max_depth, delta_depth,
                        min_rpm, max_rpm, delta_rpm):
    """Type- and value-check every throughput_table/throughput_plot argument."""
    _check_number(pitch, 'pitch')
    _check_number(w_flight, 'w_flight')
    if not isinstance(n_flight, int):
        raise TypeError("'n_flight' should be integer")
    _check_number(min_depth, 'min_depth')
    _check_number(max_depth, 'max_depth')
    _check_number(delta_depth, 'delta_depth')
    _check_number(min_rpm, 'min_rpm')
    _check_number(max_rpm, 'max_rpm')
    _check_number(delta_rpm, 'delta_rpm')
    _check_depth_range(min_depth, max_depth, size)
    if delta_depth > max_depth - min_depth:
        raise ValueError("'delta_depth' can not be greater than 'max_depth - min_depth'")
    _check_screw_values(size, density, pitch, w_flight, n_flight)


def _depth_rpm_grid(size, density, pitch, w_flight, n_flight,
                    min_depth, max_depth, delta_depth,
                    min_rpm, max_rpm, delta_rpm):
    """Compute throughputs over a depth x RPM grid.

    Returns (depths, rpms, columns) where columns maps each rounded depth to
    the list of throughputs across all RPMs.  The +0.1 slack keeps the upper
    bound inclusive despite np.arange's half-open, float-stepped range.
    """
    depths = [round(d, 2) for d in np.arange(min_depth, max_depth + 0.1, delta_depth)]
    rpms = list(np.arange(min_rpm, max_rpm + 0.1, delta_rpm))
    columns = {
        d: [throughput_cal(size, d, density, r, pitch, w_flight, n_flight) for r in rpms]
        for d in depths
    }
    return depths, rpms, columns


def throughput_cal(size, depth, density, rpm=1, pitch=None, w_flight=None, n_flight=1):
    """
    Calculates the extrusion throughput (Drag Flow) given the screw size,
    RPM, the channel depth of metering channel, and screw pitch

    Parameters
    ----------
    size : int or float
        Screw size [mm]
    depth : int or float
        Channel depth of metering section [mm]
    density : int or float
        Melt density of polymeric material [kg/m^3]
    rpm : int or float
        Screw RPM. Default value is 1 (throughput per unit rpm)
    pitch : int or float
        Screw pitch [mm]. If None, squared pitch (=1D) is used
    w_flight : int or float
        Flight width [mm]. If None, 10% of screw size is used for flight width
    n_flight : int
        Number of flight [ea]. Default value is 1 (single-flighted)

    Returns
    -------
    throughput : float
        extrusion throughput [kg/hr]

    Raises
    ------
    TypeError
        If any argument is not numeric (n_flight must be an int).
    ValueError
        If any argument is outside its physically sensible range.

    Examples
    --------
    >>> throughput_cal(size=200, depth=10, density=800)
    """
    # Test input type
    _check_number(size, 'size')
    _check_number(depth, 'depth')
    _check_number(density, 'density')

    # Assign default values (squared pitch; flight width = 10% of diameter)
    if pitch is None:
        pitch = size
    if w_flight is None:
        w_flight = size * 0.1

    # Tests the types of default variables
    _check_number(rpm, 'rpm')
    _check_number(pitch, 'pitch')
    _check_number(w_flight, 'w_flight')
    if not isinstance(n_flight, int):
        raise TypeError("'n_flight' should be integer")

    # Test input value
    _check_depth_range(depth, depth, size)
    _check_screw_values(size, density, pitch, w_flight, n_flight)

    # Screw geometry: helix angles at the barrel (b) and screw root (c)
    screw_root_size = size - (depth * 2)
    helix_angle_b = np.arctan(pitch / (np.pi * size))
    helix_angle_c = np.arctan(pitch / (np.pi * screw_root_size))
    channel_width_b = ((pitch / n_flight) * np.cos(helix_angle_b)) - w_flight
    channel_width_c = ((pitch / n_flight) * np.cos(helix_angle_c)) - w_flight
    avg_channel_width = (channel_width_b + channel_width_c) / 2

    # Shape factor for drag flow: truncated series over odd harmonics 1..25
    f_d_dict = {
        i: (1 / (i ** 3)) * np.tanh((i * np.pi * depth) / (2 * avg_channel_width))
        for i in range(1, 26, 2)
    }
    f_d_df = pd.Series(f_d_dict)
    shape_factor_drag = ((16 * avg_channel_width) / (np.pi ** 3 * depth)) * f_d_df.sum()

    # Drag-flow throughput; /1000 converts mm to m, *3600 converts /s to /hr
    rotation_per_sec = rpm / 60
    barrel_rot_speed = (np.pi * rotation_per_sec * size * np.cos(helix_angle_b)) / 1000
    throughput_per_sec = (
        n_flight * density * barrel_rot_speed
        * (avg_channel_width / 1000) * (depth / 1000) * shape_factor_drag
    ) / 2
    throughput_per_hr = throughput_per_sec * 60 * 60
    return round(throughput_per_hr, 2)


def throughput_table(size, density, pitch=None, w_flight=None, n_flight=1,
                     min_depth=None, max_depth=None, delta_depth=None,
                     min_rpm=5, max_rpm=50, delta_rpm=5):
    """
    Generate a table containing the extrusion throughput with respect to
    channel depth and screw RPM

    Parameters
    ----------
    size : int or float
        Screw size [mm]
    density : int or float
        Melt density of polymeric material [kg/m^3]
    pitch : int or float
        Screw pitch [mm]. If None, squared pitch (=1D) is used
    w_flight : int or float
        Flight width [mm]. If None, 10% of screw size is used for flight width
    n_flight : int
        Number of flight [ea]. Default value is 1 (single-flighted)
    min_depth : int or float
        Minimum depth for calculation [mm]. If None, 2% of screw size is used
    max_depth : int or float
        Maximum depth for calculation [mm]. If None, 9% of screw size is used
    delta_depth : int or float
        Amount of increment in depth [mm]. If None, 1% of screw size is used
    min_rpm : int or float
        Minimum screw RPM for calculation [RPM]. Default value is 5
    max_rpm : int or float
        Maximum screw RPM for calculation [RPM]. Default value is 50
    delta_rpm : int or float
        Amount of increment in RPM [RPM]. Default value is 5

    Returns
    -------
    table : pandas.DataFrame
        dataframe containing the throughput as a function of channel depth
        (columns "depth=<d>") and screw RPM (index "rpm=<r>")

    Examples
    --------
    >>> throughput_table(size=200, density=800)
    """
    # Test input type
    _check_number(size, 'size')
    _check_number(density, 'density')

    # Assign default values
    if pitch is None:
        pitch = size
    if w_flight is None:
        w_flight = size * 0.1
    if min_depth is None:
        min_depth = size * 0.02
    if max_depth is None:
        max_depth = size * 0.09
    if delta_depth is None:
        delta_depth = size * 0.01

    # Test types and values of all remaining inputs
    _check_table_inputs(size, density, pitch, w_flight, n_flight,
                        min_depth, max_depth, delta_depth,
                        min_rpm, max_rpm, delta_rpm)

    # Generates table
    depths, rpms, columns = _depth_rpm_grid(size, density, pitch, w_flight, n_flight,
                                            min_depth, max_depth, delta_depth,
                                            min_rpm, max_rpm, delta_rpm)
    table = {f"depth={d}": col for d, col in columns.items()}
    rpm_title = [f"rpm={k}" for k in rpms]
    return pd.DataFrame(table, index=rpm_title)


def throughput_plot(size, density, pitch=None, w_flight=None, n_flight=1,
                    min_depth=None, max_depth=None, delta_depth=None,
                    min_rpm=0, max_rpm=50, delta_rpm=1):
    """
    Generates a plot containing the extrusion throughput with respect to
    channel depth and screw RPM

    Parameters
    ----------
    size : int or float
        Screw size [mm]
    density : int or float
        Melt density of polymeric material [kg/m^3]
    pitch : int or float
        Screw pitch [mm]. If None, squared pitch (=1D) is used
    w_flight : int or float
        Flight width [mm]. If None, 10% of screw size is used for flight width
    n_flight : int
        Number of flight [ea]. Default value is 1 (single-flighted)
    min_depth : int or float
        Minimum depth for calculation [mm]. If None, 2% of screw size is used
    max_depth : int or float
        Maximum depth for calculation [mm]. If None, 9% of screw size is used
    delta_depth : int or float
        Amount of increment in depth [mm]. If None, 1% of screw size is used
    min_rpm : int or float
        Minimum screw RPM for calculation [RPM]. Default value is 0
    max_rpm : int or float
        Maximum screw RPM for calculation [RPM]. Default value is 50
    delta_rpm : int or float
        Amount of increment in RPM [RPM]. Default value is 1

    Returns
    -------
    plot : altair.Chart object
        a chart showing the throughput as a function of channel depth and
        screw RPM

    Raises
    ------
    ImportError
        If altair is not installed.

    Examples
    --------
    >>> throughput_plot(size=200, density=800)
    """
    if alt is None:
        raise ImportError("altair is required for throughput_plot")

    # Test input type
    _check_number(size, 'size')
    _check_number(density, 'density')

    # Assign default values
    if pitch is None:
        pitch = size
    if w_flight is None:
        w_flight = size * 0.1
    if min_depth is None:
        min_depth = size * 0.02
    if max_depth is None:
        max_depth = size * 0.09
    if delta_depth is None:
        delta_depth = size * 0.01

    # Test types and values of all remaining inputs
    _check_table_inputs(size, density, pitch, w_flight, n_flight,
                        min_depth, max_depth, delta_depth,
                        min_rpm, max_rpm, delta_rpm)

    # Generate table for plot (wide -> long form for altair)
    depths, rpms, columns = _depth_rpm_grid(size, density, pitch, w_flight, n_flight,
                                            min_depth, max_depth, delta_depth,
                                            min_rpm, max_rpm, delta_rpm)
    table_df = pd.DataFrame(columns, index=rpms)
    table_for_plot = table_df.reset_index()
    table_for_plot = table_for_plot.rename(columns={"index": "RPM"})
    table_for_plot = table_for_plot.melt(id_vars="RPM", var_name="depth",
                                         value_name="throughput")
    table_for_plot["depth"] = table_for_plot["depth"].astype('category')

    # Generate plot
    plot = alt.Chart(
        table_for_plot, title='Throughput vs Screw RPM & Channel Depth'
    ).mark_circle().encode(
        alt.X("RPM", title="Screw RPM", scale=alt.Scale(domain=(0, max_rpm))),
        alt.Y("throughput", title="Throughput [kg/hr]"),
        alt.Color(
            "depth",
            title="Channel depth [mm]",
            sort=alt.EncodingSortField('throughput', op='mean', order='descending')
        ),
        tooltip=["RPM", "depth", "throughput"]
    ).configure_axis(
        labelFontSize=14,
        titleFontSize=16
    ).configure_legend(
        labelFontSize=16,
        titleFontSize=14
    ).configure_title(
        fontSize=18
    )
    return plot
40.028169
107
0.617464
2,402
17,052
4.273522
0.092839
0.02679
0.08037
0.045592
0.826693
0.81247
0.805358
0.800097
0.79396
0.788505
0
0.021682
0.291344
17,052
425
108
40.122353
0.827789
0.26818
0
0.753968
0
0
0.241976
0
0
0
0
0
0
1
0.011905
false
0
0.011905
0
0.035714
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f4db9a01764125925b49da846749cd189e73662a
26,514
py
Python
sdk/python/pulumi_sysdig/monitor/team.py
Sysdig-Hackathon-Picasso/pulumi-sysdig
e25b655f6ad4a5f52678bc445be2d59f28f5bb4b
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_sysdig/monitor/team.py
Sysdig-Hackathon-Picasso/pulumi-sysdig
e25b655f6ad4a5f52678bc445be2d59f28f5bb4b
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_sysdig/monitor/team.py
Sysdig-Hackathon-Picasso/pulumi-sysdig
e25b655f6ad4a5f52678bc445be2d59f28f5bb4b
[ "ECL-2.0", "Apache-2.0" ]
1
2021-12-01T08:57:09.000Z
2021-12-01T08:57:09.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs from ._inputs import * __all__ = ['TeamArgs', 'Team'] @pulumi.input_type class TeamArgs: def __init__(__self__, *, entrypoints: pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]], can_see_infrastructure_events: Optional[pulumi.Input[bool]] = None, can_use_aws_data: Optional[pulumi.Input[bool]] = None, can_use_sysdig_capture: Optional[pulumi.Input[bool]] = None, default_team: Optional[pulumi.Input[bool]] = None, description: Optional[pulumi.Input[str]] = None, filter: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, scope_by: Optional[pulumi.Input[str]] = None, theme: Optional[pulumi.Input[str]] = None, user_roles: Optional[pulumi.Input[Sequence[pulumi.Input['TeamUserRoleArgs']]]] = None): """ The set of arguments for constructing a Team resource. :param pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]] entrypoints: Main entry point for the current team in the product. See the Entrypoint argument reference section for more information. :param pulumi.Input[bool] can_see_infrastructure_events: TODO. Default: false. :param pulumi.Input[bool] can_use_aws_data: TODO. Default: false. :param pulumi.Input[str] description: A description of the team. :param pulumi.Input[str] filter: If the team can only see some resources, write down a filter of such resources. :param pulumi.Input[str] name: The name of the Monitor Team. It must be unique and must not exist in Secure. :param pulumi.Input[str] scope_by: Scope for the team. Default: "container". :param pulumi.Input[str] theme: Colour of the team. Default: "#73A1F7". 
""" pulumi.set(__self__, "entrypoints", entrypoints) if can_see_infrastructure_events is not None: pulumi.set(__self__, "can_see_infrastructure_events", can_see_infrastructure_events) if can_use_aws_data is not None: pulumi.set(__self__, "can_use_aws_data", can_use_aws_data) if can_use_sysdig_capture is not None: pulumi.set(__self__, "can_use_sysdig_capture", can_use_sysdig_capture) if default_team is not None: pulumi.set(__self__, "default_team", default_team) if description is not None: pulumi.set(__self__, "description", description) if filter is not None: pulumi.set(__self__, "filter", filter) if name is not None: pulumi.set(__self__, "name", name) if scope_by is not None: pulumi.set(__self__, "scope_by", scope_by) if theme is not None: pulumi.set(__self__, "theme", theme) if user_roles is not None: pulumi.set(__self__, "user_roles", user_roles) @property @pulumi.getter def entrypoints(self) -> pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]]: """ Main entry point for the current team in the product. See the Entrypoint argument reference section for more information. """ return pulumi.get(self, "entrypoints") @entrypoints.setter def entrypoints(self, value: pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]]): pulumi.set(self, "entrypoints", value) @property @pulumi.getter(name="canSeeInfrastructureEvents") def can_see_infrastructure_events(self) -> Optional[pulumi.Input[bool]]: """ TODO. Default: false. """ return pulumi.get(self, "can_see_infrastructure_events") @can_see_infrastructure_events.setter def can_see_infrastructure_events(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "can_see_infrastructure_events", value) @property @pulumi.getter(name="canUseAwsData") def can_use_aws_data(self) -> Optional[pulumi.Input[bool]]: """ TODO. Default: false. 
""" return pulumi.get(self, "can_use_aws_data") @can_use_aws_data.setter def can_use_aws_data(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "can_use_aws_data", value) @property @pulumi.getter(name="canUseSysdigCapture") def can_use_sysdig_capture(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "can_use_sysdig_capture") @can_use_sysdig_capture.setter def can_use_sysdig_capture(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "can_use_sysdig_capture", value) @property @pulumi.getter(name="defaultTeam") def default_team(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "default_team") @default_team.setter def default_team(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "default_team", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: """ A description of the team. """ return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @property @pulumi.getter def filter(self) -> Optional[pulumi.Input[str]]: """ If the team can only see some resources, write down a filter of such resources. """ return pulumi.get(self, "filter") @filter.setter def filter(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "filter", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name of the Monitor Team. It must be unique and must not exist in Secure. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="scopeBy") def scope_by(self) -> Optional[pulumi.Input[str]]: """ Scope for the team. Default: "container". 
""" return pulumi.get(self, "scope_by") @scope_by.setter def scope_by(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "scope_by", value) @property @pulumi.getter def theme(self) -> Optional[pulumi.Input[str]]: """ Colour of the team. Default: "#73A1F7". """ return pulumi.get(self, "theme") @theme.setter def theme(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "theme", value) @property @pulumi.getter(name="userRoles") def user_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TeamUserRoleArgs']]]]: return pulumi.get(self, "user_roles") @user_roles.setter def user_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TeamUserRoleArgs']]]]): pulumi.set(self, "user_roles", value) @pulumi.input_type class _TeamState: def __init__(__self__, *, can_see_infrastructure_events: Optional[pulumi.Input[bool]] = None, can_use_aws_data: Optional[pulumi.Input[bool]] = None, can_use_sysdig_capture: Optional[pulumi.Input[bool]] = None, default_team: Optional[pulumi.Input[bool]] = None, description: Optional[pulumi.Input[str]] = None, entrypoints: Optional[pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]]] = None, filter: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, scope_by: Optional[pulumi.Input[str]] = None, theme: Optional[pulumi.Input[str]] = None, user_roles: Optional[pulumi.Input[Sequence[pulumi.Input['TeamUserRoleArgs']]]] = None, version: Optional[pulumi.Input[int]] = None): """ Input properties used for looking up and filtering Team resources. :param pulumi.Input[bool] can_see_infrastructure_events: TODO. Default: false. :param pulumi.Input[bool] can_use_aws_data: TODO. Default: false. :param pulumi.Input[str] description: A description of the team. :param pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]] entrypoints: Main entry point for the current team in the product. See the Entrypoint argument reference section for more information. 
:param pulumi.Input[str] filter: If the team can only see some resources, write down a filter of such resources. :param pulumi.Input[str] name: The name of the Monitor Team. It must be unique and must not exist in Secure. :param pulumi.Input[str] scope_by: Scope for the team. Default: "container". :param pulumi.Input[str] theme: Colour of the team. Default: "#73A1F7". """ if can_see_infrastructure_events is not None: pulumi.set(__self__, "can_see_infrastructure_events", can_see_infrastructure_events) if can_use_aws_data is not None: pulumi.set(__self__, "can_use_aws_data", can_use_aws_data) if can_use_sysdig_capture is not None: pulumi.set(__self__, "can_use_sysdig_capture", can_use_sysdig_capture) if default_team is not None: pulumi.set(__self__, "default_team", default_team) if description is not None: pulumi.set(__self__, "description", description) if entrypoints is not None: pulumi.set(__self__, "entrypoints", entrypoints) if filter is not None: pulumi.set(__self__, "filter", filter) if name is not None: pulumi.set(__self__, "name", name) if scope_by is not None: pulumi.set(__self__, "scope_by", scope_by) if theme is not None: pulumi.set(__self__, "theme", theme) if user_roles is not None: pulumi.set(__self__, "user_roles", user_roles) if version is not None: pulumi.set(__self__, "version", version) @property @pulumi.getter(name="canSeeInfrastructureEvents") def can_see_infrastructure_events(self) -> Optional[pulumi.Input[bool]]: """ TODO. Default: false. """ return pulumi.get(self, "can_see_infrastructure_events") @can_see_infrastructure_events.setter def can_see_infrastructure_events(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "can_see_infrastructure_events", value) @property @pulumi.getter(name="canUseAwsData") def can_use_aws_data(self) -> Optional[pulumi.Input[bool]]: """ TODO. Default: false. 
""" return pulumi.get(self, "can_use_aws_data") @can_use_aws_data.setter def can_use_aws_data(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "can_use_aws_data", value) @property @pulumi.getter(name="canUseSysdigCapture") def can_use_sysdig_capture(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "can_use_sysdig_capture") @can_use_sysdig_capture.setter def can_use_sysdig_capture(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "can_use_sysdig_capture", value) @property @pulumi.getter(name="defaultTeam") def default_team(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "default_team") @default_team.setter def default_team(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "default_team", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: """ A description of the team. """ return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @property @pulumi.getter def entrypoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]]]: """ Main entry point for the current team in the product. See the Entrypoint argument reference section for more information. """ return pulumi.get(self, "entrypoints") @entrypoints.setter def entrypoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TeamEntrypointArgs']]]]): pulumi.set(self, "entrypoints", value) @property @pulumi.getter def filter(self) -> Optional[pulumi.Input[str]]: """ If the team can only see some resources, write down a filter of such resources. """ return pulumi.get(self, "filter") @filter.setter def filter(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "filter", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name of the Monitor Team. It must be unique and must not exist in Secure. 
""" return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="scopeBy") def scope_by(self) -> Optional[pulumi.Input[str]]: """ Scope for the team. Default: "container". """ return pulumi.get(self, "scope_by") @scope_by.setter def scope_by(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "scope_by", value) @property @pulumi.getter def theme(self) -> Optional[pulumi.Input[str]]: """ Colour of the team. Default: "#73A1F7". """ return pulumi.get(self, "theme") @theme.setter def theme(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "theme", value) @property @pulumi.getter(name="userRoles") def user_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TeamUserRoleArgs']]]]: return pulumi.get(self, "user_roles") @user_roles.setter def user_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TeamUserRoleArgs']]]]): pulumi.set(self, "user_roles", value) @property @pulumi.getter def version(self) -> Optional[pulumi.Input[int]]: return pulumi.get(self, "version") @version.setter def version(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "version", value) class Team(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, can_see_infrastructure_events: Optional[pulumi.Input[bool]] = None, can_use_aws_data: Optional[pulumi.Input[bool]] = None, can_use_sysdig_capture: Optional[pulumi.Input[bool]] = None, default_team: Optional[pulumi.Input[bool]] = None, description: Optional[pulumi.Input[str]] = None, entrypoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamEntrypointArgs']]]]] = None, filter: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, scope_by: Optional[pulumi.Input[str]] = None, theme: Optional[pulumi.Input[str]] = None, user_roles: 
Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamUserRoleArgs']]]]] = None, __props__=None): """ ## Import Monitor Teams can be imported using the ID, e.g. ```sh $ pulumi import sysdig:Monitor/team:Team example 12345 ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[bool] can_see_infrastructure_events: TODO. Default: false. :param pulumi.Input[bool] can_use_aws_data: TODO. Default: false. :param pulumi.Input[str] description: A description of the team. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamEntrypointArgs']]]] entrypoints: Main entry point for the current team in the product. See the Entrypoint argument reference section for more information. :param pulumi.Input[str] filter: If the team can only see some resources, write down a filter of such resources. :param pulumi.Input[str] name: The name of the Monitor Team. It must be unique and must not exist in Secure. :param pulumi.Input[str] scope_by: Scope for the team. Default: "container". :param pulumi.Input[str] theme: Colour of the team. Default: "#73A1F7". """ ... @overload def __init__(__self__, resource_name: str, args: TeamArgs, opts: Optional[pulumi.ResourceOptions] = None): """ ## Import Monitor Teams can be imported using the ID, e.g. ```sh $ pulumi import sysdig:Monitor/team:Team example 12345 ``` :param str resource_name: The name of the resource. :param TeamArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TeamArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, can_see_infrastructure_events: Optional[pulumi.Input[bool]] = None, can_use_aws_data: Optional[pulumi.Input[bool]] = None, can_use_sysdig_capture: Optional[pulumi.Input[bool]] = None, default_team: Optional[pulumi.Input[bool]] = None, description: Optional[pulumi.Input[str]] = None, entrypoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamEntrypointArgs']]]]] = None, filter: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, scope_by: Optional[pulumi.Input[str]] = None, theme: Optional[pulumi.Input[str]] = None, user_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamUserRoleArgs']]]]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = TeamArgs.__new__(TeamArgs) __props__.__dict__["can_see_infrastructure_events"] = can_see_infrastructure_events __props__.__dict__["can_use_aws_data"] = can_use_aws_data __props__.__dict__["can_use_sysdig_capture"] = can_use_sysdig_capture __props__.__dict__["default_team"] = default_team __props__.__dict__["description"] = description if entrypoints is None and not opts.urn: raise TypeError("Missing required property 'entrypoints'") 
__props__.__dict__["entrypoints"] = entrypoints __props__.__dict__["filter"] = filter __props__.__dict__["name"] = name __props__.__dict__["scope_by"] = scope_by __props__.__dict__["theme"] = theme __props__.__dict__["user_roles"] = user_roles __props__.__dict__["version"] = None super(Team, __self__).__init__( 'sysdig:Monitor/team:Team', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, can_see_infrastructure_events: Optional[pulumi.Input[bool]] = None, can_use_aws_data: Optional[pulumi.Input[bool]] = None, can_use_sysdig_capture: Optional[pulumi.Input[bool]] = None, default_team: Optional[pulumi.Input[bool]] = None, description: Optional[pulumi.Input[str]] = None, entrypoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamEntrypointArgs']]]]] = None, filter: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, scope_by: Optional[pulumi.Input[str]] = None, theme: Optional[pulumi.Input[str]] = None, user_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamUserRoleArgs']]]]] = None, version: Optional[pulumi.Input[int]] = None) -> 'Team': """ Get an existing Team resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[bool] can_see_infrastructure_events: TODO. Default: false. :param pulumi.Input[bool] can_use_aws_data: TODO. Default: false. :param pulumi.Input[str] description: A description of the team. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TeamEntrypointArgs']]]] entrypoints: Main entry point for the current team in the product. See the Entrypoint argument reference section for more information. 
:param pulumi.Input[str] filter: If the team can only see some resources, write down a filter of such resources. :param pulumi.Input[str] name: The name of the Monitor Team. It must be unique and must not exist in Secure. :param pulumi.Input[str] scope_by: Scope for the team. Default: "container". :param pulumi.Input[str] theme: Colour of the team. Default: "#73A1F7". """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _TeamState.__new__(_TeamState) __props__.__dict__["can_see_infrastructure_events"] = can_see_infrastructure_events __props__.__dict__["can_use_aws_data"] = can_use_aws_data __props__.__dict__["can_use_sysdig_capture"] = can_use_sysdig_capture __props__.__dict__["default_team"] = default_team __props__.__dict__["description"] = description __props__.__dict__["entrypoints"] = entrypoints __props__.__dict__["filter"] = filter __props__.__dict__["name"] = name __props__.__dict__["scope_by"] = scope_by __props__.__dict__["theme"] = theme __props__.__dict__["user_roles"] = user_roles __props__.__dict__["version"] = version return Team(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="canSeeInfrastructureEvents") def can_see_infrastructure_events(self) -> pulumi.Output[Optional[bool]]: """ TODO. Default: false. """ return pulumi.get(self, "can_see_infrastructure_events") @property @pulumi.getter(name="canUseAwsData") def can_use_aws_data(self) -> pulumi.Output[Optional[bool]]: """ TODO. Default: false. """ return pulumi.get(self, "can_use_aws_data") @property @pulumi.getter(name="canUseSysdigCapture") def can_use_sysdig_capture(self) -> pulumi.Output[Optional[bool]]: return pulumi.get(self, "can_use_sysdig_capture") @property @pulumi.getter(name="defaultTeam") def default_team(self) -> pulumi.Output[Optional[bool]]: return pulumi.get(self, "default_team") @property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: """ A description of the team. 
""" return pulumi.get(self, "description") @property @pulumi.getter def entrypoints(self) -> pulumi.Output[Sequence['outputs.TeamEntrypoint']]: """ Main entry point for the current team in the product. See the Entrypoint argument reference section for more information. """ return pulumi.get(self, "entrypoints") @property @pulumi.getter def filter(self) -> pulumi.Output[Optional[str]]: """ If the team can only see some resources, write down a filter of such resources. """ return pulumi.get(self, "filter") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ The name of the Monitor Team. It must be unique and must not exist in Secure. """ return pulumi.get(self, "name") @property @pulumi.getter(name="scopeBy") def scope_by(self) -> pulumi.Output[Optional[str]]: """ Scope for the team. Default: "container". """ return pulumi.get(self, "scope_by") @property @pulumi.getter def theme(self) -> pulumi.Output[Optional[str]]: """ Colour of the team. Default: "#73A1F7". """ return pulumi.get(self, "theme") @property @pulumi.getter(name="userRoles") def user_roles(self) -> pulumi.Output[Optional[Sequence['outputs.TeamUserRole']]]: return pulumi.get(self, "user_roles") @property @pulumi.getter def version(self) -> pulumi.Output[int]: return pulumi.get(self, "version")
42.219745
159
0.641774
3,125
26,514
5.19488
0.05952
0.109092
0.117038
0.060983
0.890785
0.875385
0.847789
0.836454
0.828754
0.813231
0
0.001952
0.246474
26,514
627
160
42.287081
0.810601
0.216188
0
0.813131
1
0
0.106442
0.029734
0
0
0
0.022329
0
1
0.164141
false
0.002525
0.017677
0.027778
0.280303
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
9
5235e1911fed823deb44d400386dd440d062b110
4,407
py
Python
sports_manager/tests/timeslot/tests_time_slot_list_view.py
hbuyse/dj-sports-manager
7e32cc41347b968b4ede9ea6846de14d9504c3f9
[ "MIT" ]
null
null
null
sports_manager/tests/timeslot/tests_time_slot_list_view.py
hbuyse/dj-sports-manager
7e32cc41347b968b4ede9ea6846de14d9504c3f9
[ "MIT" ]
null
null
null
sports_manager/tests/timeslot/tests_time_slot_list_view.py
hbuyse/dj-sports-manager
7e32cc41347b968b4ede9ea6846de14d9504c3f9
[ "MIT" ]
null
null
null
#! /usr/bin/env python # coding=utf-8 """Tests the views.""" # Django from django.test import TestCase from django.urls import reverse from ..helper import create_team, create_time_slot, create_user class TestTimeSlotListViewAsAnonymous(TestCase): """Tests ListView for TimeSlot.""" def setUp(self): """Create a team.""" self.team = create_team()[1] def tests_empty(self): """Tests.""" r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 0) def tests_one_team(self): """Tests.""" ts = create_time_slot(team=self.team)[1] r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 1) self.assertIn(ts, r.context['timeslot_list']) class TestTimeSlotListViewAsLogged(TestCase): """Tests ListView for TimeSlot.""" def setUp(self): """Create a user that will be able to log in.""" self.user_info, self.user = create_user() self.team = create_team()[1] def tests_empty(self): """Tests.""" self.assertTrue(self.client.login(username=self.user_info['username'], password=self.user_info['password'])) r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 0) def tests_one_team(self): """Tests.""" ts = create_time_slot(team=self.team)[1] self.assertTrue(self.client.login(username=self.user_info['username'], password=self.user_info['password'])) r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 1) self.assertIn(ts, r.context['timeslot_list']) class TestTimeSlotListViewAsStaff(TestCase): """Tests ListView for TimeSlot.""" def setUp(self): """Create a user that will be 
able to log in.""" self.user_info, self.user = create_user(staff=True) self.team = create_team()[1] def tests_empty(self): """Tests.""" self.assertTrue(self.client.login(username=self.user_info['username'], password=self.user_info['password'])) r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 0) def tests_one_team(self): """Tests.""" ts = create_time_slot(team=self.team)[1] self.assertTrue(self.client.login(username=self.user_info['username'], password=self.user_info['password'])) r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 1) self.assertIn(ts, r.context['timeslot_list']) class TestTimeSlotListViewAsSuperuser(TestCase): """Tests ListView for TimeSlot.""" def setUp(self): """Create a user that will be able to log in.""" self.user_info, self.user = create_user(superuser=True) self.team = create_team()[1] def tests_empty(self): """Tests.""" self.assertTrue(self.client.login(username=self.user_info['username'], password=self.user_info['password'])) r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 0) def tests_one_team(self): """Tests.""" ts = create_time_slot(team=self.team)[1] self.assertTrue(self.client.login(username=self.user_info['username'], password=self.user_info['password'])) r = self.client.get(reverse('sports-manager:team-time-slot-list', kwargs={'slug': self.team.slug})) self.assertEqual(r.status_code, 200) self.assertEqual(len(r.context['timeslot_list']), 1) self.assertIn(ts, r.context['timeslot_list'])
36.725
116
0.655548
583
4,407
4.842196
0.120069
0.05101
0.063762
0.085016
0.897627
0.897627
0.897627
0.897627
0.897627
0.897627
0
0.011354
0.180622
4,407
119
117
37.033613
0.770424
0.084865
0
0.84375
0
0
0.140938
0.068948
0
0
0
0
0.40625
1
0.1875
false
0.09375
0.046875
0
0.296875
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
1
0
0
0
0
0
9
bfdbf1a33be37a0c47499653ecdca0de928db365
4,643
py
Python
G1.FacePinball/step.py
ninetailskim/PaddleEffect
320904be1ce597c08967b436fc1a2a274e21616e
[ "MIT" ]
22
2020-12-11T06:23:30.000Z
2021-12-01T11:23:35.000Z
G1.FacePinball/step.py
ninetailskim/PaddleEffect
320904be1ce597c08967b436fc1a2a274e21616e
[ "MIT" ]
null
null
null
G1.FacePinball/step.py
ninetailskim/PaddleEffect
320904be1ce597c08967b436fc1a2a274e21616e
[ "MIT" ]
3
2021-03-09T02:52:22.000Z
2021-12-01T03:17:05.000Z
''' 初始化 1拿图像,确定图像 2拿到图像的左眼右眼作为histgram 3制作mask 4转化为pygame可以使用的格式,并画到screen上 ''' ''' while 拿图像,拿到左眼右眼,确定是否操作 如果有操作: 替换眼睛的贴图 操作杠杆 ''' img_mask = np.zeros(ss, np.uint8) mask_pairs = [1,4,7,9,11,14,17,25,20] mask_point = np.array([res[x - 1] for x in mask_pairs]) cv2.fillPoly(img_mask, [mask_point], 1) mask_pairs = [[28,29,29,28],[30,31,31,30]] mask_point = np.array([[res[y - 1][0] + (-2 if i > 1 else 2),res[y - 1][1]] for x in mask_pairs for i, y in enumerate(x,0)]) mask_point = np.reshape(mask_point, (2,4,2)) cv2.fillPoly(img_mask, mask_point, 0) mask_pairs = [49,50,51,52,53,54,55,56,57,58,59,60] mask_point = np.array([res[x - 1] for x in mask_pairs]) cv2.fillPoly(img_mask, [mask_point], 0) ''' 先取反 然后,看那个线的角度 如果线的角度大于90度,则让她减去180 然后反的角度+2倍的角度 我真的很爱你,嘿嘿,我爱你呦 我草你个傻逼 我真不知道要说啥了 呵呵呵呵 你个大傻逼 ''' class leftBu(pg.sprite.Sprite): def __init__(self, name, x, y): pg.sprite.Sprite.__init__(self) self.image, self.rect = load_image(name) self.max_angle = 179; self.min_angle = 60; self.cur_angle = 60; self.controlled = False self.transforming = False self.rect.left = x self.rect.top = y self.timage = pg.transform.rotate(self.image, self.cur_angle) self.trect = self.rect.copy() def up(self): self.cur_angle += 0.5 if self.cur_angle > self.max_angle: self.cur_angle = self.max_angle self.controlled = False self.timage = pg.transform.rotate(self.image, self.cur_angle) if self.cur_angle > 90: return self.timage, (0, - math.sin(degreeToRad(self.cur_angle - 90)) * self.rect.height) else: return self.timage, (0, 0) def down(self): self.cur_angle -= 0.5 if self.cur_angle < self.min_angle: self.cur_angle = self.min_angle self.timage = pg.transform.rotate(self.image, self.cur_angle) self.transforming = False if self.cur_angle > 90: return self.timage, (0, - math.sin(degreeToRad(self.cur_angle - 90)) * self.rect.height) else: return self.timage, (0, 0) def update(self): print(self.cur_angle) if self.controlled: _, (tx, ty) = self.up() self.trect.top = self.rect.top + ty self.trect.left = 
self.rect.left + tx else: _, (tx, ty) = self.down() self.trect.top = self.rect.top + ty self.trect.left = self.rect.left + tx class rightBu(pg.sprite.Sprite): def __init__(self, name, x, y): pg.sprite.Sprite.__init__(self) self.image, self.rect = load_image(name) self.max_angle = 300; self.min_angle = 181; self.cur_angle = 300; self.controlled = False self.transforming = False self.rect.left = x self.rect.top = y self.timage = pg.transform.rotate(self.image, self.cur_angle) self.trect = self.rect.copy() def up(self): self.cur_angle -= 0.5 if self.cur_angle < self.min_angle: self.cur_angle = self.min_angle self.controlled = False self.timage = pg.transform.rotate(self.image, self.cur_angle) if self.cur_angle < 270: return self.timage, ((1 - math.cos(degreeToRad(270 - self.cur_angle))) * self.rect.height, -math.sin(degreeToRad(270 - self.cur_angle)) * self.rect.height) else: return self.timage, ((1 - math.cos(degreeToRad(self.cur_angle - 270))) * self.rect.height, math.sin(degreeToRad(self.cur_angle - 270)) *self.rect.height) def down(self): self.cur_angle += 0.5 if self.cur_angle > self.max_angle: self.cur_angle = self.max_angle self.timage = pg.transform.rotate(self.image,, self.cur_angle) self.transforming = False if self.cur_angle < 270: return self.timage, ((1 - math.cos(degreeToRad(270 - self.cur_angle))) * self.rect.height, -math.sin(degreeToRad(270 - self.cur_angle)) * self.rect.height) else: return self.timage, ((1 - math.cos(degreeToRad(self.cur_angle - 270))) * self.rect.height, math.sin(degreeToRad(self.cur_angle - 270)) *self.rect.height) def update(self): print(self.cur_angle) if self.controlled: _, (tx, ty) = self.up() self.trect.top = self.rect.top + ty self.trect.left = self.rect.left + tx else: _, (tx, ty) = self.down() self.trect.top = self.rect.top + ty self.trect.left = self.rect.left + tx
33.890511
167
0.598966
670
4,643
4.019403
0.179104
0.093576
0.160416
0.095061
0.837727
0.830672
0.824731
0.813962
0.813962
0.813962
0
0.045734
0.265346
4,643
137
168
33.890511
0.74377
0
0
0.814433
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.020619
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
87130ba5be2a95c0bc7b7b08ce1b329bc0a6e53c
21,314
py
Python
WebBrickLibs/EventHandlers/tests/TestTimer.py
AndyThirtover/wb_gateway
69f9c870369085f4440033201e2fb263a463a523
[ "BSD-3-Clause" ]
null
null
null
WebBrickLibs/EventHandlers/tests/TestTimer.py
AndyThirtover/wb_gateway
69f9c870369085f4440033201e2fb263a463a523
[ "BSD-3-Clause" ]
null
null
null
WebBrickLibs/EventHandlers/tests/TestTimer.py
AndyThirtover/wb_gateway
69f9c870369085f4440033201e2fb263a463a523
[ "BSD-3-Clause" ]
null
null
null
# Copyright L.P.Klyne 2013 # Licenced under 3 clause BSD licence # # Tests for timer # import sys, logging, time import unittest from MiscLib.DomHelpers import * from EventLib.Event import Event, makeEvent from EventHandlers.BaseHandler import * from EventHandlers.EventRouterLoad import EventRouterLoader import EventHandlers.tests.TestEventLogger as TestEventLogger import Events from Utils import * # Configuration for the tests # # this test uses the an event test a timer # testConfigTimer = """<?xml version="1.0" encoding="utf-8"?> <eventInterfaces> <eventInterface module='EventHandlers.tests.TestEventLogger' name='TestEventLogger'> <!-- This saves all events --> <eventtype type=""> <eventsource source="" > <event> <!-- interested in all events --> </event> </eventsource> </eventtype> </eventInterface> <eventInterface module='EventHandlers.Timer' name='Timer' > <presence type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/0' key="state" invert="true"/> <duration type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/duration' /> <enable type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/enable' /> <hold type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/1' key="state" invert="true" /> <eventtype type="http://id.webbrick.co.uk/events/timer" > <eventsource source="testing/timer/1"> <event> <params> <testEq name="dayphase" value='Morning:Dark' /> <testEq name="occupancy" value='1' /> <testEq name="hold" value="0" /> </params> <newEvent type='testing' source='result/timertest' > </newEvent> </event> </eventsource> </eventtype> </eventInterface> </eventInterfaces> """ testConfigTimer2 = """<?xml version="1.0" encoding="utf-8"?> <eventInterfaces> <eventInterface module='EventHandlers.tests.TestEventLogger' name='TestEventLogger'> <!-- This saves all events --> <eventtype type=""> <eventsource source="" > <event> <!-- interested in all events --> </event> </eventsource> </eventtype> 
</eventInterface> <eventInterface module='EventHandlers.Timer' name='Timer' > <presence type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/0' key="state" /> <duration type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/duration' /> <enable type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/enable' /> <hold type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/1' key="state" invert="1" /> <eventtype type="http://id.webbrick.co.uk/events/timer" > <eventsource source="testing/timer/1"> <event> <params> <testEq name="dayphase" value='Morning:Dark' /> <testEq name="occupancy" value='1' /> <testEq name="hold" value="0" /> </params> <newEvent type='testing' source='result/timertest' > </newEvent> </event> </eventsource> </eventtype> </eventInterface> </eventInterfaces> """ testConfigTimer3 = """<?xml version="1.0" encoding="utf-8"?> <eventInterfaces> <eventInterface module='EventHandlers.tests.TestEventLogger' name='TestEventLogger'> <!-- This saves all events --> <eventtype type=""> <eventsource source="" > <event> <!-- interested in all events --> </event> </eventsource> </eventtype> </eventInterface> <eventInterface module='EventHandlers.Timer' name='Timer' > <presence type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/0' key="state" invert="true"/> <light_state type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/2' invert='true'/> <duration type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/duration' /> <enable type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/enable' /> <hold type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/1' key="state" invert="true" /> <eventtype type="http://id.webbrick.co.uk/events/timer" > <eventsource source="testing/timer/1"> <event> <params> <testEq name="dayphase" value='Morning:Dark' /> <testEq name="occupancy" value='1' /> <testEq name="hold" 
value="0" /> </params> <newEvent type='testing' source='result/timertest' > </newEvent> </event> </eventsource> </eventtype> </eventInterface> </eventInterfaces> """ testConfigTimer4 = """<?xml version="1.0" encoding="utf-8"?> <eventInterfaces> <eventInterface module='EventHandlers.tests.TestEventLogger' name='TestEventLogger'> <!-- This saves all events --> <eventtype type=""> <eventsource source="" > <event> <!-- interested in all events --> </event> </eventsource> </eventtype> </eventInterface> <eventInterface module='EventHandlers.Timer' name='Timer' > <presence type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/0' key="state" invert="true"/> <light_state type='http://id.webbrick.co.uk/events/webbrick/AI' source='webbrick/100/AI/0' threshold='80' invert="true"/> <duration type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/duration' /> <enable type='http://id.webbrick.co.uk/events/config/get' source='test/timer/1/enable' /> <hold type='http://id.webbrick.co.uk/events/webbrick/DO' source='webbrick/100/DO/1' key="state" invert="true" /> <eventtype type="http://id.webbrick.co.uk/events/timer" > <eventsource source="testing/timer/1"> <event> <params> <testEq name="dayphase" value='Morning:Dark' /> <testEq name="occupancy" value='1' /> <testEq name="hold" value="0" /> <testEq name="light_state" value="0" /> </params> <newEvent type='testing' source='result/timertest' > </newEvent> </event> </eventsource> </eventtype> </eventInterface> </eventInterfaces> """ class TestTimerAction(unittest.TestCase): def setUp(self): self._log = logging.getLogger( "TestTimerAction" ) self._log.debug( "\n\nsetUp" ) self.router = None self.loader = None def tearDown(self): self._log.debug( "\n\ntearDown" ) if self.loader: self.loader.stop() # all tasks self.loader = None self.router = None time.sleep(2) def expectNevents(self, cnt ): idx = 20 while (len(TestEventLogger._events) < cnt) and (idx > 0): time.sleep(0.05) idx = idx - 1 if ( 
len(TestEventLogger._events) != cnt): TestEventLogger.logEvents() self.assertEqual( len(TestEventLogger._events), cnt) # Actual tests follow def testTimerEvent(self): self._log.debug( "\n\ntestTimerEvent" ) self.loader = EventRouterLoader() self.loader.loadHandlers( getDictFromXmlString(testConfigTimer) ) self.loader.start() # all tasks self.router = self.loader.getEventRouter() self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConEnable ) self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDuration ) self.router.publish( EventAgent("TestTimerAction"), Events.evtHome ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtMorningDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_on ) # this should kick presence but not create an event self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_off ) # this should kick presence because invert is true self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_off ) # this should kick presence because invert is true (twice because this happens) self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond0 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond1 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond2 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond3 ) # Create a 'second' time.sleep(1) self.expectNevents( 14 ) self.assertEqual( TestEventLogger._events[12].getType(), u'http://id.webbrick.co.uk/events/timer' ) self.assertEqual( TestEventLogger._events[12].getSource(), "testing/timer/1" ) self.assertEqual( TestEventLogger._events[13].getType(), u'testing' ) self.assertEqual( TestEventLogger._events[13].getSource(), "result/timertest" ) def testTimerDisable(self): self._log.debug( "\n\ntestTimerDisable" ) self.loader = EventRouterLoader() 
self.loader.loadHandlers( getDictFromXmlString(testConfigTimer) ) self.loader.start() # all tasks self.router = self.loader.getEventRouter() self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDisable ) self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDuration ) self.router.publish( EventAgent("TestTimerAction"), Events.evtHome ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtMorningDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_off ) # this should kick presence self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond0 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond1 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond2 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond3 ) # Create a 'second' self.expectNevents( 10 ) def testTimerHold(self): self._log.debug( "\n\ntestTimerHold" ) self.loader = EventRouterLoader() self.loader.loadHandlers( getDictFromXmlString(testConfigTimer) ) self.loader.start() # all tasks self.router = self.loader.getEventRouter() self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConEnable ) self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDuration ) self.router.publish( EventAgent("TestTimerAction"), Events.evtHome ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtMorningDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_1_off ) # this should create a hold state (because invert is set) self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_off ) # this should kick presence self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond0 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), 
Events.evtSecond1 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond2 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond3 ) # Create a 'second' self.expectNevents( 11 ) def testTimerHoldPI(self): self._log.debug( "\n\ntestTimerHoldPI" ) self.loader = EventRouterLoader() self.loader.loadHandlers( getDictFromXmlString(testConfigTimer2) ) self.loader.start() # all tasks self.router = self.loader.getEventRouter() self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConEnable ) self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDuration ) self.router.publish( EventAgent("TestTimerAction"), Events.evtHome ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtMorningDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_1_on ) # this should NOT create a hold state (because invert is set) self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_on ) # this should kick presence self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond0 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond1 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond2 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond3 ) # Create a 'second' self.expectNevents( 13 ) self.assertEqual( TestEventLogger._events[11].getType(), u'http://id.webbrick.co.uk/events/timer' ) self.assertEqual( TestEventLogger._events[11].getSource(), "testing/timer/1" ) self.assertEqual( TestEventLogger._events[12].getType(), u'testing' ) self.assertEqual( TestEventLogger._events[12].getSource(), "result/timertest" ) def testTimerEventWithState(self): self._log.debug( "\n\ntestTimerEventWithState" ) self.loader = EventRouterLoader() self.loader.loadHandlers( 
getDictFromXmlString(testConfigTimer3) ) self.loader.start() # all tasks self.router = self.loader.getEventRouter() self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConEnable ) self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDuration ) self.router.publish( EventAgent("TestTimerAction"), Events.evtHome ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtMorningDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_2_off ) # this should kick light on, but without presence self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond0 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond1 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond2 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond3 ) # Create a 'second' self.expectNevents( 12 ) self.assertEqual( TestEventLogger._events[10].getType(), u'http://id.webbrick.co.uk/events/timer' ) self.assertEqual( TestEventLogger._events[10].getSource(), "testing/timer/1" ) self.assertEqual( TestEventLogger._events[11].getType(), u'testing' ) self.assertEqual( TestEventLogger._events[11].getSource(), "result/timertest" ) def testTimerEventWithStateAndPresence(self): self._log.debug( "\n\ntestTimerEventWithStateAndPresence" ) self.loader = EventRouterLoader() self.loader.loadHandlers( getDictFromXmlString(testConfigTimer3) ) self.loader.start() # all tasks self.router = self.loader.getEventRouter() self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConEnable ) self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDuration ) self.router.publish( EventAgent("TestTimerAction"), Events.evtHome ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtMorningDark ) 
self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_2_off ) # this should kick light on, self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_off ) # this should kick presence self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_off ) # this should kick presence (twice because this happens!) self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond0 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond1 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond2 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond3 ) # Create a 'second' self.expectNevents( 14 ) self.assertEqual( TestEventLogger._events[12].getType(), u'http://id.webbrick.co.uk/events/timer' ) self.assertEqual( TestEventLogger._events[12].getSource(), "testing/timer/1" ) self.assertEqual( TestEventLogger._events[13].getType(), u'testing' ) self.assertEqual( TestEventLogger._events[13].getSource(), "result/timertest" ) def testTimerEventWithAnalogue(self): self._log.debug( "\n\ntestTimerEventWithAnalogue" ) self.loader = EventRouterLoader() self.loader.loadHandlers( getDictFromXmlString(testConfigTimer4) ) self.loader.start() # all tasks self.router = self.loader.getEventRouter() self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConEnable ) self.router.publish( EventAgent("TestTimerAction"), Events.evtTimerConDuration ) self.router.publish( EventAgent("TestTimerAction"), Events.evtHome ) self.router.publish( EventAgent("TestTimerAction"), Events.evtDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtMorningDark ) self.router.publish( EventAgent("TestTimerAction"), Events.evtAI_0_10 ) # this should show light off, self.router.publish( EventAgent("TestTimerAction"), Events.evtAI_0_90 ) # this should kick light on, self.router.publish( EventAgent("TestTimerAction"), Events.evtAI_0_90 ) # this should kick light on, 
twice because it can happen self.router.publish( EventAgent("TestTimerAction"), Events.evtDO_0_off ) # this should kick presence self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond0 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond1 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond2 ) # Create a 'second' self.router.publish( EventAgent("TestTimerAction"), Events.evtSecond3 ) # Create a 'second' self.expectNevents( 15 ) self.assertEqual( TestEventLogger._events[13].getType(), u'http://id.webbrick.co.uk/events/timer' ) self.assertEqual( TestEventLogger._events[13].getSource(), "testing/timer/1" ) self.assertEqual( TestEventLogger._events[14].getType(), u'testing' ) self.assertEqual( TestEventLogger._events[14].getSource(), "result/timertest" ) # Code to run unit tests directly from command line. # Constructing the suite manually allows control over the order of tests. def getTestSuite(): suite = unittest.TestSuite() suite.addTest(TestTimerAction("testTimerEvent")) return suite from MiscLib import TestUtils def getTestSuite(select="unit"): """ Get test suite select is one of the following: "unit" return suite of unit tests only "component" return suite of unit and component tests "all" return suite of unit, component and integration tests "pending" return suite of pending tests name a single named test to be run """ testdict = { "unit": [ "testTimerEvent" , "testTimerDisable" , "testTimerHold" , "testTimerHoldPI" , "testTimerEventWithState" , "testTimerEventWithStateAndPresence" , "testTimerEventWithAnalogue" ], "zzcomponent": [ "testComponents" ], "zzintegration": [ "testIntegration" ], "zzpending": [ "testPending" ] } return TestUtils.getTestSuite(TestTimerAction, testdict, select=select) # Run unit tests directly from command line if __name__ == "__main__": TestUtils.runTests("TestTimerAction.log", getTestSuite, sys.argv)
48.885321
161
0.643896
2,104
21,314
6.486692
0.108365
0.064478
0.098403
0.156287
0.836093
0.822904
0.820999
0.788687
0.775498
0.775498
0
0.012501
0.226846
21,314
435
162
48.997701
0.815705
0.090504
0
0.71137
0
0.052478
0.442687
0.064834
0
0
0
0
0.061224
1
0.034985
false
0
0.029155
0
0.072886
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5e3e0d06e57db24ca4f80a4ff0ef33b4afba1726
3,946
py
Python
button.py
kunjmehta/alien-shooter
efdcca8d3e9b4712803bc00f8236c86462deadd2
[ "MIT" ]
null
null
null
button.py
kunjmehta/alien-shooter
efdcca8d3e9b4712803bc00f8236c86462deadd2
[ "MIT" ]
null
null
null
button.py
kunjmehta/alien-shooter
efdcca8d3e9b4712803bc00f8236c86462deadd2
[ "MIT" ]
2
2019-03-25T19:03:31.000Z
2020-11-18T04:35:03.000Z
import pygame.font class ArcadeButton: """Class to define Arcade Mode button""" def __init__(self, screen, msg): """Initialize button attributes""" self.screen = screen self.screen_rect = screen.get_rect() # Set the dimensions and properties of the button. self.width, self.height = 200, 50 self.button_color = (0, 255, 0) self.text_color = (255, 255, 255) self.font = pygame.font.SysFont(None, 48) # Build the button's rect object, and center it. self.rect = pygame.Rect(0, 0, self.width, self.height) self.rect.center = self.screen_rect.center self.rect.centery = self.screen_rect.centery - self.height - 20 # The button message only needs to be prepped once. self.prep_msg(msg) def prep_msg(self, msg): """Turn msg into a rendered image, and center text on the button.""" self.msg_image = self.font.render(msg, True, self.text_color, self.button_color) self.msg_image_rect = self.msg_image.get_rect() self.msg_image_rect.center = self.rect.center def draw_button(self): """Draw blank button, then draw message.""" self.screen.fill(self.button_color, self.rect) self.screen.blit(self.msg_image, self.msg_image_rect) class TimeButton: """"Class to define Timed Mode button""" def __init__(self, screen, msg): """Initialize button attributes""" self.screen = screen self.screen_rect = screen.get_rect() # Set the dimensions and properties of the button. self.width, self.height = 200, 50 self.button_color = (0, 255, 0) self.text_color = (255, 255, 255) self.font = pygame.font.SysFont(None, 48) # Build the button's rect object, and center it. self.rect = pygame.Rect(0, 0, self.width, self.height) self.rect.center = self.screen_rect.center # The button message only needs to be prepped once. 
self.prep_msg(msg) def prep_msg(self, msg): """Turn msg into a rendered image, and center text on the button.""" self.msg_image = self.font.render(msg, True, self.text_color, self.button_color) self.msg_image_rect = self.msg_image.get_rect() self.msg_image_rect.center = self.rect.center def draw_button(self): """Draw blank button, then draw message.""" self.screen.fill(self.button_color, self.rect) self.screen.blit(self.msg_image, self.msg_image_rect) class SurvivalButton: """"Class to define Survival Mode button""" def __init__(self, screen, msg): """Initialize button attributes""" self.screen = screen self.screen_rect = screen.get_rect() # Set the dimensions and properties of the button. self.width, self.height = 250, 50 self.button_color = (0, 255, 0) self.text_color = (255, 255, 255) self.font = pygame.font.SysFont(None, 48) # Build the button's rect object, and center it. self.rect = pygame.Rect(0, 0, self.width, self.height) self.rect.center = self.screen_rect.center self.rect.centery = self.screen_rect.centery + self.height + 20 # The button message only needs to be prepped once. self.prep_msg(msg) def prep_msg(self, msg): """Turn msg into a rendered image, and center text on the button.""" self.msg_image = self.font.render(msg, True, self.text_color, self.button_color) self.msg_image_rect = self.msg_image.get_rect() self.msg_image_rect.center = self.rect.center def draw_button(self): """Draw blank button, then draw message.""" self.screen.fill(self.button_color, self.rect) self.screen.blit(self.msg_image, self.msg_image_rect)
38.31068
76
0.62519
542
3,946
4.409594
0.125461
0.061506
0.090377
0.060251
0.950628
0.950628
0.950628
0.950628
0.950628
0.950628
0
0.025277
0.26812
3,946
102
77
38.686275
0.802285
0.237202
0
0.883333
0
0
0
0
0
0
0
0
0
1
0.15
false
0
0.016667
0
0.216667
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
0d7cb968d043c6899ecf747f7fb5dce3ff8dd6ea
2,560
py
Python
test/project_tests/test_linear_transfer.py
amit17133129/pyMG-2016
b82a60811bb0a8b91d8793c47177a240221f9176
[ "BSD-2-Clause" ]
2
2016-04-04T15:20:50.000Z
2020-08-01T19:28:55.000Z
test/project_tests/test_linear_transfer.py
amit17133129/pyMG-2016
b82a60811bb0a8b91d8793c47177a240221f9176
[ "BSD-2-Clause" ]
1
2020-10-02T05:44:45.000Z
2020-10-02T05:44:45.000Z
test/project_tests/test_linear_transfer.py
amit17133129/pyMG-2016
b82a60811bb0a8b91d8793c47177a240221f9176
[ "BSD-2-Clause" ]
11
2016-03-26T18:37:06.000Z
2020-10-01T19:44:55.000Z
import numpy as np from project.linear_transfer import LinearTransfer def test_prolong_has_expected_order_of_accuracy(): expected_order = 2 k = 4 ntests = 6 ndofs = [] err_list = [] for i in range(ntests): ndofs.append(int(2 ** (i + 5) - 1)) ndofs_fine = ndofs[-1] ndofs_coarse = int((ndofs_fine + 1) / 2 - 1) trans = LinearTransfer(ndofs_fine=ndofs_fine, ndofs_coarse=ndofs_coarse) dx_coarse = 1.0/(ndofs_coarse+1) x_coarse = np.array([(i + 1) * dx_coarse for i in range(ndofs_coarse)]) u_coarse = np.sin(np.pi * k * x_coarse) dx_fine = 1.0/(ndofs_fine+1) x_fine = np.array([(i + 1) * dx_fine for i in range(ndofs_fine)]) u_fine_exact = np.sin(np.pi * k * x_fine) u_fine_comp = trans.prolong(u_coarse) err_list.append(np.linalg.norm(u_fine_exact - u_fine_comp, np.inf) / np.linalg.norm(u_fine_exact, np.inf)) order = [] for i in range(1, len(err_list)): order.append(np.log(err_list[i - 1] / err_list[i]) / np.log(ndofs[i] / ndofs[i - 1])) order = np.array(order) assert (order > expected_order * 0.9).all() and (order < expected_order * 1.1).all(), \ 'Order of accuracy of the prolongation is not ' + str(expected_order) def test_restrict_has_expected_order_of_accuracy(): expected_order = 2 k = 4 ntests = 6 ndofs = [] err_list = [] for i in range(ntests): ndofs.append(int(2 ** (i + 5) - 1)) ndofs_fine = ndofs[-1] ndofs_coarse = int((ndofs_fine + 1) / 2 - 1) trans = LinearTransfer(ndofs_fine=ndofs_fine, ndofs_coarse=ndofs_coarse) dx_coarse = 1.0/(ndofs_coarse+1) x_coarse = np.array([(i + 1) * dx_coarse for i in range(ndofs_coarse)]) u_coarse_exact = np.sin(np.pi * k * x_coarse) dx_fine = 1.0/(ndofs_fine+1) x_fine = np.array([(i + 1) * dx_fine for i in range(ndofs_fine)]) u_fine = np.sin(np.pi * k * x_fine) u_coarse_comp = trans.restrict(u_fine) err_list.append(np.linalg.norm(u_coarse_exact - u_coarse_comp, np.inf) / np.linalg.norm(u_coarse_exact, np.inf)) order = [] for i in range(1, len(err_list)): order.append(np.log(err_list[i - 1] / err_list[i]) / np.log(ndofs[i] / ndofs[i - 
1])) order = np.array(order) assert (order > expected_order * 0.9).all() and (order < expected_order * 1.1).all(), \ 'Order of accuracy of the restriction is not ' + str(expected_order)
32
93
0.603125
406
2,560
3.573892
0.150246
0.074431
0.033081
0.060648
0.876637
0.847691
0.827016
0.753963
0.731909
0.731909
0
0.026441
0.261328
2,560
79
94
32.405063
0.740878
0
0
0.703704
0
0
0.034766
0
0
0
0
0
0.037037
1
0.037037
false
0
0.037037
0
0.074074
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
0d8806574ebbd3e8b4a6d84f1f76223cc5965073
14,499
py
Python
chemreg/substance/migrations/0001_vega_sprint.py
Chemical-Curation/chemcurator
bcd7fab84e407f06502e6873c38820724d4e54e7
[ "MIT" ]
1
2020-10-05T18:02:24.000Z
2020-10-05T18:02:24.000Z
chemreg/substance/migrations/0001_vega_sprint.py
Chemical-Curation/chemcurator_django
bcd7fab84e407f06502e6873c38820724d4e54e7
[ "MIT" ]
207
2020-01-30T19:17:44.000Z
2021-02-24T19:45:29.000Z
chemreg/substance/migrations/0001_vega_sprint.py
Chemical-Curation/chemcurator_django
bcd7fab84e407f06502e6873c38820724d4e54e7
[ "MIT" ]
null
null
null
# Generated by Django 3.0.3 on 2020-11-25 19:33 import chemreg.common.utils import chemreg.common.validators import chemreg.substance.utils from django.conf import settings import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('compound', '0001_vega_sprint'), ] operations = [ migrations.CreateModel( name='QCLevelsType', fields=[ ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('name', models.SlugField(max_length=49, primary_key=True, serialize=False, unique=True)), ('label', models.CharField(max_length=99, unique=True)), ('short_description', models.CharField(max_length=499)), ('long_description', models.TextField()), ('deprecated', models.BooleanField(default=False)), ('rank', models.IntegerField(unique=True)), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='qclevelstype_created_by_set', to=settings.AUTH_USER_MODEL)), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='qclevelstype_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), migrations.CreateModel( name='RelationshipType', fields=[ ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('name', models.SlugField(max_length=49, primary_key=True, serialize=False, unique=True)), ('label', models.CharField(max_length=99, unique=True)), ('short_description', models.CharField(max_length=499)), ('long_description', models.TextField()), ('deprecated', models.BooleanField(default=False)), ('corrolary_label', models.CharField(max_length=99)), ('corrolary_short_description', 
models.CharField(max_length=499)), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='relationshiptype_created_by_set', to=settings.AUTH_USER_MODEL)), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='relationshiptype_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), migrations.CreateModel( name='Source', fields=[ ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('name', models.SlugField(max_length=49, primary_key=True, serialize=False, unique=True)), ('label', models.CharField(max_length=99, unique=True)), ('short_description', models.CharField(max_length=499)), ('long_description', models.TextField()), ('deprecated', models.BooleanField(default=False)), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='source_created_by_set', to=settings.AUTH_USER_MODEL)), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='source_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), migrations.CreateModel( name='Substance', fields=[ ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('id', models.CharField(default=chemreg.substance.utils.build_sid, max_length=50, primary_key=True, serialize=False, unique=True)), ('preferred_name', models.CharField(max_length=255, unique=True, validators=[django.core.validators.RegexValidator("^[a-zA-Z0-9 =<>\\-':.,^%&/{}[\\]()+?=]{3,}$", message="The proposed Preferred Name does not conform to the regular expression ^[a-zA-Z0-9 
=<>\\-':.,^%&/{}[\\]()+?=]{3,}$")])), ('display_name', models.CharField(max_length=255, null=True, unique=True, validators=[django.core.validators.RegexValidator("^[a-zA-Z0-9 =<>\\-':.,^%&/{}[\\]()+?=]{3,}$", message="The proposed display name does not conform to the regular expression ^[a-zA-Z0-9 =<>\\-':.,^%&/{}[\\]()+?=]{3,}$")])), ('description', models.CharField(blank=True, max_length=1024)), ('public_qc_note', models.CharField(blank=True, max_length=1024)), ('private_qc_note', models.CharField(blank=True, max_length=1024)), ('casrn', models.CharField(max_length=50, null=True, unique=True, validators=[django.core.validators.RegexValidator('^[0-9]{2,7}-[0-9]{2}-[0-9]$', message='The proposed CASRN does not conform to the regular expression ^[0-9]{2,7}-[0-9]{2}-[0-9]$'), chemreg.common.validators.validate_casrn_checksum])), ('associated_compound', models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='substance', to='compound.BaseCompound')), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='substance_created_by_set', to=settings.AUTH_USER_MODEL)), ('qc_level', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.QCLevelsType', validators=[chemreg.common.validators.validate_deprecated])), ('source', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.Source', validators=[chemreg.common.validators.validate_deprecated])), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), migrations.CreateModel( name='SynonymType', fields=[ ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('name', models.SlugField(max_length=49, primary_key=True, serialize=False, unique=True)), ('label', models.CharField(max_length=99, unique=True)), ('short_description', models.CharField(max_length=499)), ('long_description', 
models.TextField()), ('deprecated', models.BooleanField(default=False)), ('validation_regular_expression', models.TextField(blank=True, validators=[chemreg.common.validators.validate_is_regex])), ('score_modifier', models.FloatField(default=0)), ('is_casrn', models.BooleanField()), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='synonymtype_created_by_set', to=settings.AUTH_USER_MODEL)), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='synonymtype_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), migrations.CreateModel( name='SynonymQuality', fields=[ ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('name', models.SlugField(max_length=49, primary_key=True, serialize=False, unique=True)), ('label', models.CharField(max_length=99, unique=True)), ('short_description', models.CharField(max_length=499)), ('long_description', models.TextField()), ('deprecated', models.BooleanField(default=False)), ('score_weight', models.FloatField(default=1.0)), ('is_restrictive', models.BooleanField(default=False)), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='synonymquality_created_by_set', to=settings.AUTH_USER_MODEL)), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='synonymquality_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), migrations.CreateModel( name='Synonym', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), 
('updated_at', models.DateTimeField(auto_now=True)), ('identifier', models.TextField(max_length=1024)), ('qc_notes', models.TextField(blank=True, max_length=1024)), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='synonym_created_by_set', to=settings.AUTH_USER_MODEL)), ('source', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.Source')), ('substance', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.Substance')), ('synonym_quality', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.SynonymQuality')), ('synonym_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='substance.SynonymType')), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='synonym_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), migrations.CreateModel( name='SubstanceType', fields=[ ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('name', models.SlugField(max_length=49, primary_key=True, serialize=False, unique=True)), ('label', models.CharField(max_length=99, unique=True)), ('short_description', models.CharField(max_length=499)), ('long_description', models.TextField()), ('deprecated', models.BooleanField(default=False)), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='substancetype_created_by_set', to=settings.AUTH_USER_MODEL)), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='substancetype_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'abstract': False, }, ), 
migrations.AddField( model_name='substance', name='substance_type', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.SubstanceType', validators=[chemreg.common.validators.validate_deprecated]), ), migrations.AddField( model_name='substance', name='updated_by', field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='substance_updated_by_set', to=settings.AUTH_USER_MODEL), ), migrations.CreateModel( name='SubstanceRelationship', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('qc_notes', models.CharField(blank=True, max_length=1024)), ('created_by', models.ForeignKey(default=chemreg.common.utils.get_current_user_pk, editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='substancerelationship_created_by_set', to=settings.AUTH_USER_MODEL)), ('from_substance', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='relationships', to='substance.Substance')), ('relationship_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.RelationshipType')), ('source', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='substance.Source')), ('to_substance', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='related_to', to='substance.Substance')), ('updated_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='substancerelationship_updated_by_set', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['pk'], 'base_manager_name': 'objects', 'unique_together': {('from_substance', 'to_substance', 'source', 'relationship_type')}, }, ), ]
68.71564
318
0.63432
1,551
14,499
5.715023
0.098646
0.028881
0.048962
0.07694
0.840817
0.836191
0.800542
0.781476
0.759477
0.733529
0
0.011779
0.221257
14,499
210
319
69.042857
0.773271
0.003104
0
0.591133
1
0.014778
0.181497
0.060891
0
0
0
0
0
1
0
false
0
0.034483
0
0.054187
0.004926
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
21e53800b1c62184a50f7f18c63767d4dbd0e7e7
2,484
py
Python
notebooks/mypysmps/io/txt_read.py
fvanden/SOC-app
488322c05b6ebe646f003274fa1a2125d993ecd0
[ "MIT" ]
null
null
null
notebooks/mypysmps/io/txt_read.py
fvanden/SOC-app
488322c05b6ebe646f003274fa1a2125d993ecd0
[ "MIT" ]
null
null
null
notebooks/mypysmps/io/txt_read.py
fvanden/SOC-app
488322c05b6ebe646f003274fa1a2125d993ecd0
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
#################
import os
import numpy as np
#import pandas as pd
import csv
from csv import reader

from .csv_read import read_aim_csv
#################
"""
mysmps.io.txt_read
==================

Functions for reading of csv files:
    read_aim_txt
    read_opc_txt

Created on Thu Jul 24 11:22 2020

@author: flovan / fvanden

Revision history:   24.07.2020 - Created
                    14.09.2020 - read_opc_txt added

"""


def read_aim_txt(filename, fileorg = 'AIM', **kwargs):
    """
    Reads SMPS data from a text file generated by AIM software.

    Note: the input file is renamed on disk to have a ``.csv``
    extension before being handed to the csv reader.

    Parameters
    ----------
    filename : str
        path and name of file to read
    fileorg : str
        different file organisations can be found in the default_config
        for new filetypes add mappings here and specify the filetype
    kwargs :
        metadata, dict : user defined metadata - DEFAULT: taken from file
        header, list : user defined header - DEFAULT: taken from file
        delimiter, str : user defined delimiter - DEFAULT: taken from file

    Returns
    -------
    smps : smps
        mysmps.core.smps object
    """
    # rename file into csv file (changes the file on disk, not a copy)
    pre, ext = os.path.splitext(filename)
    newfilename = pre + '.csv'
    os.rename(filename, newfilename)

    # pass to csv reader; iso8859_15 covers western-European characters
    # found in AIM exports
    SMPS = read_aim_csv(newfilename, fileorg, encoding = 'iso8859_15', **kwargs)

    return SMPS


def read_opc_txt(filename, fileorg = 'OPC', **kwargs):
    """
    Reads OPC data from a text file.

    NOTE(review): the call to the csv reader below is commented out, so
    this function currently only renames the file to ``.csv`` and
    implicitly returns None - presumably work in progress; confirm
    before relying on the documented return value.

    Parameters
    ----------
    filename : str
        path and name of file to read
    fileorg : str
        different file organisations can be found in the default_config
        for new filetypes add mappings here and specify the filetype
    kwargs :
        metadata, dict : user defined metadata - DEFAULT: taken from file
        header, list : user defined header - DEFAULT: taken from
            mypysmps.default_config file
        delimiter, str : user defined delimiter - DEFAULT: taken from file

    Returns
    -------
    smps : smps
        mysmps.core.smps object
    """
    # rename file into csv file (changes the file on disk, not a copy)
    pre, ext = os.path.splitext(filename)
    newfilename = pre + '.csv'
    os.rename(filename, newfilename)

    # pass to csv reader - currently disabled, see NOTE in docstring
    #SMPS = read_aim_csv(newfilename, fileorg, encoding = 'iso8859_15', **kwargs)

    #return SMPS
23.884615
93
0.60467
305
2,484
4.84918
0.32459
0.044625
0.064909
0.067613
0.730223
0.707235
0.707235
0.707235
0.707235
0.707235
0
0.022337
0.297101
2,484
103
94
24.116505
0.824742
0.542673
0
0.4
0
0
0.037559
0
0
0
0
0
0
1
0.133333
false
0
0.333333
0
0.533333
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
7
df50543c979efe110ce86ac741954bee536aef27
133
py
Python
src/xincraft/__init__.py
SomePr0grammer/xincraft.py
f1ad494a8bcb2e47a86ec4a25adab7e53636cadb
[ "MIT" ]
3
2022-01-22T06:58:51.000Z
2022-01-22T06:58:54.000Z
src/xincraft/__init__.py
SomeHybrid/xincraft.py
f1ad494a8bcb2e47a86ec4a25adab7e53636cadb
[ "MIT" ]
null
null
null
src/xincraft/__init__.py
SomeHybrid/xincraft.py
f1ad494a8bcb2e47a86ec4a25adab7e53636cadb
[ "MIT" ]
null
null
null
from .mixins.aio import loop
from .mixins.aio import run
from .mixins import errors
from .client import Client

# Package version string exposed at the top level.
__version__ = "2.0.7"
22.166667
28
0.774436
22
133
4.5
0.545455
0.30303
0.262626
0.383838
0
0
0
0
0
0
0
0.026316
0.142857
133
6
29
22.166667
0.842105
0
0
0
0
0
0.037313
0
0
0
0
0
0
1
0
false
0
0.8
0
0.8
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
7
df52fd4ec7820f3674b90cdc81f15670700864f4
6,230
py
Python
oxe-api/test/resource/private/test_get_my_article_content.py
CybersecurityLuxembourg/openxeco
8d4e5578bde6a07f5d6d569b16b4de224abf7bf0
[ "BSD-2-Clause" ]
null
null
null
oxe-api/test/resource/private/test_get_my_article_content.py
CybersecurityLuxembourg/openxeco
8d4e5578bde6a07f5d6d569b16b4de224abf7bf0
[ "BSD-2-Clause" ]
null
null
null
oxe-api/test/resource/private/test_get_my_article_content.py
CybersecurityLuxembourg/openxeco
8d4e5578bde6a07f5d6d569b16b4de224abf7bf0
[ "BSD-2-Clause" ]
null
null
null
import datetime
from test.BaseCase import BaseCase


class TestGetMyArticleContent(BaseCase):
    """Tests for the /private/get_my_article_content/<id> endpoint.

    Each test seeds the database via self.db.insert and asserts on the
    HTTP response. The endpoint requires: an existing article, exactly
    one company tagged on it, the logged-in user assigned to that
    company, and exactly one main article version.
    """

    @BaseCase.login
    def test_ok(self, token):
        # Happy path: one company tag, user assigned to that company,
        # one main version holding one content box.
        self.db.insert({"id": 14, "name": "My company"}, self.db.tables["Company"])
        self.db.insert({"user_id": 1, "company_id": 14}, self.db.tables["UserCompanyAssignment"])
        self.db.insert({
            "id": 2,
            "title": "TITLE2",
            "handle": "handle-2",
            "status": "PUBLIC",
            "publication_date": datetime.datetime.strptime('01-22-2021', '%m-%d-%Y').date()
        }, self.db.tables["Article"])
        self.db.insert({"id": 1, "article_id": 2, "name": "VERSION 0", "is_main": 1},
                       self.db.tables["ArticleVersion"])
        self.db.insert({"id": 1, "article_version_id": 1, "position": 1, "type": "TITLE1", "content": "title 1"},
                       self.db.tables["ArticleBox"])
        self.db.insert({"article": 2, "company": 14}, self.db.tables["ArticleCompanyTag"])

        response = self.application.get('/private/get_my_article_content/2',
                                        headers=self.get_standard_header(token))

        self.assertEqual(200, response.status_code)
        self.assertEqual([{
            "id": 1,
            "article_version_id": 1,
            "position": 1,
            "type": "TITLE1",
            "content": "title 1"
        }],
            response.json
        )

    @BaseCase.login
    def test_ko_not_found(self, token):
        # No article seeded at all -> 422 with "ID not found".
        response = self.application.get('/private/get_my_article_content/2',
                                        headers=self.get_standard_header(token))

        self.assertEqual("422 Article ID not found", response.status)

    @BaseCase.login
    def test_ko_no_company_assigned(self, token):
        # Article exists but has no ArticleCompanyTag row.
        self.db.insert({
            "id": 2,
            "title": "TITLE2",
            "handle": "handle-2",
            "status": "PUBLIC",
            "publication_date": datetime.datetime.strptime('01-22-2021', '%m-%d-%Y').date()
        }, self.db.tables["Article"])

        response = self.application.get('/private/get_my_article_content/2',
                                        headers=self.get_standard_header(token))

        self.assertEqual("422 Article has no company assigned", response.status)

    @BaseCase.login
    def test_ko_too_much_company_assigned(self, token):
        # Two companies tagged on the same article -> rejected.
        self.db.insert({"id": 14, "name": "My company"}, self.db.tables["Company"])
        self.db.insert({"id": 15, "name": "My company"}, self.db.tables["Company"])
        self.db.insert({"user_id": 1, "company_id": 14}, self.db.tables["UserCompanyAssignment"])
        self.db.insert({"user_id": 1, "company_id": 15}, self.db.tables["UserCompanyAssignment"])
        self.db.insert({
            "id": 2,
            "title": "TITLE2",
            "handle": "handle-2",
            "status": "PUBLIC",
            "publication_date": datetime.datetime.strptime('01-22-2021', '%m-%d-%Y').date()
        }, self.db.tables["Article"])
        self.db.insert({"article": 2, "company": 14}, self.db.tables["ArticleCompanyTag"])
        self.db.insert({"article": 2, "company": 15}, self.db.tables["ArticleCompanyTag"])

        response = self.application.get('/private/get_my_article_content/2',
                                        headers=self.get_standard_header(token))

        self.assertEqual("422 Article has too much companies assigned", response.status)

    @BaseCase.login
    def test_ko_user_not_assigned(self, token):
        # Company tagged on the article, but no UserCompanyAssignment row
        # for the logged-in user.
        self.db.insert({"id": 14, "name": "My company"}, self.db.tables["Company"])
        self.db.insert({
            "id": 2,
            "title": "TITLE2",
            "handle": "handle-2",
            "status": "PUBLIC",
            "publication_date": datetime.datetime.strptime('01-22-2021', '%m-%d-%Y').date()
        }, self.db.tables["Article"])
        self.db.insert({"article": 2, "company": 14}, self.db.tables["ArticleCompanyTag"])

        response = self.application.get('/private/get_my_article_content/2',
                                        headers=self.get_standard_header(token))

        self.assertEqual("422 User not assign to the company", response.status)

    @BaseCase.login
    def test_ko_no_main_version(self, token):
        # Article is fully assigned but no ArticleVersion row exists.
        self.db.insert({"id": 14, "name": "My company"}, self.db.tables["Company"])
        self.db.insert({"user_id": 1, "company_id": 14}, self.db.tables["UserCompanyAssignment"])
        self.db.insert({
            "id": 2,
            "title": "TITLE2",
            "handle": "handle-2",
            "status": "PUBLIC",
            "publication_date": datetime.datetime.strptime('01-22-2021', '%m-%d-%Y').date()
        }, self.db.tables["Article"])
        self.db.insert({"article": 2, "company": 14}, self.db.tables["ArticleCompanyTag"])

        response = self.application.get('/private/get_my_article_content/2',
                                        headers=self.get_standard_header(token))

        self.assertEqual("422 Article main version not found. Please contact the administrator",
                         response.status)

    @BaseCase.login
    def test_ko_too_much_versions(self, token):
        # Two versions both flagged is_main=1 -> rejected.
        self.db.insert({"id": 14, "name": "My company"}, self.db.tables["Company"])
        self.db.insert({"user_id": 1, "company_id": 14}, self.db.tables["UserCompanyAssignment"])
        self.db.insert({
            "id": 2,
            "title": "TITLE2",
            "handle": "handle-2",
            "status": "PUBLIC",
            "publication_date": datetime.datetime.strptime('01-22-2021', '%m-%d-%Y').date()
        }, self.db.tables["Article"])
        self.db.insert({"id": 1, "article_id": 2, "name": "VERSION 0", "is_main": 1},
                       self.db.tables["ArticleVersion"])
        self.db.insert({"id": 2, "article_id": 2, "name": "VERSION 1", "is_main": 1},
                       self.db.tables["ArticleVersion"])
        self.db.insert({"article": 2, "company": 14}, self.db.tables["ArticleCompanyTag"])

        response = self.application.get('/private/get_my_article_content/2',
                                        headers=self.get_standard_header(token))

        self.assertEqual("422 Too much main version found. Please contact the administrator",
                         response.status)
45.808824
119
0.575281
726
6,230
4.816804
0.108815
0.092651
0.092651
0.064055
0.910209
0.897627
0.889906
0.866457
0.816414
0.791822
0
0.034683
0.254896
6,230
135
120
46.148148
0.718656
0
0
0.711712
0
0
0.280257
0.053933
0
0
0
0
0.072072
1
0.063063
false
0
0.018018
0
0.09009
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
df5ef8f3ec5e5541e52d30317b39a9e6a53cda2b
96,031
py
Python
exetera/core/fields.py
ericspod/ExeTera
6d888a4396fcf90804120779a907f08f08473efb
[ "Apache-2.0" ]
null
null
null
exetera/core/fields.py
ericspod/ExeTera
6d888a4396fcf90804120779a907f08f08473efb
[ "Apache-2.0" ]
null
null
null
exetera/core/fields.py
ericspod/ExeTera
6d888a4396fcf90804120779a907f08f08473efb
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 KCL-BMEIS - King's College London
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Callable, Optional, Union
from datetime import datetime, timezone
import operator

import numpy as np
import numba
import h5py

from exetera.core.abstract_types import Field
from exetera.core.data_writer import DataWriter
from exetera.core import operations as ops
from exetera.core import validation as val


class HDF5Field(Field):
    """A Field backed by an HDF5 group; reads its metadata from the
    group's HDF5 attributes ('fieldtype', 'timestamp', 'chunksize')."""

    def __init__(self, session, group, dataframe, write_enabled=False):
        super().__init__()

        # if name is None:
        #     field = group
        # else:
        #     field = group[name]
        self._session = session
        self._field = group
        self._fieldtype = self._field.attrs['fieldtype']
        self._dataframe = dataframe
        self._write_enabled = write_enabled
        self._value_wrapper = None
        # flipped to False elsewhere when the field is invalidated
        self._valid_reference = True

    @property
    def valid(self):
        """
        Returns whether the field is a valid field object. Fields can
        become invalid as a result of certain operations, such as a field
        being moved from one dataframe to another. A field that is invalid
        will throw exceptions if any other operation is performed on it.
        """
        return self._valid_reference

    @property
    def name(self):
        """
        The name of the field within a dataframe, if the field belongs to
        a dataframe (last component of the HDF5 group path).
        """
        self._ensure_valid()
        return self._field.name.split('/')[-1]

    @property
    def dataframe(self):
        """
        The owning dataframe of this field, or None if the field is not
        owned by a dataframe.
        """
        self._ensure_valid()
        return self._dataframe

    @property
    def timestamp(self):
        """
        The timestamp representing the field creation time. This is the
        time at which the data for this field was added to the dataset,
        rather than the point at which the field wrapper was created.
        """
        self._ensure_valid()
        return self._field.attrs['timestamp']

    @property
    def chunksize(self):
        """
        The chunksize for the field. This is not generally required for
        users, and may be ignored depending on the storage medium.
        """
        self._ensure_valid()
        return self._field.attrs['chunksize']

    @property
    def indexed(self):
        """
        Whether the field is an indexed field or not. Indexed fields store
        their data internally as index and value arrays for efficiency, as
        well as making it accessible through the data property.
        Always False on this base class.
        """
        self._ensure_valid()
        return False

    def __bool__(self):
        # this method is required to prevent __len__ being called on derived methods when fields are queried as
        #   if f:
        # rather than
        #   if f is not None:
        self._ensure_valid()
        return True

    def get_spans(self):
        raise NotImplementedError("Please use get_spans() on specific fields, not the field base class.")

    def apply_filter(self, filter_to_apply, dstfld=None):
        raise NotImplementedError("Please use apply_filter() on specific fields, not the field base class.")

    def apply_index(self, index_to_apply, dstfld=None):
        raise NotImplementedError("Please use apply_index() on specific fields, not the field base class.")

    def _ensure_valid(self):
        # guard used by every accessor above; see `valid`
        if not self._valid_reference:
            raise ValueError("This field no longer refers to a valid underlying field object")


class MemoryField(Field):
    """An in-memory Field with no backing storage; always valid and
    always write-enabled."""

    def __init__(self, session):
        super().__init__()
        self._session = session
        self._write_enabled = True
        self._value_wrapper = None

    @property
    def valid(self):
        # memory fields cannot be invalidated
        return True

    @property
    def name(self):
        return None

    @property
    def dataframe(self):
        return None

    @property
    def timestamp(self):
        return None

    @property
    def chunksize(self):
        return None

    @property
    def indexed(self):
        return False

    def __bool__(self):
        # this method is required to prevent __len__ being called on derived methods when fields are queried as
        #   if f:
        # rather than
        #   if f is not None:
        return True

    def get_spans(self):
        raise NotImplementedError("Please use get_spans() on specific fields, not the field base class.")

    def apply_filter(self, filter_to_apply, dstfld=None):
        raise NotImplementedError("Please use apply_filter() on specific fields, not the field base class.")

    def apply_index(self, index_to_apply, dstfld=None):
        raise NotImplementedError("Please use apply_index() on specific fields, not the field base class.")


class ReadOnlyFieldArray:
    """Read-only view over one named dataset of an HDF5 field group;
    every mutating operation raises PermissionError."""

    def __init__(self, field, dataset_name):
        self._field = field
        self._name = dataset_name
        self._dataset = field[dataset_name]

    def __len__(self):
        return len(self._dataset)

    @property
    def dtype(self):
        return self._dataset.dtype

    def __getitem__(self, item):
        return self._dataset[item]

    def __setitem__(self, key, value):
        raise PermissionError("This field was created read-only; call <field>.writeable() "
                              "for a writeable copy of the field")

    def clear(self):
        raise PermissionError("This field was created read-only; call <field>.writeable() "
                              "for a writeable copy of the field")

    def write_part(self, part):
        raise PermissionError("This field was created read-only; call <field>.writeable() "
                              "for a writeable copy of the field")

    def write(self, part):
        raise PermissionError("This field was created read-only; call <field>.writeable() "
                              "for a writeable copy of the field")

    def complete(self):
        raise PermissionError("This field was created read-only; call <field>.writeable() "
                              "for a writeable copy of the field")


# Field arrays
# ============

class WriteableFieldArray:
    """Read/write view over one named dataset of an HDF5 field group;
    writes are delegated to DataWriter."""

    def __init__(self, field, dataset_name):
        self._field = field
        self._name = dataset_name
        self._dataset = field[dataset_name]

    def __len__(self):
        return len(self._dataset)

    @property
    def dtype(self):
        return self._dataset.dtype

    def __getitem__(self, item):
        return self._dataset[item]

    def __setitem__(self, key, value):
        self._dataset[key] = value

    def clear(self):
        # recreate the dataset empty with the same dtype, then rebind
        nformat = self._dataset.dtype
        DataWriter._clear_dataset(self._field, self._name)
        DataWriter.write(self._field, self._name, [], 0, nformat)
        self._dataset = self._field[self._name]

    def write_part(self, part):
        DataWriter.write(self._field, self._name, part, len(part), dtype=self._dataset.dtype)

    def write(self, part):
        # accept either a raw sequence or another Field (unwrap its data)
        if isinstance(part, Field):
            part = part.data[:]
        DataWriter.write(self._field, self._name, part, len(part), dtype=self._dataset.dtype)
        self.complete()

    def complete(self):
        DataWriter.flush(self._field[self._name])


class MemoryFieldArray:
    """In-memory field array; dataset is allocated lazily (None until
    first write)."""

    def __init__(self, dtype):
        self._dtype = dtype
        self._dataset = None

    def __len__(self):
        return 0 if self._dataset is None else len(self._dataset)

    @property
    def dtype(self):
        return self._dtype

    def
__getitem__(self, item): if self._dataset is None: # raise ValueError("Cannot get data from an empty Field") return np.zeros(0, dtype=np.uint8) return self._dataset[item] def __setitem__(self, key, value): self._dataset[key] = value def clear(self): self._dataset = None def write_part(self, part, move_mem=False): if not isinstance(part, np.ndarray): raise ValueError("'part' must be a numpy array but is '{}'".format(type(part))) if self._dataset is None: if move_mem is True and dtype_to_str(part.dtype) == self._dtype: self._dataset = part else: self._dataset = part.copy() else: new_dataset = np.zeros(len(self._dataset) + len(part), dtype=self._dataset.dtype) new_dataset[:len(self._dataset)] = self._dataset new_dataset[-len(part):] = part self._dataset = new_dataset def write(self, part): self.write_part(part) self.complete() def complete(self): pass class ReadOnlyIndexedFieldArray: def __init__(self, field, indices, values): self._field = field self._indices = indices self._values = values def __len__(self): # TODO: this occurs because of the initialized state of an indexed string. 
It would be better for the # index to be initialised as [0] return max(len(self._indices)-1, 0) @property def dtype(self): return self._dtype def __getitem__(self, item): try: if isinstance(item, slice): start = item.start if item.start is not None else 0 stop = item.stop if item.stop is not None else len(self._indices) - 1 step = item.step #TODO: validate slice index = self._indices[start:stop+1] bytestr = self._values[index[0]:index[-1]] results = [None] * (len(index)-1) startindex = self._indices[start] for ir in range(len(results)): results[ir] =\ bytestr[index[ir]-np.int64(startindex): index[ir+1]-np.int64(startindex)].tobytes().decode() return results elif isinstance(item, int): if item >= len(self._indices) - 1: raise ValueError("index is out of range") start, stop = self._indices[item:item+2] if start == stop: return '' value = self._values[start:stop].tobytes().decode() return value except Exception as e: print("{}: unexpected exception {}".format(self._field.name, e)) raise def __setitem__(self, key, value): raise PermissionError("This field was created read-only; call <field>.writeable() " "for a writeable copy of the field") def clear(self): raise PermissionError("This field was created read-only; call <field>.writeable() " "for a writeable copy of the field") def write_part(self, part): raise PermissionError("This field was created read-only; call <field>.writeable() " "for a writeable copy of the field") def write(self, part): raise PermissionError("This field was created read-only; call <field>.writeable() " "for a writeable copy of the field") def complete(self): raise PermissionError("This field was created read-only; call <field>.writeable() " "for a writeable copy of the field") class WriteableIndexedFieldArray: def __init__(self, chunksize, indices, values): # self._field = field self._indices = indices self._values = values # self._chunksize = self._field.attrs['chunksize'] self._chunksize = chunksize self._raw_values = 
np.zeros(self._chunksize, dtype=np.uint8) self._raw_indices = np.zeros(self._chunksize, dtype=np.int64) self._accumulated = self._indices[-1] if len(self._indices) > 0 else 0 self._index_index = 0 self._value_index = 0 def __len__(self): return max(len(self._indices) - 1, 0) @property def dtype(self): return self._dtype def __getitem__(self, item): try: if isinstance(item, slice): start = item.start if item.start is not None else 0 stop = item.stop if item.stop is not None else len(self._indices) - 1 step = item.step # TODO: validate slice index = self._indices[start:stop+1] if len(index) == 0: return [] bytestr = self._values[index[0]:index[-1]] results = [None] * (len(index) - 1) startindex = self._indices[start] rmax = min(len(results), stop - start) for ir in range(rmax): rbytes = bytestr[index[ir] - np.int64(startindex): index[ir + 1] - np.int64(startindex)].tobytes() rstr = rbytes.decode() results[ir] = rstr return results elif isinstance(item, int): if item >= len(self._indices) - 1: raise ValueError("index is out of range") start, stop = self._indices[item:item+2] if start == stop: return '' value = self._values[start:stop].tobytes().decode() return value except Exception as e: print(e) raise def __setitem__(self, key, value): raise PermissionError("IndexedStringField instances cannot be edited via array syntax;" "use clear and then write/write_part or write_raw/write_part_raw") def clear(self): self._accumulated = 0 self._indices.clear() self._values.clear() self._accumulated = 0 def write_part(self, part): for s in part: evalue = s.encode() for v in evalue: self._raw_values[self._value_index] = v self._value_index += 1 if self._value_index == self._chunksize: self._values.write_part(self._raw_values[:self._value_index]) self._value_index = 0 self._accumulated += 1 self._raw_indices[self._index_index] = self._accumulated self._index_index += 1 if self._index_index == self._chunksize: if len(self._indices) == 0: self._indices.write_part(np.array([0])) 
self._indices.write_part(self._raw_indices[:self._index_index]) self._index_index = 0 def write(self, part): self.write_part(part) self.complete() def complete(self): if self._value_index != 0: self._values.write(self._raw_values[:self._value_index]) self._value_index = 0 if self._index_index != 0: if len(self._indices) == 0: self._indices.write_part(np.array([0])) self._indices.write(self._raw_indices[:self._index_index]) self._index_index = 0 # Memory-based fields # =================== class IndexedStringMemField(MemoryField): def __init__(self, session, chunksize=None): super().__init__(session) self._session = session self._chunksize = session.chunksize if chunksize is None else chunksize self._data_wrapper = None self._index_wrapper = None self._value_wrapper = None def writeable(self): return self def create_like(self, group=None, name=None, timestamp=None): return FieldDataOps.indexed_string_create_like(group, name, timestamp) @property def indexed(self): return True @property def data(self): if self._data_wrapper is None: self._data_wrapper = WriteableIndexedFieldArray(self._chunksize, self.indices, self.values) return self._data_wrapper def is_sorted(self): if len(self) < 2: return True indices = self.indices[:] values = self.values[:] last = values[indices[0]:indices[1]].tobytes() for i in range(1, len(indices)-1): cur = values[indices[i]:indices[i+1]].tobytes() if last > cur: return False last = cur return True @property def indices(self): if self._index_wrapper is None: self._index_wrapper = MemoryFieldArray('int64') return self._index_wrapper @property def values(self): if self._value_wrapper is None: self._value_wrapper = MemoryFieldArray('int8') return self._value_wrapper def __len__(self): return len(self.data) def get_spans(self): return ops._get_spans_for_index_string_field(self.indices[:], self.values[:]) def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. 
This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ return FieldDataOps.apply_filter_to_indexed_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" return FieldDataOps.apply_index_to_indexed_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) class FixedStringMemField(MemoryField): def __init__(self, session, length): super().__init__(session) # TODO: caution; we may want to consider the issues with long-lived field instances getting # out of sync with their stored counterparts. Maybe a revision number of the stored field # is required that we can check to see if we are out of date. That or just make this a # property and have it always look the value up self._length = length def writeable(self): return self def create_like(self, group=None, name=None, timestamp=None): return FieldDataOps.fixed_string_field_create_like(self, group, name, timestamp) @property def data(self): if self._value_wrapper is None: self._value_wrapper = MemoryFieldArray("S{}".format(self._length)) return self._value_wrapper def is_sorted(self): if len(self) < 2: return True data = self.data[:] return np.all(np.char.compare_chararrays(data[:-1], data[1:], "<=", False)) def __len__(self): return len(self.data) def get_spans(self): return ops.get_spans_for_field(self.data[:]) def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. 
:param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) class NumericMemField(MemoryField): def __init__(self, session, nformat): super().__init__(session) self._nformat = nformat def writeable(self): return self def create_like(self, group=None, name=None, timestamp=None): return FieldDataOps.numeric_field_create_like(self, group, name, timestamp) @property def data(self): if self._value_wrapper is None: self._value_wrapper = MemoryFieldArray(self._nformat) return self._value_wrapper def is_sorted(self): if len(self) < 2: return True data = self.data[:] return np.all(data[:-1] <= data[1:]) def __len__(self): return len(self.data) def get_spans(self): return ops.get_spans_for_field(self.data[:]) def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. 
If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) def __add__(self, second): return FieldDataOps.numeric_add(self._session, self, second) def __radd__(self, first): return FieldDataOps.numeric_add(self._session, first, self) def __sub__(self, second): return FieldDataOps.numeric_sub(self._session, self, second) def __rsub__(self, first): return FieldDataOps.numeric_sub(self._session, first, self) def __mul__(self, second): return FieldDataOps.numeric_mul(self._session, self, second) def __rmul__(self, first): return FieldDataOps.numeric_mul(self._session, first, self) def __truediv__(self, second): return FieldDataOps.numeric_truediv(self._session, self, second) def __rtruediv__(self, first): return FieldDataOps.numeric_truediv(self._session, first, self) def __floordiv__(self, second): return FieldDataOps.numeric_floordiv(self._session, self, second) def __rfloordiv__(self, first): return FieldDataOps.numeric_floordiv(self._session, first, self) def __mod__(self, second): return FieldDataOps.numeric_mod(self._session, self, second) def __rmod__(self, first): return FieldDataOps.numeric_mod(self._session, first, self) def __divmod__(self, second): return FieldDataOps.numeric_divmod(self._session, self, second) def __rdivmod__(self, first): return FieldDataOps.numeric_divmod(self._session, first, self) def __and__(self, second): return 
FieldDataOps.numeric_and(self._session, self, second) def __rand__(self, first): return FieldDataOps.numeric_and(self._session, first, self) def __xor__(self, second): return FieldDataOps.numeric_xor(self._session, self, second) def __rxor__(self, first): return FieldDataOps.numeric_xor(self._session, first, self) def __or__(self, second): return FieldDataOps.numeric_or(self._session, self, second) def __ror__(self, first): return FieldDataOps.numeric_or(self._session, first, self) def __lt__(self, value): return FieldDataOps.less_than(self._session, self, value) def __le__(self, value): return FieldDataOps.less_than_equal(self._session, self, value) def __eq__(self, value): return FieldDataOps.equal(self._session, self, value) def __ne__(self, value): return FieldDataOps.not_equal(self._session, self, value) def __gt__(self, value): return FieldDataOps.greater_than(self._session, self, value) def __ge__(self, value): return FieldDataOps.greater_than_equal(self._session, self, value) def __invert__(self): return FieldDataOps.invert(self._session, self) def logical_not(self): return FieldDataOps.logical_not(self._session, self) class CategoricalMemField(MemoryField): def __init__(self, session, nformat, keys): super().__init__(session) self._nformat = nformat self._keys = keys def writeable(self): return self def create_like(self, group=None, name=None, timestamp=None): return FieldDataOps.categorical_field_create_like(self, group, name, timestamp) @property def data(self): if self._value_wrapper is None: self._value_wrapper = MemoryFieldArray(self._nformat) return self._value_wrapper def is_sorted(self): if len(self) < 2: return True data = self.data[:] return np.all(data[:-1] <= data[1:]) def __len__(self): return len(self.data) def get_spans(self): return ops.get_spans_for_field(self.data[:]) # Note: key is presented as value: str, even though the dictionary must be presented # as str: value @property def keys(self): kv = self._keys.values() kn = 
self._keys.keys() keys = dict(zip(kv, kn)) return keys def remap(self, key_map, new_key): values = self.data[:] for k in key_map: values = np.where(values == k[0], k[1], values) result = CategoricalMemField(self._session, self._nformat, new_key) result.data.write(values) return result def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. 
This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) def __lt__(self, value): return FieldDataOps.less_than(self._session, self, value) def __le__(self, value): return FieldDataOps.less_than_equal(self._session, self, value) def __eq__(self, value): return FieldDataOps.equal(self._session, self, value) def __ne__(self, value): return FieldDataOps.not_equal(self._session, self, value) def __gt__(self, value): return FieldDataOps.greater_than(self._session, self, value) def __ge__(self, value): return FieldDataOps.greater_than_equal(self._session, self, value) class TimestampMemField(MemoryField): def __init__(self, session): super().__init__(session) def writeable(self): return self def create_like(self, group=None, name=None, timestamp=None): return FieldDataOps.timestamp_field_create_like(self, group, name, timestamp) @property def data(self): if self._value_wrapper is None: self._value_wrapper = MemoryFieldArray(np.float64) return self._value_wrapper def is_sorted(self): if len(self) < 2: return True data = self.data[:] return np.all(data[:-1] <= data[1:]) def __len__(self): return len(self.data) def get_spans(self): return ops.get_spans_for_field(self.data[:]) def apply_filter(self, 
filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) def __add__(self, second): return FieldDataOps.numeric_add(self._session, self, second) def __radd__(self, first): return FieldDataOps.numeric_add(self._session, first, self) def __sub__(self, second): return FieldDataOps.numeric_sub(self._session, self, second) def __rsub__(self, first): return FieldDataOps.numeric_sub(self._session, first, self) def __mul__(self, second): return FieldDataOps.numeric_mul(self._session, self, second) def __rmul__(self, first): return FieldDataOps.numeric_mul(self._session, first, self) def __truediv__(self, second): return FieldDataOps.numeric_truediv(self._session, self, second) def __rtruediv__(self, first): return FieldDataOps.numeric_truediv(self._session, first, self) def __floordiv__(self, second): return FieldDataOps.numeric_floordiv(self._session, self, second) def __rfloordiv__(self, first): return FieldDataOps.numeric_floordiv(self._session, first, self) def __mod__(self, second): return FieldDataOps.numeric_mod(self._session, self, second) def __rmod__(self, first): return FieldDataOps.numeric_mod(self._session, first, self) def __divmod__(self, second): return FieldDataOps.numeric_divmod(self._session, self, second) def __rdivmod__(self, first): return FieldDataOps.numeric_divmod(self._session, first, self) def __lt__(self, value): return 
FieldDataOps.less_than(self._session, self, value) def __le__(self, value): return FieldDataOps.less_than_equal(self._session, self, value) def __eq__(self, value): return FieldDataOps.equal(self._session, self, value) def __eq__(self, value): return FieldDataOps.not_equal(self._session, self, value) def __gt__(self, value): return FieldDataOps.greater_than(self._session, self, value) def __ge__(self, value): return FieldDataOps.greater_than_equal(self._session, self, value) # HDF5 field constructors # ======================= def base_field_contructor(session, group, name, timestamp=None, chunksize=None): """ Constructor are for 1)create the field (hdf5 group), 2)add basic attributes like chunksize, timestamp, field type, and 3)add the dataset to the field (hdf5 group) under the name 'values' """ if name in group: msg = "Field '{}' already exists in group '{}'" raise ValueError(msg.format(name, group)) field = group.create_group(name) field.attrs['chunksize'] = session.chunksize if chunksize is None else chunksize field.attrs['timestamp'] = session.chunksize if chunksize is None else chunksize return field def indexed_string_field_constructor(session, group, name, timestamp=None, chunksize=None): field = base_field_contructor(session, group, name, timestamp, chunksize) field.attrs['fieldtype'] = 'indexedstring' DataWriter.write(field, 'index', [], 0, 'int64') DataWriter.write(field, 'values', [], 0, 'uint8') def fixed_string_field_constructor(session, group, name, length, timestamp=None, chunksize=None): field = base_field_contructor(session, group, name, timestamp, chunksize) field.attrs['fieldtype'] = 'fixedstring,{}'.format(length) field.attrs['strlen'] = length DataWriter.write(field, 'values', [], 0, "S{}".format(length)) def numeric_field_constructor(session, group, name, nformat, timestamp=None, chunksize=None): field = base_field_contructor(session, group, name, timestamp, chunksize) field.attrs['fieldtype'] = 'numeric,{}'.format(nformat) 
field.attrs['nformat'] = nformat DataWriter.write(field, 'values', [], 0, nformat) def categorical_field_constructor(session, group, name, nformat, key, timestamp=None, chunksize=None): field = base_field_contructor(session, group, name, timestamp, chunksize) field.attrs['fieldtype'] = 'categorical,{}'.format(nformat) field.attrs['nformat'] = nformat DataWriter.write(field, 'values', [], 0, nformat) key_ = val.validate_and_normalize_categorical_key('key', key) key_values = [v for k, v in key_.items()] key_names = [k for k, v in key_.items()] DataWriter.write(field, 'key_values', key_values, len(key_values), 'int8') DataWriter.write(field, 'key_names', key_names, len(key_names), h5py.special_dtype(vlen=str)) def timestamp_field_constructor(session, group, name, timestamp=None, chunksize=None): field = base_field_contructor(session, group, name, timestamp, chunksize) field.attrs['fieldtype'] = 'timestamp' DataWriter.write(field, 'values', [], 0, 'float64') # HDF5 fields # =========== class IndexedStringField(HDF5Field): def __init__(self, session, group, dataframe, write_enabled=False): super().__init__(session, group, dataframe, write_enabled=write_enabled) self._session = session self._dataframe = None self._data_wrapper = None self._index_wrapper = None self._value_wrapper = None def writeable(self): """ Indicates whether this field permits write operations. By default, dataframe fields are read-only in order to protect accidental writes to datasets """ self._ensure_valid() return IndexedStringField(self._session, self._field, self._dataframe, write_enabled=True) def create_like(self, group=None, name=None, timestamp=None): """ Create an empty field of the same type as this field. 
""" self._ensure_valid() return FieldDataOps.indexed_string_create_like(self, group, name, timestamp) @property def indexed(self): self._ensure_valid() return True @property def data(self): self._ensure_valid() if self._data_wrapper is None: wrapper =\ WriteableIndexedFieldArray if self._write_enabled else ReadOnlyIndexedFieldArray self._data_wrapper = wrapper(self.chunksize, self.indices, self.values) return self._data_wrapper def is_sorted(self): self._ensure_valid() if len(self) < 2: return True indices = self.indices[:] values = self.values[:] last = values[indices[0]:indices[1]].tobytes() for i in range(1, len(indices)-1): cur = values[indices[i]:indices[i+1]].tobytes() if last > cur: return False last = cur return True @property def indices(self): self._ensure_valid() if self._index_wrapper is None: wrapper = WriteableFieldArray if self._write_enabled else ReadOnlyFieldArray self._index_wrapper = wrapper(self._field, 'index') return self._index_wrapper @property def values(self): self._ensure_valid() if self._value_wrapper is None: wrapper = WriteableFieldArray if self._write_enabled else ReadOnlyFieldArray self._value_wrapper = wrapper(self._field, 'values') return self._value_wrapper def __len__(self): self._ensure_valid() return len(self.data) def get_spans(self): self._ensure_valid() return ops._get_spans_for_index_string_field(self.indices[:], self.values[:]) def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. 
This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ self._ensure_valid() return FieldDataOps.apply_filter_to_indexed_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" self._ensure_valid() return FieldDataOps.apply_index_to_indexed_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) class FixedStringField(HDF5Field): def __init__(self, session, group, dataframe, write_enabled=False): super().__init__(session, group, dataframe, write_enabled=write_enabled) # TODO: caution; we may want to consider the issues with long-lived field instances getting # out of sync with their stored counterparts. Maybe a revision number of the stored field # is required that we can check to see if we are out of date. 
That or just make this a # property and have it always look the value up self._length = self._field.attrs['strlen'] def writeable(self): self._ensure_valid() return FixedStringField(self._session, self._field, self._dataframe, write_enabled=True) def create_like(self, group=None, name=None, timestamp=None): self._ensure_valid() return FieldDataOps.fixed_string_field_create_like(self, group, name, timestamp) @property def data(self): self._ensure_valid() if self._value_wrapper is None: if self._write_enabled: self._value_wrapper = WriteableFieldArray(self._field, 'values') else: self._value_wrapper = ReadOnlyFieldArray(self._field, 'values') return self._value_wrapper def is_sorted(self): self._ensure_valid() if len(self) < 2: return True data = self.data[:] return np.all(np.char.compare_chararrays(data[:-1], data[1:], "<=", False)) def __len__(self): self._ensure_valid() return len(self.data) def get_spans(self): self._ensure_valid() return ops.get_spans_for_field(self.data[:]) def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" self._ensure_valid() return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ self._ensure_valid() return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) class NumericField(HDF5Field): def __init__(self, session, group, dataframe, write_enabled=False): super().__init__(session, group, dataframe, write_enabled=write_enabled) self._nformat = self._field.attrs['nformat'] def writeable(self): self._ensure_valid() return 
NumericField(self._session, self._field, None, write_enabled=True) def create_like(self, group=None, name=None, timestamp=None): self._ensure_valid() return FieldDataOps.numeric_field_create_like(self, group, name, timestamp) @property def data(self): self._ensure_valid() if self._value_wrapper is None: if self._write_enabled: self._value_wrapper = WriteableFieldArray(self._field, 'values') else: self._value_wrapper = ReadOnlyFieldArray(self._field, 'values') return self._value_wrapper def is_sorted(self): self._ensure_valid() if len(self) < 2: return True data = self.data[:] return np.all(data[:-1] <= data[1:]) def __len__(self): self._ensure_valid() return len(self.data) def astype(self, dtype:str, casting='unsafe'): """ Convert the field data type to dtype parameter given. :param dtype: The new datatype, given as a str object. The dtype must be a subtype of np.number, e.g. int, float, etc. :param casting: Similar to the casting parameter in numpy ndarray.astype, can be 'no’, ‘equiv’, ‘safe’, ‘same_kind’, or ‘unsafe’. :return: The field with new datatype. """ if not np.issubdtype(dtype, np.number): raise ValueError("The dtype to convert must be a subtype of np.number, but type {} given.".format(dtype)) else: content = np.array(self.data[:]).astype(dtype, casting=casting) name = self.name del self.dataframe[name] fld = self.dataframe.create_numeric(name, str(dtype)) fld.data.write(content) return fld def get_spans(self): self._ensure_valid() return ops.get_spans_for_field(self.data[:]) def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. 
If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ self._ensure_valid() return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" self._ensure_valid() return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) def __add__(self, second): self._ensure_valid() return FieldDataOps.numeric_add(self._session, self, second) def __radd__(self, first): self._ensure_valid() return FieldDataOps.numeric_add(self._session, first, self) def __sub__(self, second): self._ensure_valid() return FieldDataOps.numeric_sub(self._session, self, second) def __rsub__(self, first): self._ensure_valid() return FieldDataOps.numeric_sub(self._session, first, self) def __mul__(self, second): self._ensure_valid() return FieldDataOps.numeric_mul(self._session, self, second) def __rmul__(self, first): self._ensure_valid() return FieldDataOps.numeric_mul(self._session, first, self) def __truediv__(self, second): self._ensure_valid() return FieldDataOps.numeric_truediv(self._session, self, second) def __rtruediv__(self, first): self._ensure_valid() return FieldDataOps.numeric_truediv(self._session, first, self) def __floordiv__(self, second): self._ensure_valid() return FieldDataOps.numeric_floordiv(self._session, self, second) def __rfloordiv__(self, first): self._ensure_valid() return FieldDataOps.numeric_floordiv(self._session, first, self) def __mod__(self, second): self._ensure_valid() return 
FieldDataOps.numeric_mod(self._session, self, second) def __rmod__(self, first): self._ensure_valid() return FieldDataOps.numeric_mod(self._session, first, self) def __divmod__(self, second): self._ensure_valid() return FieldDataOps.numeric_divmod(self._session, self, second) def __rdivmod__(self, first): self._ensure_valid() return FieldDataOps.numeric_divmod(self._session, first, self) def __and__(self, second): self._ensure_valid() return FieldDataOps.numeric_and(self._session, self, second) def __rand__(self, first): self._ensure_valid() return FieldDataOps.numeric_and(self._session, first, self) def __xor__(self, second): self._ensure_valid() return FieldDataOps.numeric_xor(self._session, self, second) def __rxor__(self, first): self._ensure_valid() return FieldDataOps.numeric_xor(self._session, first, self) def __or__(self, second): self._ensure_valid() return FieldDataOps.numeric_or(self._session, self, second) def __ror__(self, first): self._ensure_valid() return FieldDataOps.numeric_or(self._session, first, self) def __lt__(self, value): self._ensure_valid() return FieldDataOps.less_than(self._session, self, value) def __le__(self, value): self._ensure_valid() return FieldDataOps.less_than_equal(self._session, self, value) def __eq__(self, value): self._ensure_valid() return FieldDataOps.equal(self._session, self, value) def __ne__(self, value): self._ensure_valid() return FieldDataOps.not_equal(self._session, self, value) def __gt__(self, value): self._ensure_valid() return FieldDataOps.greater_than(self._session, self, value) def __ge__(self, value): self._ensure_valid() return FieldDataOps.greater_than_equal(self._session, self, value) def __invert__(self): self._ensure_valid() return FieldDataOps.invert(self._session, self) def logical_not(self): self._ensure_valid() return FieldDataOps.logical_not(self._session, self) class CategoricalField(HDF5Field): def __init__(self, session, group, dataframe, write_enabled=False): super().__init__(session, group, 
dataframe, write_enabled=write_enabled) self._nformat = self._field.attrs['nformat'] if 'nformat' in self._field.attrs else 'int8' def writeable(self): self._ensure_valid() return CategoricalField(self._session, self._field, self._dataframe, write_enabled=True) def create_like(self, group=None, name=None, timestamp=None): self._ensure_valid() return FieldDataOps.categorical_field_create_like(self, group, name, timestamp) @property def data(self): self._ensure_valid() if self._value_wrapper is None: if self._write_enabled: self._value_wrapper = WriteableFieldArray(self._field, 'values') else: self._value_wrapper = ReadOnlyFieldArray(self._field, 'values') return self._value_wrapper def is_sorted(self): self._ensure_valid() if len(self) < 2: return True data = self.data[:] return np.all(data[:-1] <= data[1:]) def __len__(self): self._ensure_valid() return len(self.data) def get_spans(self): self._ensure_valid() return ops.get_spans_for_field(self.data[:]) @property def nformat(self): self._ensure_valid() return self._nformat # Note: key is presented as value: str, even though the dictionary must be presented # as str: value @property def keys(self): self._ensure_valid() if isinstance(self._field['key_values'][0], str): # convert into bytearray to keep up with linux kv = [bytes(i, 'utf-8') for i in self._field['key_values']] else: kv = self._field['key_values'] if isinstance(self._field['key_names'][0], str): kn = [bytes(i, 'utf-8') for i in self._field['key_names']] else: kn = self._field['key_names'] keys = dict(zip(kv, kn)) return keys def remap(self, key_map, new_key): self._ensure_valid() values = self.data[:] for k in key_map: values = np.where(values == k[0], k[1], values) result = CategoricalMemField(self._session, self._nformat, new_key) result.data.write(values) return result def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. 
This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ self._ensure_valid() return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. 
""" self._ensure_valid() return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) def __lt__(self, value): self._ensure_valid() return FieldDataOps.less_than(self._session, self, value) def __le__(self, value): self._ensure_valid() return FieldDataOps.less_than_equal(self._session, self, value) def __eq__(self, value): self._ensure_valid() return FieldDataOps.equal(self._session, self, value) def __ne__(self, value): self._ensure_valid() return FieldDataOps.not_equal(self._session, self, value) def __gt__(self, value): self._ensure_valid() return FieldDataOps.greater_than(self._session, self, value) def __ge__(self, value): self._ensure_valid() return FieldDataOps.greater_than_equal(self._session, self, value) class TimestampField(HDF5Field): def __init__(self, session, group, dataframe, write_enabled=False): super().__init__(session, group, dataframe, write_enabled=write_enabled) def writeable(self): self._ensure_valid() return TimestampField(self._session, self._field, self._dataframe, write_enabled=True) def create_like(self, group=None, name=None, timestamp=None): self._ensure_valid() return FieldDataOps.timestamp_field_create_like(self, group, name, timestamp) @property def data(self): self._ensure_valid() if self._value_wrapper is None: if self._write_enabled: 
self._value_wrapper = WriteableFieldArray(self._field, 'values') else: self._value_wrapper = ReadOnlyFieldArray(self._field, 'values') return self._value_wrapper def is_sorted(self): self._ensure_valid() if len(self) < 2: return True data = self.data[:] return np.all(data[:-1] <= data[1:]) def __len__(self): self._ensure_valid() return len(self.data) def get_spans(self): self._ensure_valid() return ops.get_spans_for_field(self.data[:]) def apply_filter(self, filter_to_apply, target=None, in_place=False): """ Apply a boolean filter to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the filtered data is written to. :param filter_to_apply: a Field or numpy array that contains the boolean filter data :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. If 'in_place' is True, 'target' must be None :return: The filtered field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ self._ensure_valid() return FieldDataOps.apply_filter_to_field(self, filter_to_apply, target, in_place) def apply_index(self, index_to_apply, target=None, in_place=False): """ Apply an index to this field. This operation doesn't modify the field on which it is called unless 'in_place is set to true'. The user can specify a 'target' field that the reindexed data is written to. :param index_to_apply: a Field or numpy array that contains the indices :param target: if set, this is the field that is written to. This field must be writable. If 'target' is set, 'in_place' must be False. :param in_place: if True, perform the operation destructively on this field. This field must be writable. 
If 'in_place' is True, 'target' must be None :return: The reindexed field. This is a new field instance unless 'target' is set, in which case it is the target field, or unless 'in_place' is True, in which case it is this field. """ self._ensure_valid() return FieldDataOps.apply_index_to_field(self, index_to_apply, target, in_place) def apply_spans_first(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_first(self, spans_to_apply, target, in_place) def apply_spans_last(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_last(self, spans_to_apply, target, in_place) def apply_spans_min(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_min(self, spans_to_apply, target, in_place) def apply_spans_max(self, spans_to_apply, target=None, in_place=False): self._ensure_valid() return FieldDataOps.apply_spans_max(self, spans_to_apply, target, in_place) def __add__(self, second): self._ensure_valid() return FieldDataOps.numeric_add(self._session, self, second) def __radd__(self, first): self._ensure_valid() return FieldDataOps.numeric_add(self._session, first, self) def __sub__(self, second): self._ensure_valid() return FieldDataOps.numeric_sub(self._session, self, second) def __rsub__(self, first): self._ensure_valid() return FieldDataOps.numeric_sub(self._session, first, self) def __mul__(self, second): self._ensure_valid() return FieldDataOps.numeric_mul(self._session, self, second) def __rmul__(self, first): self._ensure_valid() return FieldDataOps.numeric_mul(self._session, first, self) def __truediv__(self, second): self._ensure_valid() return FieldDataOps.numeric_truediv(self._session, self, second) def __rtruediv__(self, first): self._ensure_valid() return FieldDataOps.numeric_truediv(self._session, first, self) def __floordiv__(self, second): self._ensure_valid() return 
FieldDataOps.numeric_floordiv(self._session, self, second) def __rfloordiv__(self, first): self._ensure_valid() return FieldDataOps.numeric_floordiv(self._session, first, self) def __mod__(self, second): self._ensure_valid() return FieldDataOps.numeric_mod(self._session, self, second) def __rmod__(self, first): self._ensure_valid() return FieldDataOps.numeric_mod(self._session, first, self) def __divmod__(self, second): self._ensure_valid() return FieldDataOps.numeric_divmod(self._session, self, second) def __rdivmod__(self, first): self._ensure_valid() return FieldDataOps.numeric_divmod(self._session, first, self) def __lt__(self, value): self._ensure_valid() return FieldDataOps.less_than(self._session, self, value) def __le__(self, value): self._ensure_valid() return FieldDataOps.less_than_equal(self._session, self, value) def __eq__(self, value): self._ensure_valid() return FieldDataOps.equal(self._session, self, value) def __ne__(self, value): self._ensure_valid() return FieldDataOps.not_equal(self._session, self, value) def __gt__(self, value): self._ensure_valid() return FieldDataOps.greater_than(self._session, self, value) def __ge__(self, value): self._ensure_valid() return FieldDataOps.greater_than_equal(self._session, self, value) # Operation implementations # ========================= def as_field(data, key=None): if np.issubdtype(data.dtype, np.number): if key is None: r = NumericMemField(None, data.dtype) r.data.write(data) return r else: raise NotImplementedError() else: raise NotImplementedError() def argsort(field: Field, dtype: str=None): supported_dtypes = ('int32', 'int64', 'uint32') if dtype not in supported_dtypes: raise ValueError("If set, 'dtype' must be one of {}".format(supported_dtypes)) indices = np.argsort(field.data[:]) f = NumericMemField(None, dtype_to_str(indices.dtype) if dtype is None else dtype) f.data.write(indices) return f def dtype_to_str(dtype): if isinstance(dtype, str): return dtype if dtype == bool: return 'bool' elif 
dtype == np.int8: return 'int8' elif dtype == np.int16: return 'int16' elif dtype == np.int32: return 'int32' elif dtype == np.int64: return 'int64' elif dtype == np.uint8: return 'uint8' elif dtype == np.uint16: return 'uint16' elif dtype == np.uint32: return 'uint32' elif dtype == np.uint64: return 'uint64' elif dtype == np.float32: return 'float32' elif dtype == np.float64: return 'float64' raise ValueError("Unsupported dtype '{}'".format(dtype)) class FieldDataOps: @staticmethod def _binary_op(session, first, second, function): if isinstance(first, Field): first_data = first.data[:] else: first_data = first if isinstance(second, Field): second_data = second.data[:] else: second_data = second r = function(first_data, second_data) f = NumericMemField(session, dtype_to_str(r.dtype)) f.data.write(r) return f @staticmethod def _unary_op(session, first, function): if isinstance(first, Field): first_data = first.data[:] else: first_data = first r = function(first_data) f = NumericMemField(session, dtype_to_str(r.dtype)) f.data.write(r) return f @classmethod def numeric_add(cls, session, first, second): return cls._binary_op(session, first, second, operator.add) @classmethod def numeric_sub(cls, session, first, second): return cls._binary_op(session, first, second, operator.sub) @classmethod def numeric_mul(cls, session, first, second): return cls._binary_op(session, first, second, operator.mul) @classmethod def numeric_truediv(cls, session, first, second): return cls._binary_op(session, first, second, operator.truediv) @classmethod def numeric_floordiv(cls, session, first, second): return cls._binary_op(session, first, second, operator.floordiv) @classmethod def numeric_mod(cls, session, first, second): return cls._binary_op(session, first, second, operator.mod) @classmethod def numeric_divmod(cls, session, first, second): if isinstance(first, Field): first_data = first.data[:] else: first_data = first if isinstance(second, Field): second_data = second.data[:] else: 
second_data = second r1, r2 = np.divmod(first_data, second_data) f1 = NumericMemField(session, dtype_to_str(r1.dtype)) f1.data.write(r1) f2 = NumericMemField(session, dtype_to_str(r2.dtype)) f2.data.write(r2) return f1, f2 @classmethod def numeric_and(cls, session, first, second): return cls._binary_op(session, first, second, operator.and_) @classmethod def numeric_xor(cls, session, first, second): return cls._binary_op(session, first, second, operator.xor) @classmethod def numeric_or(cls, session, first, second): return cls._binary_op(session, first, second, operator.or_) @classmethod def invert(cls, session, first): return cls._unary_op(session, first, operator.invert) @classmethod def logical_not(cls, session, first): def function_logical_not(first): return np.logical_not(first) return cls._unary_op(session, first, function_logical_not) @classmethod def less_than(cls, session, first, second): return cls._binary_op(session, first, second, operator.lt) @classmethod def less_than_equal(cls, session, first, second): return cls._binary_op(session, first, second, operator.le) @classmethod def equal(cls, session, first, second): return cls._binary_op(session, first, second, operator.eq) @classmethod def not_equal(cls, session, first, second): return cls._binary_op(session, first, second, operator.ne) @classmethod def greater_than(cls, session, first, second): return cls._binary_op(session, first, second, operator.gt) @classmethod def greater_than_equal(cls, session, first, second): return cls._binary_op(session, first, second, operator.ge) @staticmethod def apply_filter_to_indexed_field(source, filter_to_apply, target=None, in_place=False): if in_place is True and target is not None: raise ValueError("if 'in_place is True, 'target' must be None") filter_to_apply_ = val.array_from_field_or_lower('filter_to_apply', filter_to_apply) dest_indices, dest_values = \ ops.apply_filter_to_index_values(filter_to_apply_, source.indices[:], source.values[:]) if in_place: if not 
source._write_enabled: raise ValueError("This field is marked read-only. Call writeable() on it before " "performing in-place filtering") source.indices.clear() source.indices.write(dest_indices) source.values.clear() source.values.write(dest_values) return source if target is not None: if len(target.indices) == len(dest_indices): target.indices[:] = dest_indices else: target.indices.clear() target.indices.write(dest_indices) if len(target.values) == len(dest_values): target.values[:] = dest_values else: target.values.clear() target.values.write(dest_values) return target else: mem_field = IndexedStringMemField(source._session, source.chunksize) mem_field.indices.write(dest_indices) mem_field.values.write(dest_values) return mem_field @staticmethod def apply_index_to_indexed_field(source, index_to_apply, target=None, in_place=False): if in_place is True and target is not None: raise ValueError("if 'in_place is True, 'target' must be None") index_to_apply_ = val.array_from_field_or_lower('index_to_apply', index_to_apply) dest_indices, dest_values = \ ops.apply_indices_to_index_values(index_to_apply_, source.indices[:], source.values[:]) if in_place: if not source._write_enabled: raise ValueError("This field is marked read-only. 
Call writeable() on it before " "performing in-place filtering") source.indices.clear() source.indices.write(dest_indices) source.values.clear() source.values.write(dest_values) return source if target is not None: if len(target.indices) == len(dest_indices): target.indices[:] = dest_indices else: target.indices.clear() target.indices.write(dest_indices) if len(target.values) == len(dest_values): target.values[:] = dest_values else: target.values.clear() target.values.write(dest_values) return target else: mem_field = IndexedStringMemField(source._session, source.chunksize) mem_field.indices.write(dest_indices) mem_field.values.write(dest_values) return mem_field @staticmethod def apply_filter_to_field(source, filter_to_apply, target=None, in_place=False): if in_place is True and target is not None: raise ValueError("if 'in_place is True, 'target' must be None") filter_to_apply_ = val.array_from_field_or_lower('filter_to_apply', filter_to_apply) dest_data = source.data[:][filter_to_apply_] if in_place: if not source._write_enabled: raise ValueError("This field is marked read-only. Call writeable() on it before " "performing in-place filtering") source.data.clear() source.data.write(dest_data) return source if target is not None: if len(target.data) == len(dest_data): target.data[:] = dest_data else: target.data.clear() target.data.write(dest_data) return target else: mem_field = source.create_like() mem_field.data.write(dest_data) return mem_field @staticmethod def apply_index_to_field(source, index_to_apply, target=None, in_place=False): if in_place is True and target is not None: raise ValueError("if 'in_place is True, 'target' must be None") index_to_apply_ = val.array_from_field_or_lower('index_to_apply', index_to_apply) dest_data = source.data[:][index_to_apply_] if in_place: if not source._write_enabled: raise ValueError("This field is marked read-only. 
Call writeable() on it before " "performing in-place filtering") source.data.clear() source.data.write(dest_data) return source if target is not None: if len(target.data) == len(dest_data): target.data[:] = dest_data else: target.data.clear() target.data.write(dest_data) return target else: mem_field = source.create_like() mem_field.data.write(dest_data) return mem_field @staticmethod def _apply_spans_src(source: Field, predicate: Callable[[np.ndarray, np.ndarray, np.ndarray], Field], spans: Union[Field, np.ndarray], target: Optional[Field] = None, in_place: bool = False) -> Field: if in_place is True and target is not None: raise ValueError("if 'in_place is True, 'target' must be None") spans_ = val.array_from_field_or_lower('spans', spans) result_inds = np.zeros(len(spans)) results = np.zeros(len(spans)-1, dtype=source.data.dtype) predicate(spans_, source.data[:], results) if in_place is True: if not source._write_enabled: raise ValueError("This field is marked read-only. Call writeable() on it before " "performing in-place apply_span methods") source.data.clear() source.data.write(results) return source if target is None: result_field = source.create_like() result_field.data.write(results) return result_field else: target.data.clear() target.data.write(results) return target @staticmethod def _apply_spans_indexed_src(source: Field, predicate: Callable[[np.ndarray, np.ndarray, np.ndarray, np.ndarray], Field], spans: Union[Field, np.ndarray], target: Optional[Field] = None, in_place: bool = False) -> Field: if in_place is True and target is not None: raise ValueError("if 'in_place is True, 'target' must be None") spans_ = val.array_from_field_or_lower('spans', spans) # step 1: get the indices through the index predicate results = np.zeros(len(spans)-1, dtype=np.int64) predicate(spans_, source.indices[:], source.values[:], results) # step 2: run apply_index on the source return FieldDataOps.apply_index_to_indexed_field(source, results, target, in_place) 
@staticmethod def _apply_spans_indexed_no_src(source: Field, predicate: Callable[[np.ndarray, np.ndarray], Field], spans: Union[Field, np.ndarray], target: Optional[Field] = None, in_place: bool = False) -> Field: if in_place is True and target is not None: raise ValueError("if 'in_place is True, 'target' must be None") spans_ = val.array_from_field_or_lower('spans', spans) # step 1: get the indices through the index predicate results = np.zeros(len(spans)-1, dtype=np.int64) predicate(spans_, results) # step 2: run apply_index on the source return FieldDataOps.apply_index_to_indexed_field(source, results, target, in_place) @staticmethod def apply_spans_first(source: Field, spans: Union[Field, np.ndarray], target: Optional[Field] = None, in_place: bool = None) -> Field: spans_ = val.array_from_field_or_lower('spans', spans) if np.any(spans_[:-1] == spans_[1:]): raise ValueError("cannot perform 'first' on spans with empty entries") if source.indexed: return FieldDataOps._apply_spans_indexed_no_src(source, ops.apply_spans_index_of_first, spans_, target, in_place) else: return FieldDataOps._apply_spans_src(source, ops.apply_spans_first, spans_, target, in_place) @staticmethod def apply_spans_last(source: Field, spans: Union[Field, np.ndarray], target: Optional[Field] = None, in_place: bool = None) -> Field: spans_ = val.array_from_field_or_lower('spans', spans) if np.any(spans_[:-1] == spans_[1:]): raise ValueError("cannot perform 'first' on spans with empty entries") if source.indexed: return FieldDataOps._apply_spans_indexed_no_src(source, ops.apply_spans_index_of_last, spans_, target, in_place) else: return FieldDataOps._apply_spans_src(source, ops.apply_spans_last, spans_, target, in_place) @staticmethod def apply_spans_min(source: Field, spans: Union[Field, np.ndarray], target: Optional[Field] = None, in_place: bool = None) -> Field: spans_ = val.array_from_field_or_lower('spans', spans) if np.any(spans_[:-1] == spans_[1:]): raise ValueError("cannot perform 
'first' on spans with empty entries") if source.indexed: return FieldDataOps._apply_spans_indexed_src(source, ops.apply_spans_index_of_min_indexed, spans_, target, in_place) else: return FieldDataOps._apply_spans_src(source, ops.apply_spans_min, spans_, target, in_place) @staticmethod def apply_spans_max(source: Field, spans: Union[Field, np.ndarray], target: Optional[Field] = None, in_place: bool = None) -> Field: spans_ = val.array_from_field_or_lower('spans', spans) if np.any(spans_[:-1] == spans_[1:]): raise ValueError("cannot perform 'first' on spans with empty entries") if source.indexed: return FieldDataOps._apply_spans_indexed_src(source, ops.apply_spans_index_of_max_indexed, spans_, target, in_place) else: return FieldDataOps._apply_spans_src(source, ops.apply_spans_max, spans_, target, in_place) @staticmethod def indexed_string_create_like(source, group, name, timestamp): if group is None and name is not None: raise ValueError("if 'group' is None, 'name' must also be 'None'") ts = source.timestamp if timestamp is None else timestamp if group is None: return IndexedStringMemField(source._session, source.chunksize) if isinstance(group, h5py.Group): indexed_string_field_constructor(source._session, group, name, ts, source.chunksize) return IndexedStringField(source._session, group[name], None, write_enabled=True) else: return group.create_indexed_string(name, ts, source.chunksize) @staticmethod def fixed_string_field_create_like(source, group, name, timestamp): if group is None and name is not None: raise ValueError("if 'group' is None, 'name' must also be 'None'") ts = source.timestamp if timestamp is None else timestamp length = source._length if group is None: return FixedStringMemField(source._session, length) if isinstance(group, h5py.Group): fixed_string_field_constructor(source._session, group, name, length, ts, source.chunksize) return FixedStringField(source._session, group[name], None, write_enabled=True) else: return 
group.create_fixed_string(name, length, ts) @staticmethod def numeric_field_create_like(source, group, name, timestamp): if group is None and name is not None: raise ValueError("if 'group' is None, 'name' must also be 'None'") ts = source.timestamp if timestamp is None else timestamp nformat = source._nformat if group is None: return NumericMemField(source._session, nformat) if isinstance(group, h5py.Group): numeric_field_constructor(source._session, group, name, nformat, ts, source.chunksize) return NumericField(source._session, group[name], None, write_enabled=True) else: return group.create_numeric(name, nformat, ts) @staticmethod def categorical_field_create_like(source, group, name, timestamp): if group is None and name is not None: raise ValueError("if 'group' is None, 'name' must also be 'None'") ts = source.timestamp if timestamp is None else timestamp nformat = source._nformat keys = source.keys # TODO: we have to flip the keys until we fix https://github.com/KCL-BMEIS/ExeTera/issues/150 keys = {v: k for k, v in keys.items()} if group is None: return CategoricalMemField(source._session, nformat, keys) if isinstance(group, h5py.Group): categorical_field_constructor(source._session, group, name, nformat, keys, ts, source.chunksize) return CategoricalField(source._session, group[name], None, write_enabled=True) else: return group.create_categorical(name, nformat, keys, ts) @staticmethod def timestamp_field_create_like(source, group, name, timestamp): if group is None and name is not None: raise ValueError("if 'group' is None, 'name' must also be 'None'") ts = source.timestamp if timestamp is None else timestamp if group is None: return TimestampMemField(source._session) if isinstance(group, h5py.Group): timestamp_field_constructor(source._session, group, name, ts, source.chunksize) return TimestampField(source._session, group[name], None, write_enabled=True) else: return group.create_timestamp(name, ts)
39.879983
137
0.646093
12,447
96,031
4.748373
0.04009
0.031386
0.031978
0.039795
0.856674
0.835558
0.822124
0.808385
0.800044
0.794257
0
0.00321
0.26681
96,031
2,407
138
39.896552
0.836214
0.203538
0
0.785851
0
0
0.048442
0.000323
0
0
0
0.001662
0
1
0.231995
false
0.000637
0.006373
0.083493
0.481198
0.001275
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
8
df6650bf8889686e2b275f3c0b1b2cc27bd8e262
16,061
py
Python
sushy/tests/unit/test_auth.py
yrobla/sushy
74be09c798ac3422335a4e0e30b778639ff5a122
[ "Apache-2.0" ]
null
null
null
sushy/tests/unit/test_auth.py
yrobla/sushy
74be09c798ac3422335a4e0e30b778639ff5a122
[ "Apache-2.0" ]
null
null
null
sushy/tests/unit/test_auth.py
yrobla/sushy
74be09c798ac3422335a4e0e30b778639ff5a122
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from sushy import auth from sushy import connector from sushy import exceptions from sushy import main from sushy.tests.unit import base class BasicAuthTestCase(base.TestCase): @mock.patch.object(main, 'Sushy', autospec=True) @mock.patch.object(connector, 'Connector', autospec=True) def setUp(self, mock_connector, mock_root): super(BasicAuthTestCase, self).setUp() self.username = 'TestUsername' self.password = 'TestP@$$W0RD' self.base_auth = auth.BasicAuth(self.username, self.password) self.conn = mock_connector.return_value self.root = mock_root.return_value def test_init(self): self.assertEqual(self.username, self.base_auth._username) self.assertEqual(self.password, self.base_auth._password) self.assertIsNone(self.base_auth._root_resource) self.assertIsNone(self.base_auth._connector) def test_set_context(self): self.base_auth.set_context(self.root, self.conn) self.assertEqual(self.base_auth._root_resource, self.root) self.assertEqual(self.base_auth._connector, self.conn) def test__do_authenticate_no_context(self): self.assertRaises(RuntimeError, self.base_auth.authenticate) def test__do_authenticate(self): self.base_auth.set_context(self.root, self.conn) self.base_auth.authenticate() self.conn.set_http_basic_auth.assert_called_once_with(self.username, self.password) def test_can_refresh_session(self): self.assertFalse(self.base_auth.can_refresh_session()) 
@mock.patch.object(auth.BasicAuth, 'close', autospec=True) def test_context_manager(self, auth_close): with auth.BasicAuth(self.username, self.password) as base_auth: self.assertEqual(self.username, base_auth._username) self.assertEqual(self.password, base_auth._password) auth_close.assert_called_once_with(base_auth) class SessionAuthTestCase(base.TestCase): @mock.patch.object(main, 'Sushy', autospec=True) @mock.patch.object(connector, 'Connector', autospec=True) def setUp(self, mock_connector, mock_root): super(SessionAuthTestCase, self).setUp() self.username = 'TestUsername' self.password = 'TestP@$$W0RD' self.sess_key = 'TestingKey' self.sess_uri = ('https://testing:8000/redfish/v1/' 'SessionService/Sessions/testing') self.sess_auth = auth.SessionAuth(self.username, self.password) self.conn = mock_connector.return_value self.root = mock_root.return_value def test_init(self): self.assertEqual(self.username, self.sess_auth._username) self.assertEqual(self.password, self.sess_auth._password) self.assertIsNone(self.sess_auth._root_resource) self.assertIsNone(self.sess_auth._connector) self.assertIsNone(self.sess_auth._session_key) self.assertIsNone(self.sess_auth._session_resource_id) def test_get_session_key(self): self.sess_auth._session_key = self.sess_key self.assertEqual(self.sess_key, self.sess_auth.get_session_key()) def test_get_session_resource_id(self): self.sess_auth._session_resource_id = self.sess_uri self.assertEqual(self.sess_uri, self.sess_auth.get_session_resource_id()) def test_reset_session_attrs(self): self.sess_auth._session_key = self.sess_key self.sess_auth._session_resource_id = self.sess_uri self.assertEqual(self.sess_uri, self.sess_auth.get_session_resource_id()) self.assertEqual(self.sess_key, self.sess_auth.get_session_key()) self.sess_auth.reset_session_attrs() self.assertIsNone(self.sess_auth.get_session_resource_id()) self.assertIsNone(self.sess_auth.get_session_key()) def test_set_context(self): self.sess_auth.set_context(self.root, 
self.conn) self.assertEqual(self.sess_auth._root_resource, self.root) self.assertEqual(self.sess_auth._connector, self.conn) def test__do_authenticate_no_context(self): self.assertRaises(RuntimeError, self.sess_auth.authenticate) def test__do_authenticate(self): self.assertIsNone(self.sess_auth.get_session_resource_id()) self.assertIsNone(self.sess_auth.get_session_key()) mock_sess_serv = mock.Mock() mock_sess_serv.create_session.return_value = (self.sess_key, self.sess_uri) self.root.get_session_service.return_value = mock_sess_serv self.sess_auth.set_context(self.root, self.conn) self.sess_auth.authenticate() self.assertEqual(self.sess_uri, self.sess_auth.get_session_resource_id()) self.assertEqual(self.sess_key, self.sess_auth.get_session_key()) self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) def test_can_refresh_session(self): mock_sess_serv = mock.Mock() mock_sess_serv.create_session.return_value = (self.sess_key, self.sess_uri) self.root.get_session_service.return_value = mock_sess_serv self.sess_auth.set_context(self.root, self.conn) self.sess_auth.authenticate() self.assertTrue(self.sess_auth.can_refresh_session()) def test_refresh(self): self.assertIsNone(self.sess_auth.get_session_resource_id()) self.assertIsNone(self.sess_auth.get_session_key()) mock_sess_serv = mock.Mock() mock_sess_serv.create_session.return_value = (self.sess_key, self.sess_uri) self.root.get_session_service.return_value = mock_sess_serv self.sess_auth.set_context(self.root, self.conn) self.sess_auth.refresh_session() self.assertEqual(self.sess_uri, self.sess_auth.get_session_resource_id()) self.assertEqual(self.sess_key, self.sess_auth.get_session_key()) self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) def test_close_do_nothing(self): self.sess_auth._session_key = None self.sess_auth.set_context(self.root, self.conn) self.sess_auth.close() self.conn.delete.assert_not_called() def test_close(self): self.sess_auth._session_key = 
self.sess_key self.sess_auth._session_resource_id = self.sess_uri self.sess_auth.set_context(self.root, self.conn) self.sess_auth.close() self.conn.delete.assert_called_once_with(self.sess_uri) self.assertIsNone(self.sess_auth.get_session_resource_id()) self.assertIsNone(self.sess_auth.get_session_key()) @mock.patch.object(auth, 'LOG', autospec=True) def test_close_fail(self, mock_LOG): self.sess_auth._session_key = self.sess_key self.sess_auth._session_resource_id = self.sess_uri self.conn.delete.side_effect = ( exceptions.ServerSideError( 'DELETE', 'any_url', mock.MagicMock())) self.sess_auth.set_context(self.root, self.conn) self.sess_auth.close() self.assertTrue(mock_LOG.warning.called) self.assertIsNone(self.sess_auth.get_session_resource_id()) self.assertIsNone(self.sess_auth.get_session_key()) @mock.patch.object(auth.SessionAuth, 'close', autospec=True) def test_context_manager(self, auth_close): with auth.SessionAuth(self.username, self.password) as session_auth: self.assertEqual(self.username, session_auth._username) self.assertEqual(self.password, session_auth._password) auth_close.assert_called_once_with(session_auth) class SessionOrBasicAuthTestCase(base.TestCase): @mock.patch.object(main, 'Sushy', autospec=True) @mock.patch.object(connector, 'Connector', autospec=True) def setUp(self, mock_connector, mock_root): super(SessionOrBasicAuthTestCase, self).setUp() self.username = 'TestUsername' self.password = 'TestP@$$W0RD' self.sess_key = 'TestingKey' self.sess_uri = ('https://testing:8000/redfish/v1/' 'SessionService/Sessions/testing') self.conn = mock_connector.return_value self.root = mock_root.return_value self.sess_basic_auth = auth.SessionOrBasicAuth(self.username, self.password) def test_init(self): self.assertEqual(self.username, self.sess_basic_auth._username) self.assertEqual(self.password, self.sess_basic_auth._password) self.assertIsNone(self.sess_basic_auth._root_resource) self.assertIsNone(self.sess_basic_auth._connector) 
self.assertIsNone(self.sess_basic_auth._session_key) self.assertIsNone(self.sess_basic_auth._session_resource_id) def test_get_session_key(self): self.sess_basic_auth._session_key = self.sess_key self.assertEqual(self.sess_key, self.sess_basic_auth.get_session_key()) def test_get_session_resource_id(self): self.sess_basic_auth._session_resource_id = self.sess_uri self.assertEqual(self.sess_uri, self.sess_basic_auth.get_session_resource_id()) def test_reset_session_attrs(self): self.sess_basic_auth._session_key = self.sess_key self.sess_basic_auth._session_resource_id = self.sess_uri self.assertEqual(self.sess_uri, self.sess_basic_auth.get_session_resource_id()) self.assertEqual(self.sess_key, self.sess_basic_auth.get_session_key()) self.sess_basic_auth.reset_session_attrs() self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) def test_set_context(self): self.sess_basic_auth.set_context(self.root, self.conn) self.assertEqual(self.sess_basic_auth._root_resource, self.root) self.assertEqual(self.sess_basic_auth._connector, self.conn) def test__do_authenticate_no_context(self): self.assertRaises(RuntimeError, self.sess_basic_auth.authenticate) def test__do_authenticate(self): self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) mock_sess_serv = mock.Mock() mock_sess_serv.create_session.return_value = (self.sess_key, self.sess_uri) self.root.get_session_service.return_value = mock_sess_serv self.sess_basic_auth.set_context(self.root, self.conn) self.sess_basic_auth.authenticate() self.assertEqual(self.sess_uri, self.sess_basic_auth.get_session_resource_id()) self.assertEqual(self.sess_key, self.sess_basic_auth.get_session_key()) self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) def test__do_authenticate_for_basic_auth(self): self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) 
self.assertIsNone(self.sess_basic_auth.get_session_key()) mock_sess_serv = mock.Mock() mock_sess_serv.create_session.side_effect = exceptions.SushyError self.root.get_session_service.return_value = mock_sess_serv self.sess_basic_auth.set_context(self.root, self.conn) self.sess_basic_auth.authenticate() self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) self.conn.set_http_basic_auth.assert_called_once_with( self.username, self.password) def test_can_refresh_session(self): mock_sess_serv = mock.Mock() mock_sess_serv.create_session.return_value = (self.sess_key, self.sess_uri) self.root.get_session_service.return_value = mock_sess_serv self.sess_basic_auth.set_context(self.root, self.conn) self.sess_basic_auth.authenticate() self.assertTrue(self.sess_basic_auth.can_refresh_session()) def test_refresh_no_previous_session(self): self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) self.sess_basic_auth.set_context(self.root, self.conn) self.sess_basic_auth.refresh_session() self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) self.conn.set_http_session_auth.assert_not_called() self.conn.set_http_basic_auth.assert_not_called() def test_refresh_previous_session_exists(self): self.sess_basic_auth._session_key = 'ThisisFirstKey' test_url = ('https://testing:8000/redfish/v1/SessionService' '/Sessions/testingfirst') self.sess_basic_auth._session_resource_id = test_url mock_sess_serv = mock.Mock() mock_sess_serv.create_session.return_value = (self.sess_key, self.sess_uri) self.root.get_session_service.return_value = mock_sess_serv self.sess_basic_auth.set_context(self.root, self.conn) self.sess_basic_auth.refresh_session() self.assertEqual(self.sess_uri, self.sess_basic_auth.get_session_resource_id()) self.assertEqual(self.sess_key, 
self.sess_basic_auth.get_session_key()) self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) def test_close_do_nothing(self): self.conn.delete.assert_not_called() def test_close(self): self.sess_basic_auth._session_key = self.sess_key self.sess_basic_auth._session_resource_id = self.sess_uri self.sess_basic_auth.set_context(self.root, self.conn) self.sess_basic_auth.close() self.conn.delete.assert_called_once_with(self.sess_uri) self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) @mock.patch.object(auth.SessionOrBasicAuth, 'close', autospec=True) def test_context_manager(self, auth_close): with auth.SessionOrBasicAuth( self.username, self.password) as session_or_base_auth: self.assertEqual(self.username, session_or_base_auth._username) self.assertEqual(self.password, session_or_base_auth._password) auth_close.assert_called_once_with(session_or_base_auth)
45.115169
78
0.672561
2,003
16,061
5.032451
0.07988
0.12619
0.070933
0.092758
0.878075
0.871825
0.815873
0.755754
0.733234
0.709524
0
0.002127
0.239026
16,061
355
79
45.242254
0.822615
0.037295
0
0.705882
0
0
0.024147
0.005438
0
0
0
0
0.311419
1
0.124567
false
0.069204
0.020761
0
0.155709
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
df94fc4d074bc18f9ac2649dd8cbd924c3bc1896
14,488
py
Python
tests/test_iec-61162-450-binary.py
fkie-cad/maritime-dissector
c5970eaa3958fe8cc52954d02871be7dfdf702c3
[ "MIT" ]
null
null
null
tests/test_iec-61162-450-binary.py
fkie-cad/maritime-dissector
c5970eaa3958fe8cc52954d02871be7dfdf702c3
[ "MIT" ]
null
null
null
tests/test_iec-61162-450-binary.py
fkie-cad/maritime-dissector
c5970eaa3958fe8cc52954d02871be7dfdf702c3
[ "MIT" ]
1
2022-02-02T06:02:01.000Z
2022-02-02T06:02:01.000Z
import os import pytest import subprocess import json import re FRAME_PROTOCOLS = ['eth:ethertype:ip:udp:iec-61162-450-binary:iec-61162-450-binary', 'eth:ethertype:ip:udp:iec-61162-450-binary:iec-61162-450-binary:binary-file-descriptor'] LAYERS = ['iec-61162-450-binary', 'binary-file-descriptor'] @pytest.fixture def packets_iec_450_binary_type1(): process = subprocess.run(['tshark', '-T', 'json', '-X', 'lua_script:../maritime-dissector.lua', '-r', 'iec-61162-450-binary-type1.pcap', '-2'], stdout=subprocess.PIPE) packets = json.loads(process.stdout) return packets @pytest.fixture def packet_iec_450_binary_type1_1(packets_iec_450_binary_type1): return packets_iec_450_binary_type1[0] @pytest.fixture def packet_iec_450_binary_type1_2(packets_iec_450_binary_type1): return packets_iec_450_binary_type1[1] @pytest.fixture def packet_iec_450_binary_type2(): process = subprocess.run(['tshark', '-T', 'json', '-X', 'lua_script:../maritime-dissector.lua', '-r', 'iec-61162-450-binary-type2.pcap'], stdout=subprocess.PIPE) packet = json.loads(process.stdout) return packet[0] @pytest.fixture def packets_all(packet_iec_450_binary_type1_1, packet_iec_450_binary_type1_2, packet_iec_450_binary_type2): return packet_iec_450_binary_type1_1, packet_iec_450_binary_type1_2, packet_iec_450_binary_type2 @pytest.mark.dependency() def test_frame_protocols_iec_450_binary(packets_all): for packet in packets_all: assert packet['_source']['layers']['frame']['frame.protocols'] in FRAME_PROTOCOLS @pytest.mark.dependency(depends=['test_frame_protocols_iec_450_binary']) def test_layers_iec_450_binary(packets_all): for packet in packets_all: assert packet['_source']['layers'] assert any((True for layer in LAYERS if layer in packet['_source']['layers'])) @pytest.mark.dependency(depends=['test_layers_iec_450_binary']) def test_iec_450_binary_type1(packets_iec_450_binary_type1): packet1, packet2 = packets_iec_450_binary_type1 assert 'iec-61162-450-binary' in packet1['_source']['layers'] assert 
'iec-61162-450-binary' in packet2['_source']['layers'] assert 'iec-61162-450-binary.token' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.token' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.version' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.version' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.srcid' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.srcid' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.destid' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.destid' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.mtype' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.mtype' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.blockid' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.blockid' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.seqnum' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.seqnum' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.maxseqnum' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.maxseqnum' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.nextpacket' in packet1['_source']['layers']['iec-61162-450-binary'] assert not 'iec-61162-450-binary.nextpacket' in packet2['_source']['layers']['iec-61162-450-binary'] assert not 'iec-61162-450-binary.firstpacket' in packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.firstpacket' in packet2['_source']['layers']['iec-61162-450-binary'] assert not 'iec-61162-450-binary.prevpacket' in 
packet1['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.prevpacket' in packet2['_source']['layers']['iec-61162-450-binary'] assert 'binary-file-descriptor' in packet1['_source']['layers']['iec-61162-450-binary'] assert not 'binary-file-descriptor' in packet2['_source']['layers']['iec-61162-450-binary'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1']) def test_iec_450_binary_type1_bfd(packet_iec_450_binary_type1_1): assert 'binary-file-descriptor.fd_length' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] assert 'binary-file-descriptor.file_length' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] assert 'binary-file-descriptor.stat_of_acquisition' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] assert 'binary-file-descriptor.device' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] assert 'binary-file-descriptor.channel' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] assert 'binary-file-descriptor.type_length' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] assert 'binary-file-descriptor.data_type' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] assert 'binary-file-descriptor.stat_and_info' in packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor'] @pytest.mark.dependency(depends=['test_layers_iec_450_binary']) def test_iec_450_binary_type2(packet_iec_450_binary_type2): assert 'iec-61162-450-binary' in packet_iec_450_binary_type2['_source']['layers'] assert 'iec-61162-450-binary.token' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.version' in 
packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.srcid' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.destid' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.mtype' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.blockid' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.seqnum' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert 'iec-61162-450-binary.maxseqnum' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert not 'iec-61162-450-binary.nextpacket' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert not 'iec-61162-450-binary.firstpacket' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert not 'iec-61162-450-binary.prevpacket' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] assert not 'binary-file-descriptor' in packet_iec_450_binary_type2['_source']['layers']['iec-61162-450-binary'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_token(packets_all): for packet in packets_all: assert 'RrUdP' == packet['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.token'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_version(packets_all): for packet in packets_all: assert '1' == packet['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.version'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_srcid(packets_all): for 
packet in packets_all: assert 'EI0001' == packet['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.srcid'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_destid(packets_all): for packet in packets_all: assert 'VR0001' == packet['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.destid'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_mtype(packets_all): packet1, packet2, packet3 = packets_all assert '1' == packet1['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.mtype'] assert '1' == packet2['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.mtype'] assert '2' == packet3['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.mtype'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_blockid(packets_all): packet1, packet2, packet3 = packets_all assert '513' == packet1['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.blockid'] assert '513' == packet2['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.blockid'] assert '514' == packet3['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.blockid'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_seqnum(packets_all): packet1, packet2, packet3 = packets_all assert '1' == packet1['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.seqnum'] assert '2' == packet2['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.seqnum'] assert '0' == packet3['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.seqnum'] 
@pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_maxseqnum(packets_all): packet1, packet2, packet3 = packets_all assert '2' == packet1['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.maxseqnum'] assert '2' == packet2['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.maxseqnum'] assert '0' == packet3['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.maxseqnum'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_firstpacket(packet_iec_450_binary_type1_2): assert '1' == packet_iec_450_binary_type1_2['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.firstpacket'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_nextpacket(packet_iec_450_binary_type1_1): assert '2' == packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.nextpacket'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd', 'test_iec_450_binary_type2']) def test_iec_450_binary_prevpacket(packet_iec_450_binary_type1_2): assert '1' == packet_iec_450_binary_type1_2['_source']['layers']['iec-61162-450-binary']['iec-61162-450-binary.prevpacket'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_fd_length(packet_iec_450_binary_type1_1): assert '31' == packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.fd_length'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_file_length(packet_iec_450_binary_type1_1): assert '1500' == 
packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.file_length'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_stat_of_acquisition(packet_iec_450_binary_type1_1): assert '0' == packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.stat_of_acquisition'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_device(packet_iec_450_binary_type1_1): assert '03' == packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.device'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_channel(packet_iec_450_binary_type1_1): assert '01' == packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.channel'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_type_length(packet_iec_450_binary_type1_1): assert '11' == packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.type_length'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_data_type(packet_iec_450_binary_type1_1): assert 'text/plain' == packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.data_type'] @pytest.mark.dependency(depends=['test_iec_450_binary_type1', 'test_iec_450_binary_type1_bfd']) def test_iec_450_binary_type1_bfd_stat_and_info(packet_iec_450_binary_type1_1): assert 'TEST\r\n' == 
packet_iec_450_binary_type1_1['_source']['layers']['iec-61162-450-binary']['binary-file-descriptor']['binary-file-descriptor.stat_and_info']
65.261261
173
0.763736
2,162
14,488
4.769658
0.044403
0.236521
0.16059
0.219259
0.94424
0.913887
0.896819
0.864721
0.820112
0.803336
0
0.129037
0.070334
14,488
221
174
65.556561
0.636573
0
0
0.222222
0
0.012346
0.467076
0.285892
0
0
0
0
0.475309
1
0.179012
false
0
0.030864
0.018519
0.240741
0
0
0
0
null
1
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
8015fca257e262754055eaa2ffbdb82c7af4bbe2
39,535
py
Python
demos/pymunk/bouncing_balls.py
rlugojr/PX8
b081611dde998a06910d57037ca20b5fbd90123b
[ "MIT" ]
21
2019-05-31T17:15:54.000Z
2022-02-26T04:59:07.000Z
examples/pymunk/bouncing_balls.py
peterbrittain/PX8
92bfe8e37a0019bedd8e3388180fe8a40a4ae250
[ "MIT" ]
null
null
null
examples/pymunk/bouncing_balls.py
peterbrittain/PX8
92bfe8e37a0019bedd8e3388180fe8a40a4ae250
[ "MIT" ]
1
2020-06-11T14:57:11.000Z
2020-06-11T14:57:11.000Z
pico-8 / python cartridge // http://www.pico-8.com version 5 __python__ import sys sys.path.append("/usr/local/lib/python3.5/site-packages/pymunk-5.1.0-py3.5-macosx-10.11-x86_64.egg/") import random import pymunk from pymunk import Vec2d, SpaceDebugDrawOptions class DrawOptions(SpaceDebugDrawOptions): def __init__(self): super(DrawOptions, self).__init__() def draw_circle(self, pos, angle, radius, outline_color, fill_color): print("DRAW CIRCLE", pos) p = to_px8(pos) #pygame.draw.circle(self.surface, fill_color, p, int(radius), 0) circ(p[0], p[1], int(radius), random.randint(1, 16)) def draw_segment(self, a, b, color): print("DRAW SEGMENT") def draw_fat_segment(self, a, b, radius, outline_color, fill_color): print("DRAW FAT SEGMENT", a, b, radius, outline_color, fill_color) p1 = to_px8(a) p2 = to_px8(b) line(p1[0], p1[1], p2[0], p2[1], 1) def to_px8(p): """Convenience method to convert pymunk coordinates to px8 """ return int(p[0]), 128 - int(p[1]) def add_ball(): global space, balls mass = random.randint(5, 20) radius = random.randint(1, 5) inertia = pymunk.moment_for_circle(mass, 0, radius, (0,0)) body = pymunk.Body(mass, inertia) body.position = random.randint(10,120), random.randint(100, 118) shape = pymunk.Circle(body, radius, (0,0)) shape.elasticity = 0.95 shape.friction = 0.9 space.add(body, shape) balls.append(shape) space = pymunk.Space() space.gravity = (0.0, -90.0) draw_options = DrawOptions() balls = [] def CreateSegment(static_body, x1, y1, x2, y2): return pymunk.Segment(static_body, (x1, 128 - y1), (x2, 128 - y2), 0.0) ### walls static_body = space.static_body static_lines = [CreateSegment(static_body, 20.0, 100.0, 100.0, 120.0), CreateSegment(static_body, 100.0, 120.0, 100.0, 80.0) ] for static_line in static_lines: static_line.elasticity = 0.95 static_line.friction = 0.9 space.add(static_lines) ticks_to_next_ball = 30 def _init(): cls() def _update(): pass def _draw(): print("DRAW") global draw_options, ticks_to_next_ball, space, balls cls() 
ticks_to_next_ball -= 1 if ticks_to_next_ball >= 0: add_ball() balls_to_remove = [] for ball in balls: if ball.body.position.x <= 0 or ball.body.position.x >= 128: balls_to_remove.append(ball) for ball in balls_to_remove: space.remove(ball, ball.body) balls.remove(ball) space.debug_draw(draw_options) dt = 1.0/60.0 for x in range(1): space.step(dt) __gfx__ 10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000088088000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000888887800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000888888800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000088888000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000008880000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000077007700777777007700000077000000777777000000000770007700777777007777770077000000777700000000000000000000000000000000000 00000000077007700770000007700000077000000770077000000000770007700770077007700770077000000770077000000000000000000000000000000000 00000000077007700770000007700000077000000770077000000000770707700770077007700770077000000770077000000000000000000000000000000000 
00000000077777700777700007700000077000000770077000000000777777700770077007777000077000000770077000000000000000000000000000000000 00000000077007700770000007700000077000000770077000000000777077700770077007700770077000000770077000000000000000000000000000000000 00000000077007700777777007777770077777700777777000000000770007700777777007700770077777700777777000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 __gff__ 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 __map__ 
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 __sfx__ 0110000000472004620c3400c34318470004311842500415003700c30500375183750c3000c3751f4730c375053720536211540114330c37524555247120c3730a470163521d07522375164120a211220252e315 01100000183732440518433394033c65539403185432b543184733940318433394033c655306053940339403184733940318423394033c655394031845321433184733940318473394033c655394033940339403 01100000247552775729755277552475527755297512775524755277552b755277552475527757297552775720755247572775524757207552475227755247522275526757297552675722752267522975526751 01100000001750c055003550c055001750c055003550c05500175180650c06518065001750c065003650c065051751106505365110650c17518075003650c0650a145160750a34516075111451d075113451d075 011000001b5771f55722537265171b5361f52622515265121b7771f76722757267471b7461f7362271522712185771b5571d53722517187361b7261d735227122454527537295252e5171d73514745227452e745 
01100000275422754227542275422e5412e5452b7412b5422b5452b54224544245422754229541295422954224742277422e7422b7422b5422b5472954227542295422b742307422e5422e7472b547305462e742 0110000030555307652e5752b755295622e7722b752277622707227561297522b072295472774224042275421b4421b5451b5421b4421d542295471d442295422444624546245472444727546275462944729547 0110000000200002000020000200002000020000200002000020000200002000020000200002000020000200110171d117110171d227131211f227130371f2370f0411b1470f2471b35716051221571626722367 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002e775000002e1752e075000002e1752e77500000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 __music__ 00 00044208 00 00044108 00 00010304 00 00010304 01 00010203 00 00010203 00 00010305 00 00010306 00 00010305 00 00010306 00 00010245 02 00010243 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344 00 41424344
98.345771
256
0.967092
767
39,535
49.732725
0.208605
0.758369
1.11742
1.476471
0.854555
0.85251
0.85251
0.84981
0.84981
0.84981
0
0.946529
0.02365
39,535
401
257
98.591022
0.041684
0.001745
0
0.719346
0
0.002725
0.003173
0.002081
0
1
0
0
0
0
null
null
0.002725
0.010899
null
null
0.010899
0
0
1
null
1
1
1
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
1
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
15
801e3cd3a9767efc3aaafad7e07c05c9b2ac094f
5,305
py
Python
test/zmq/rrclient.py
MistSun-Chen/py_verifier
7e9161d1fdbb611fe4be5eeb2f89a6286fa7b555
[ "MIT" ]
null
null
null
test/zmq/rrclient.py
MistSun-Chen/py_verifier
7e9161d1fdbb611fe4be5eeb2f89a6286fa7b555
[ "MIT" ]
null
null
null
test/zmq/rrclient.py
MistSun-Chen/py_verifier
7e9161d1fdbb611fe4be5eeb2f89a6286fa7b555
[ "MIT" ]
null
null
null
# # Request-reply client in Python # Connects REQ socket to tcp://localhost:5559 # Sends "Hello" to server, expects "World" back # import zmq import cv2 import json import time import base64 #transfer pic in 1920*1080 average 164 #Detect Pose average 90ms def send_list_bytes(): # Prepare our context and sockets context = zmq.Context() socket = context.socket(zmq.REQ) socket.connect("tcp://localhost:5559") # Prepare sending image img = cv2.imread(r'C:\Users\63277\Pictures\car\1.jpg') # jpg encode img_encode = cv2.imencode('.jpg', img)[1] #transfer to list img_list = img_encode.tolist() # sum the time_used sum = 0 # Do 10 requests, waiting each time for a response for request in range(1, 11): # create json req = { "req": request, "interface": "43", "api_key": "", # json不认numpy的array "img": img_list, # "num":request } start_time = time.time() # sending json str socket.send(bytes(json.dumps(req), encoding="utf-8")) print("sending time is {}".format(str(start_time))) message = socket.recv() print(f"Received reply {request} [{message}]") end_time = time.time() print(round((end_time - start_time) * 1000, 4)) sum = sum + (end_time - start_time) * 1000 print("average is {}".format(sum / 10)) # Prepare our context and sockets context = zmq.Context() socket = context.socket(zmq.REQ) socket.connect("tcp://localhost:5559") # Prepare sending image img = cv2.imread(r'C:\Users\63277\Pictures\car\1.jpg') # jpg encode img_encode = cv2.imencode('.jpg', img)[1] #transfer to list img_list = img_encode.tolist() # sum the time_used sum = 0 # Do 10 requests, waiting each time for a response for request in range(1, 11): # create json req = { "req": request, "interface": "43", "api_key": "", # json不认numpy的array "img": img_list, # "num":request } start_time = time.time() # sending json str socket.send(bytes(json.dumps(req), encoding="utf-8")) print("sending time is {}".format(str(start_time))) message = socket.recv() print(f"Received reply {request} [{message}]") end_time = time.time() 
print(round((end_time - start_time) * 1000, 4)) sum = sum + (end_time - start_time) * 1000 print("average is {}".format(sum / 10)) def send_base64_bytes(): # Prepare our context and sockets context = zmq.Context() socket = context.socket(zmq.DEALER) socket.connect("tcp://localhost:5559") # Prepare sending image img = cv2.imread(r'C:\Users\63277\Pictures\car\1.jpg') # jpg encode img_encode = cv2.imencode('.jpg', img)[1] #transfer to list base_64 = str(base64.b64encode(img_encode),encoding='utf-8') # sum the time_used sum = 0 # Do 10 requests, waiting each time for a response for request in range(1, 11): # create json req = { "req": request, "interface": "43", "api_key": "", # json不认numpy的array "img": base_64, # "num":request } start_time = time.time() # sending json str socket.send(bytes(json.dumps(req), encoding="utf-8")) print("sending time is {}".format(str(start_time))) message = socket.recv() print(f"Received reply {request} [{message}]") end_time = time.time() print(round((end_time - start_time) * 1000, 4)) sum = sum + (end_time - start_time) * 1000 print("average is {}".format(sum / 10)) def send_str_bytes(): # Prepare our context and sockets context = zmq.Context() socket = context.socket(zmq.REQ) socket.connect("tcp://localhost:5559") #Prepare sending image img = cv2.imread(r'C:\Users\63277\Pictures\car\1.jpg') #jpg encode img_encode = cv2.imencode('.jpg', img)[1] # transfer to bytes img_bytes = img_encode.tobytes() print(type(img_bytes)) # transfer to string img_str = bytes.decode(img_bytes,encoding="ascii") # print(type(img_encode)) # print(type(img_encode.tostring())) # img_str = img_encode.tostring() # print(type(img_str)) #sum the time_used sum = 0 # Do 10 requests, waiting each time for a response for request in range(1, 11): # create json req = { "req": request, "interface": "43", "api_key": "", # json不认numpy的array "img": img_str, # "num":request } #sending json str socket.send(bytes(json.dumps(req), encoding="utf-8")) start_time = time.time() 
message = socket.recv() print(f"Received reply {request} [{message}]") end_time = time.time() print(round((end_time - start_time) * 1000, 4)) sum = sum + (end_time - start_time) * 1000 print("average is {}".format(sum / 10)) if __name__ == '__main__': # send_str_bytes() # send_list_bytes() send_base64_bytes()
25.628019
64
0.576626
666
5,305
4.474474
0.163664
0.042953
0.032215
0.042953
0.821477
0.821477
0.821477
0.821477
0.821477
0.821477
0
0.043848
0.290669
5,305
207
65
25.628019
0.748073
0.224882
0
0.796117
0
0
0.150888
0.032544
0
0
0
0
0
1
0.029126
false
0
0.048544
0
0.07767
0.15534
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1d309bcd85d19096a8be9f62a61c9f962f95c36e
119,965
py
Python
tests/test_gsf.py
bbc/rd-apmm-python-lib-mediagrains
84c9de511cc53418c277867eaf143f2cc8730d02
[ "ECL-2.0", "Apache-2.0" ]
6
2018-03-26T23:49:34.000Z
2021-12-23T10:06:09.000Z
tests/test_gsf.py
bbc/rd-apmm-python-lib-mediagrains
84c9de511cc53418c277867eaf143f2cc8730d02
[ "ECL-2.0", "Apache-2.0" ]
34
2018-03-21T16:45:10.000Z
2022-03-28T13:27:34.000Z
tests/test_gsf.py
bbc/rd-apmm-python-lib-mediagrains
84c9de511cc53418c277867eaf143f2cc8730d02
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright 2018 British Broadcasting Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from asynctest import TestCase, mock from uuid import UUID from mediagrains import Grain, VideoGrain, AudioGrain, CodedVideoGrain, CodedAudioGrain, EventGrain from mediagrains.grain import VIDEOGRAIN, AUDIOGRAIN, CODEDVIDEOGRAIN, CODEDAUDIOGRAIN, EVENTGRAIN from mediagrains.gsf import loads, load, dumps, GSFEncoder, GSFDecoder, AsyncGSFBlock, GrainDataLoadingMode from mediagrains.gsf import GSFDecodeError from mediagrains.gsf import GSFEncodeError from mediagrains.gsf import GSFDecodeBadVersionError from mediagrains.gsf import GSFDecodeBadFileTypeError from mediagrains.gsf import GSFEncodeAddToActiveDump from mediagrains.comparison import compare_grain from mediagrains.cogenums import CogFrameFormat, CogFrameLayout, CogAudioFormat from mediatimestamp.immutable import Timestamp, TimeOffset from datetime import datetime from fractions import Fraction from io import BytesIO from mediagrains.utils.asyncbinaryio import AsyncBytesIO from frozendict import frozendict from os import SEEK_SET from fixtures import suppress_deprecation_warnings with open('examples/video.gsf', 'rb') as f: VIDEO_DATA = f.read() with open('examples/coded_video.gsf', 'rb') as f: CODED_VIDEO_DATA = f.read() with open('examples/audio.gsf', 'rb') as f: AUDIO_DATA = f.read() with open('examples/coded_audio.gsf', 'rb') as f: CODED_AUDIO_DATA = f.read() with open('examples/event.gsf', 'rb') as 
f: EVENT_DATA = f.read() with open('examples/interleaved.gsf', 'rb') as f: INTERLEAVED_DATA = f.read() class TestGSFDumps(TestCase): def test_dumps_no_grains(self): uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): (head, segments) = loads(dumps([], tags=[('potato', 'harvest')], segment_tags=[('upside', 'down')])) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(head['tags'], [('potato', 'harvest')]) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 0) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertNotIn(head['segments'][0]['id'], [head['id']]) self.assertIn('tags', head['segments'][0]) self.assertEqual(head['segments'][0]['tags'], [('upside', 'down')]) if len(segments) > 0: self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), 0) async def test_async_encode_no_grains(self): uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): f = BytesIO() async with GSFEncoder(f, tags=[('potato', 'harvest')], segments=[{'tags': [('upside', 'down')]}]): pass (head, segments) = loads(f.getvalue()) self.assertIn('id', head) 
self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(head['tags'], [('potato', 'harvest')]) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 0) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertNotIn(head['segments'][0]['id'], [head['id']]) self.assertIn('tags', head['segments'][0]) self.assertEqual(head['segments'][0]['tags'], [('upside', 'down')]) if len(segments) > 0: self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), 0) def test_dumps_videograin(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080) for i in range(0, len(grain.data)): grain.data[i] = i & 0xFF grain.source_aspect_ratio = Fraction(16, 9) grain.pixel_aspect_ratio = 1 uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): (head, segments) = loads(dumps([grain])) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 1) self.assertIn('local_id', head['segments'][0]) 
self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'video') self.assertEqual(segments[1][0].format, CogFrameFormat.S16_422_10BIT) self.assertEqual(segments[1][0].width, 1920) self.assertEqual(segments[1][0].height, 1080) self.assertEqual(segments[1][0].source_aspect_ratio, Fraction(16, 9)) self.assertEqual(segments[1][0].pixel_aspect_ratio, Fraction(1, 1)) self.assertEqual(segments[1][0].data, grain.data) async def test_async_encode_videograin(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080) for i in range(0, len(grain.data)): grain.data[i] = i & 0xFF grain.source_aspect_ratio = Fraction(16, 9) grain.pixel_aspect_ratio = 1 uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): f = BytesIO() async with GSFEncoder(f) as enc: await enc.add_grain(grain) (head, segments) = loads(f.getvalue()) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', 
head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 1) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'video') self.assertEqual(segments[1][0].format, CogFrameFormat.S16_422_10BIT) self.assertEqual(segments[1][0].width, 1920) self.assertEqual(segments[1][0].height, 1080) self.assertEqual(segments[1][0].source_aspect_ratio, Fraction(16, 9)) self.assertEqual(segments[1][0].pixel_aspect_ratio, Fraction(1, 1)) self.assertEqual(segments[1][0].data, grain.data) def test_dumps_videograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080) grain1 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) grain0.source_aspect_ratio = Fraction(16, 9) grain0.pixel_aspect_ratio = 1 uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): (head, segments) = loads(dumps([grain0, grain1])) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', 
head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'video') self.assertEqual(segments[1][0].format, CogFrameFormat.S16_422_10BIT) self.assertEqual(segments[1][0].width, 1920) self.assertEqual(segments[1][0].height, 1080) self.assertEqual(segments[1][0].source_aspect_ratio, Fraction(16, 9)) self.assertEqual(segments[1][0].pixel_aspect_ratio, Fraction(1, 1)) self.assertEqual(segments[1][0].data, grain0.data) self.assertEqual(segments[1][1].source_id, src_id) self.assertEqual(segments[1][1].flow_id, flow_id) self.assertEqual(segments[1][1].grain_type, 'video') self.assertEqual(segments[1][1].format, CogFrameFormat.S16_422_10BIT) self.assertEqual(segments[1][1].width, 1920) self.assertEqual(segments[1][1].height, 1080) self.assertIsNone(segments[1][1].source_aspect_ratio) self.assertIsNone(segments[1][1].pixel_aspect_ratio) self.assertEqual(segments[1][1].data, grain1.data) async def test_async_encode_videograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080) grain1 = VideoGrain(src_id, flow_id, 
cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) grain0.source_aspect_ratio = Fraction(16, 9) grain0.pixel_aspect_ratio = 1 uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): f = BytesIO() async with GSFEncoder(f) as enc: await enc.add_grains([grain0, grain1]) (head, segments) = loads(f.getvalue()) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'video') self.assertEqual(segments[1][0].format, CogFrameFormat.S16_422_10BIT) self.assertEqual(segments[1][0].width, 1920) self.assertEqual(segments[1][0].height, 1080) self.assertEqual(segments[1][0].source_aspect_ratio, Fraction(16, 9)) self.assertEqual(segments[1][0].pixel_aspect_ratio, Fraction(1, 1)) self.assertEqual(segments[1][0].data, 
grain0.data) self.assertEqual(segments[1][1].source_id, src_id) self.assertEqual(segments[1][1].flow_id, flow_id) self.assertEqual(segments[1][1].grain_type, 'video') self.assertEqual(segments[1][1].format, CogFrameFormat.S16_422_10BIT) self.assertEqual(segments[1][1].width, 1920) self.assertEqual(segments[1][1].height, 1080) self.assertIsNone(segments[1][1].source_aspect_ratio) self.assertIsNone(segments[1][1].pixel_aspect_ratio) self.assertEqual(segments[1][1].data, grain1.data) def test_dumps_audiograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = AudioGrain(src_id, flow_id, cog_audio_format=CogAudioFormat.S16_PLANES, samples=1920, sample_rate=48000) grain1 = AudioGrain(src_id, flow_id, cog_audio_format=CogAudioFormat.S16_PLANES, samples=1920, sample_rate=48000) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): (head, segments) = loads(dumps([grain0, grain1])) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) 
self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'audio') self.assertEqual(segments[1][0].format, CogAudioFormat.S16_PLANES) self.assertEqual(segments[1][0].samples, 1920) self.assertEqual(segments[1][0].sample_rate, 48000) self.assertEqual(segments[1][0].data, grain0.data) self.assertEqual(segments[1][1].source_id, src_id) self.assertEqual(segments[1][1].flow_id, flow_id) self.assertEqual(segments[1][1].grain_type, 'audio') self.assertEqual(segments[1][1].format, CogAudioFormat.S16_PLANES) self.assertEqual(segments[1][1].samples, 1920) self.assertEqual(segments[1][1].sample_rate, 48000) self.assertEqual(segments[1][1].data, grain1.data) async def test_async_encode_audiograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = AudioGrain(src_id, flow_id, cog_audio_format=CogAudioFormat.S16_PLANES, samples=1920, sample_rate=48000) grain1 = AudioGrain(src_id, flow_id, cog_audio_format=CogAudioFormat.S16_PLANES, samples=1920, sample_rate=48000) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): f = BytesIO() async with GSFEncoder(f) as enc: await enc.add_grains([grain0, grain1]) (head, segments) = loads(f.getvalue()) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) 
self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'audio') self.assertEqual(segments[1][0].format, CogAudioFormat.S16_PLANES) self.assertEqual(segments[1][0].samples, 1920) self.assertEqual(segments[1][0].sample_rate, 48000) self.assertEqual(segments[1][0].data, grain0.data) self.assertEqual(segments[1][1].source_id, src_id) self.assertEqual(segments[1][1].flow_id, flow_id) self.assertEqual(segments[1][1].grain_type, 'audio') self.assertEqual(segments[1][1].format, CogAudioFormat.S16_PLANES) self.assertEqual(segments[1][1].samples, 1920) self.assertEqual(segments[1][1].sample_rate, 48000) self.assertEqual(segments[1][1].data, grain1.data) def test_dumps_codedvideograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = CodedVideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.VC2, origin_width=1920, origin_height=1080, coded_width=1920, coded_height=1088, is_key_frame=True, temporal_offset=-23, length=1024, unit_offsets=[5, 15, 105]) grain1 = CodedVideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.VC2, origin_width=1920, origin_height=1080, coded_width=1920, coded_height=1088, temporal_offset=17, 
length=256) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): (head, segments) = loads(dumps([grain0, grain1])) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'coded_video') self.assertEqual(segments[1][0].format, CogFrameFormat.VC2) self.assertEqual(segments[1][0].origin_width, 1920) self.assertEqual(segments[1][0].origin_height, 1080) self.assertEqual(segments[1][0].coded_width, 1920) self.assertEqual(segments[1][0].coded_height, 1088) self.assertEqual(segments[1][0].temporal_offset, -23) self.assertEqual(segments[1][0].unit_offsets, [5, 15, 105]) self.assertTrue(segments[1][0].is_key_frame) self.assertEqual(segments[1][0].data, grain0.data) self.assertEqual(segments[1][1].source_id, src_id) 
self.assertEqual(segments[1][1].flow_id, flow_id) self.assertEqual(segments[1][1].grain_type, 'coded_video') self.assertEqual(segments[1][1].format, CogFrameFormat.VC2) self.assertEqual(segments[1][1].origin_width, 1920) self.assertEqual(segments[1][1].origin_height, 1080) self.assertEqual(segments[1][1].coded_width, 1920) self.assertEqual(segments[1][1].coded_height, 1088) self.assertEqual(segments[1][1].temporal_offset, 17) self.assertEqual(segments[1][1].unit_offsets, []) self.assertFalse(segments[1][1].is_key_frame) self.assertEqual(segments[1][1].data, grain1.data) async def test_async_encode_codedvideograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = CodedVideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.VC2, origin_width=1920, origin_height=1080, coded_width=1920, coded_height=1088, is_key_frame=True, temporal_offset=-23, length=1024, unit_offsets=[5, 15, 105]) grain1 = CodedVideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.VC2, origin_width=1920, origin_height=1080, coded_width=1920, coded_height=1088, temporal_offset=17, length=256) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): f = BytesIO() async with GSFEncoder(f) as enc: await enc.add_grains([grain0, grain1]) (head, segments) = loads(f.getvalue()) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) 
self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'coded_video') self.assertEqual(segments[1][0].format, CogFrameFormat.VC2) self.assertEqual(segments[1][0].origin_width, 1920) self.assertEqual(segments[1][0].origin_height, 1080) self.assertEqual(segments[1][0].coded_width, 1920) self.assertEqual(segments[1][0].coded_height, 1088) self.assertEqual(segments[1][0].temporal_offset, -23) self.assertEqual(segments[1][0].unit_offsets, [5, 15, 105]) self.assertTrue(segments[1][0].is_key_frame) self.assertEqual(segments[1][0].data, grain0.data) self.assertEqual(segments[1][1].source_id, src_id) self.assertEqual(segments[1][1].flow_id, flow_id) self.assertEqual(segments[1][1].grain_type, 'coded_video') self.assertEqual(segments[1][1].format, CogFrameFormat.VC2) self.assertEqual(segments[1][1].origin_width, 1920) self.assertEqual(segments[1][1].origin_height, 1080) self.assertEqual(segments[1][1].coded_width, 1920) self.assertEqual(segments[1][1].coded_height, 1088) self.assertEqual(segments[1][1].temporal_offset, 17) self.assertEqual(segments[1][1].unit_offsets, []) self.assertFalse(segments[1][1].is_key_frame) self.assertEqual(segments[1][1].data, grain1.data) def test_dumps_codedaudiograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = CodedAudioGrain(src_id, 
flow_id, cog_audio_format=CogAudioFormat.AAC, samples=1920, sample_rate=48000, priming=23, remainder=17, length=1024) grain1 = CodedAudioGrain(src_id, flow_id, cog_audio_format=CogAudioFormat.AAC, samples=1920, sample_rate=48000, priming=5, remainder=104, length=1500) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): (head, segments) = loads(dumps([grain0, grain1])) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', head['segments'][0]) self.assertIn(head['segments'][0]['id'], uuids) self.assertIn('tags', head['segments'][0]) self.assertEqual(len(head['segments'][0]['tags']), 0) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), head['segments'][0]['count']) self.assertEqual(segments[1][0].source_id, src_id) self.assertEqual(segments[1][0].flow_id, flow_id) self.assertEqual(segments[1][0].grain_type, 'coded_audio') self.assertEqual(segments[1][0].format, CogAudioFormat.AAC) self.assertEqual(segments[1][0].samples, 1920) self.assertEqual(segments[1][0].sample_rate, 48000) self.assertEqual(segments[1][0].priming, 23) self.assertEqual(segments[1][0].remainder, 17) self.assertEqual(segments[1][0].data, 
grain0.data) self.assertEqual(segments[1][1].source_id, src_id) self.assertEqual(segments[1][1].flow_id, flow_id) self.assertEqual(segments[1][1].grain_type, 'coded_audio') self.assertEqual(segments[1][1].format, CogAudioFormat.AAC) self.assertEqual(segments[1][1].samples, 1920) self.assertEqual(segments[1][1].sample_rate, 48000) self.assertEqual(segments[1][1].priming, 5) self.assertEqual(segments[1][1].remainder, 104) self.assertEqual(segments[1][1].data, grain1.data) async def test_async_encode_codedaudiograins(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = CodedAudioGrain(src_id, flow_id, cog_audio_format=CogAudioFormat.AAC, samples=1920, sample_rate=48000, priming=23, remainder=17, length=1024) grain1 = CodedAudioGrain(src_id, flow_id, cog_audio_format=CogAudioFormat.AAC, samples=1920, sample_rate=48000, priming=5, remainder=104, length=1500) for i in range(0, len(grain0.data)): grain0.data[i] = i & 0xFF for i in range(0, len(grain1.data)): grain1.data[i] = 0xFF - (i & 0xFF) uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): f = BytesIO() async with GSFEncoder(f) as enc: await enc.add_grains([grain0, grain1]) (head, segments) = loads(f.getvalue()) self.assertIn('id', head) self.assertIn(head['id'], uuids) self.assertIn('tags', head) self.assertEqual(len(head['tags']), 0) self.assertIn('created', head) self.assertEqual(head['created'], created) self.assertIn('segments', head) self.assertEqual(len(head['segments']), 1) self.assertIn('count', head['segments'][0]) self.assertEqual(head['segments'][0]['count'], 2) self.assertIn('local_id', head['segments'][0]) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertIn('id', 
                      head['segments'][0])
        self.assertIn(head['segments'][0]['id'], uuids)
        self.assertIn('tags', head['segments'][0])
        self.assertEqual(len(head['segments'][0]['tags']), 0)
        self.assertEqual(len(segments), 1)
        self.assertIn(1, segments)
        self.assertEqual(len(segments[1]), head['segments'][0]['count'])
        # First coded-audio grain round-trips with its coding parameters and payload.
        self.assertEqual(segments[1][0].source_id, src_id)
        self.assertEqual(segments[1][0].flow_id, flow_id)
        self.assertEqual(segments[1][0].grain_type, 'coded_audio')
        self.assertEqual(segments[1][0].format, CogAudioFormat.AAC)
        self.assertEqual(segments[1][0].samples, 1920)
        self.assertEqual(segments[1][0].sample_rate, 48000)
        self.assertEqual(segments[1][0].priming, 23)
        self.assertEqual(segments[1][0].remainder, 17)
        self.assertEqual(segments[1][0].data, grain0.data)
        # Second grain likewise, with its own priming/remainder values.
        self.assertEqual(segments[1][1].source_id, src_id)
        self.assertEqual(segments[1][1].flow_id, flow_id)
        self.assertEqual(segments[1][1].grain_type, 'coded_audio')
        self.assertEqual(segments[1][1].format, CogAudioFormat.AAC)
        self.assertEqual(segments[1][1].samples, 1920)
        self.assertEqual(segments[1][1].sample_rate, 48000)
        self.assertEqual(segments[1][1].priming, 5)
        self.assertEqual(segments[1][1].remainder, 104)
        self.assertEqual(segments[1][1].data, grain1.data)

    def test_dumps_eventgrains(self):
        """dumps() of two event grains round-trips event_type, topic and event_data via loads()."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = EventGrain(src_id, flow_id)
        grain0.event_type = "urn:x-testing:stupid/type"
        grain0.topic = "/watashi"
        grain0.append("/inu", post="desu")
        grain1 = EventGrain(src_id, flow_id)
        grain1.event_type = "urn:x-testing:clever/type"
        grain1.topic = "/inu"
        grain1.append("/sukimono", pre="da")
        # Pin uuid1 (file/segment ids) and datetime.now ('created' header field)
        # so the encoded header is deterministic.
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                (head, segments) = loads(dumps([grain0, grain1]))
        # File header: pinned ids and creation time, one segment holding both grains.
        self.assertIn('id', head)
        self.assertIn(head['id'], uuids)
        self.assertIn('tags', head)
        self.assertEqual(len(head['tags']), 0)
        self.assertIn('created', head)
        self.assertEqual(head['created'], created)
        self.assertIn('segments', head)
        self.assertEqual(len(head['segments']), 1)
        self.assertIn('count', head['segments'][0])
        self.assertEqual(head['segments'][0]['count'], 2)
        self.assertIn('local_id', head['segments'][0])
        self.assertEqual(head['segments'][0]['local_id'], 1)
        self.assertIn('id', head['segments'][0])
        self.assertIn(head['segments'][0]['id'], uuids)
        self.assertIn('tags', head['segments'][0])
        self.assertEqual(len(head['segments'][0]['tags']), 0)
        self.assertEqual(len(segments), 1)
        self.assertIn(1, segments)
        self.assertEqual(len(segments[1]), head['segments'][0]['count'])
        # grain0: post-only event datum.
        self.assertEqual(segments[1][0].source_id, src_id)
        self.assertEqual(segments[1][0].flow_id, flow_id)
        self.assertEqual(segments[1][0].grain_type, 'event')
        self.assertEqual(segments[1][0].event_type, "urn:x-testing:stupid/type")
        self.assertEqual(segments[1][0].topic, "/watashi")
        self.assertEqual(len(segments[1][0].event_data), 1)
        self.assertEqual(segments[1][0].event_data[0].path, "/inu")
        self.assertIsNone(segments[1][0].event_data[0].pre)
        self.assertEqual(segments[1][0].event_data[0].post, "desu")
        # grain1: pre-only event datum.
        self.assertEqual(segments[1][1].source_id, src_id)
        self.assertEqual(segments[1][1].flow_id, flow_id)
        self.assertEqual(segments[1][1].grain_type, 'event')
        self.assertEqual(segments[1][1].event_type, "urn:x-testing:clever/type")
        self.assertEqual(segments[1][1].topic, "/inu")
        self.assertEqual(len(segments[1][1].event_data), 1)
        self.assertEqual(segments[1][1].event_data[0].path, "/sukimono")
        self.assertEqual(segments[1][1].event_data[0].pre, "da")
        self.assertIsNone(segments[1][1].event_data[0].post)

    async def test_async_encode_eventgrains(self):
        """Async GSFEncoder round-trips the same two event grains as the sync dumps() test."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = EventGrain(src_id, flow_id)
        grain0.event_type = "urn:x-testing:stupid/type"
        grain0.topic = "/watashi"
        grain0.append("/inu", post="desu")
        grain1 = EventGrain(src_id, flow_id)
        grain1.event_type = "urn:x-testing:clever/type"
        grain1.topic = "/inu"
        grain1.append("/sukimono", pre="da")
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                f = BytesIO()
                # Encode via the async context manager; the file is complete on exit.
                async with GSFEncoder(f) as enc:
                    await enc.add_grains([grain0, grain1])
        (head, segments) = loads(f.getvalue())
        self.assertIn('id', head)
        self.assertIn(head['id'], uuids)
        self.assertIn('tags', head)
        self.assertEqual(len(head['tags']), 0)
        self.assertIn('created', head)
        self.assertEqual(head['created'], created)
        self.assertIn('segments', head)
        self.assertEqual(len(head['segments']), 1)
        self.assertIn('count', head['segments'][0])
        self.assertEqual(head['segments'][0]['count'], 2)
        self.assertIn('local_id', head['segments'][0])
        self.assertEqual(head['segments'][0]['local_id'], 1)
        self.assertIn('id', head['segments'][0])
        self.assertIn(head['segments'][0]['id'], uuids)
        self.assertIn('tags', head['segments'][0])
        self.assertEqual(len(head['segments'][0]['tags']), 0)
        self.assertEqual(len(segments), 1)
        self.assertIn(1, segments)
        self.assertEqual(len(segments[1]), head['segments'][0]['count'])
        self.assertEqual(segments[1][0].source_id, src_id)
        self.assertEqual(segments[1][0].flow_id, flow_id)
        self.assertEqual(segments[1][0].grain_type, 'event')
        self.assertEqual(segments[1][0].event_type, "urn:x-testing:stupid/type")
        self.assertEqual(segments[1][0].topic, "/watashi")
        self.assertEqual(len(segments[1][0].event_data), 1)
        self.assertEqual(segments[1][0].event_data[0].path, "/inu")
        self.assertIsNone(segments[1][0].event_data[0].pre)
        self.assertEqual(segments[1][0].event_data[0].post, "desu")
        # (continuation) second event grain of the async encode test.
        self.assertEqual(segments[1][1].source_id, src_id)
        self.assertEqual(segments[1][1].flow_id, flow_id)
        self.assertEqual(segments[1][1].grain_type, 'event')
        self.assertEqual(segments[1][1].event_type, "urn:x-testing:clever/type")
        self.assertEqual(segments[1][1].topic, "/inu")
        self.assertEqual(len(segments[1][1].event_data), 1)
        self.assertEqual(segments[1][1].event_data[0].path, "/sukimono")
        self.assertEqual(segments[1][1].event_data[0].pre, "da")
        self.assertIsNone(segments[1][1].event_data[0].post)

    def test_dumps_emptygrains(self):
        """dumps() of two empty grains round-trips timelabels and yields grains with no payload."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = Grain(src_id, flow_id)
        grain0.timelabels = [{
            'tag': 'tiggle',
            'timelabel': {
                'frames_since_midnight': 7,
                'frame_rate_numerator': 300,
                'frame_rate_denominator': 1,
                'drop_frame': False
            }
        }]
        grain1 = Grain(src_id, flow_id)
        # Pin uuid1 and datetime.now so the header is deterministic.
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                (head, segments) = loads(dumps([grain0, grain1]))
        self.assertIn('id', head)
        self.assertIn(head['id'], uuids)
        self.assertIn('tags', head)
        self.assertEqual(len(head['tags']), 0)
        self.assertIn('created', head)
        self.assertEqual(head['created'], created)
        self.assertIn('segments', head)
        self.assertEqual(len(head['segments']), 1)
        self.assertIn('count', head['segments'][0])
        self.assertEqual(head['segments'][0]['count'], 2)
        self.assertIn('local_id', head['segments'][0])
        self.assertEqual(head['segments'][0]['local_id'], 1)
        self.assertIn('id', head['segments'][0])
        self.assertIn(head['segments'][0]['id'], uuids)
        self.assertIn('tags', head['segments'][0])
        self.assertEqual(len(head['segments'][0]['tags']), 0)
        self.assertEqual(len(segments), 1)
        self.assertIn(1, segments)
        self.assertEqual(len(segments[1]), head['segments'][0]['count'])
        self.assertEqual(segments[1][0].source_id, src_id)
        self.assertEqual(segments[1][0].flow_id, flow_id)
        self.assertEqual(segments[1][0].grain_type, 'empty')
        # The timelabel set on grain0 survives the round trip verbatim.
        self.assertEqual(segments[1][0].timelabels, [{
            'tag': 'tiggle',
            'timelabel': {
                'frames_since_midnight': 7,
                'frame_rate_numerator': 300,
                'frame_rate_denominator': 1,
                'drop_frame': False
            }
        }])
        self.assertIsNone(segments[1][0].data)
        self.assertEqual(segments[1][1].source_id, src_id)
        self.assertEqual(segments[1][1].flow_id, flow_id)
        self.assertEqual(segments[1][1].grain_type, 'empty')
        self.assertIsNone(segments[1][1].data)

    async def test_async_encode_emptygrains(self):
        """Async GSFEncoder round-trips the same two empty grains as the sync dumps() test."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = Grain(src_id, flow_id)
        grain0.timelabels = [{
            'tag': 'tiggle',
            'timelabel': {
                'frames_since_midnight': 7,
                'frame_rate_numerator': 300,
                'frame_rate_denominator': 1,
                'drop_frame': False
            }
        }]
        grain1 = Grain(src_id, flow_id)
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                f = BytesIO()
                async with GSFEncoder(f) as enc:
                    await enc.add_grains([grain0, grain1])
        (head, segments) = loads(f.getvalue())
        self.assertIn('id', head)
        self.assertIn(head['id'], uuids)
        self.assertIn('tags', head)
        self.assertEqual(len(head['tags']), 0)
        self.assertIn('created', head)
        self.assertEqual(head['created'], created)
        self.assertIn('segments', head)
        self.assertEqual(len(head['segments']), 1)
        self.assertIn('count', head['segments'][0])
        self.assertEqual(head['segments'][0]['count'], 2)
        self.assertIn('local_id', head['segments'][0])
        self.assertEqual(head['segments'][0]['local_id'], 1)
        self.assertIn('id', head['segments'][0])
        self.assertIn(head['segments'][0]['id'], uuids)
        # (continuation) remaining header and per-grain checks of the async empty-grain test.
        self.assertIn('tags', head['segments'][0])
        self.assertEqual(len(head['segments'][0]['tags']), 0)
        self.assertEqual(len(segments), 1)
        self.assertIn(1, segments)
        self.assertEqual(len(segments[1]), head['segments'][0]['count'])
        self.assertEqual(segments[1][0].source_id, src_id)
        self.assertEqual(segments[1][0].flow_id, flow_id)
        self.assertEqual(segments[1][0].grain_type, 'empty')
        self.assertEqual(segments[1][0].timelabels, [{
            'tag': 'tiggle',
            'timelabel': {
                'frames_since_midnight': 7,
                'frame_rate_numerator': 300,
                'frame_rate_denominator': 1,
                'drop_frame': False
            }
        }])
        self.assertIsNone(segments[1][0].data)
        self.assertEqual(segments[1][1].source_id, src_id)
        self.assertEqual(segments[1][1].flow_id, flow_id)
        self.assertEqual(segments[1][1].grain_type, 'empty')
        self.assertIsNone(segments[1][1].data)

    def test_dumps_invalidgrains(self):
        """dumps() raises GSFEncodeError for a grain with an unknown grain_type."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain = Grain(src_id, flow_id)
        grain.grain_type = "invalid"
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with self.assertRaises(GSFEncodeError):
            with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
                with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                    (head, segments) = loads(dumps([grain]))

    async def test_async_encode_invalidgrains(self):
        """The async encoder likewise raises GSFEncodeError for an unknown grain_type."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain = Grain(src_id, flow_id)
        grain.grain_type = "invalid"
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with self.assertRaises(GSFEncodeError):
            with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
                with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                    async with GSFEncoder(BytesIO()) as enc:
                        await enc.add_grains([grain])

    @suppress_deprecation_warnings
    def test_dump_progressively__deprecated(self):
        """Deprecated start_dump/add_grain/end_dump API: segment count stays -1 until end_dump."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        grain1 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        file = BytesIO()
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                enc = GSFEncoder(file)
                enc.add_segment()
                # Nothing is written before the dump starts.
                self.assertEqual(len(file.getvalue()), 0)
                enc.start_dump()
                # Snapshot the file after each operation so progress can be checked.
                dump0 = file.getvalue()
                (head0, segments0) = loads(dump0)
                enc.add_grain(grain0)
                dump1 = file.getvalue()
                (head1, segments1) = loads(dump1)
                enc.add_grain(grain1, segment_local_id=1)
                dump2 = file.getvalue()
                (head2, segments2) = loads(dump2)
                enc.end_dump()
                dump3 = file.getvalue()
                (head3, segments3) = loads(dump3)
        # Count is -1 (open-ended) while dumping, finalised to 2 by end_dump.
        self.assertEqual(head0['segments'][0]['count'], -1)
        self.assertEqual(head1['segments'][0]['count'], -1)
        self.assertEqual(head2['segments'][0]['count'], -1)
        self.assertEqual(head3['segments'][0]['count'], 2)
        if 1 in segments0:
            self.assertEqual(len(segments0[1]), 0)
        self.assertEqual(len(segments1[1]), 1)
        self.assertEqual(len(segments2[1]), 2)
        self.assertEqual(len(segments3[1]), 2)

    @suppress_deprecation_warnings
    def test_dump_progressively_with_segments__deprecated(self):
        """Same progressive-dump behaviour when grains are added via the segment object."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        grain1 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        uuids = \
            [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
             UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        file = BytesIO()
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                enc = GSFEncoder(file)
                seg = enc.add_segment()
                self.assertEqual(len(file.getvalue()), 0)
                enc.start_dump()
                dump0 = file.getvalue()
                (head0, segments0) = loads(dump0)
                seg.add_grain(grain0)
                dump1 = file.getvalue()
                (head1, segments1) = loads(dump1)
                # NOTE(review): segment_local_id passed to the segment-level add_grain
                # looks redundant here -- confirm GSFSegment.add_grain accepts it.
                seg.add_grain(grain1, segment_local_id=1)
                dump2 = file.getvalue()
                (head2, segments2) = loads(dump2)
                enc.end_dump()
                dump3 = file.getvalue()
                (head3, segments3) = loads(dump3)
        # Count is open-ended (-1) until end_dump finalises it.
        self.assertEqual(head0['segments'][0]['count'], -1)
        self.assertEqual(head1['segments'][0]['count'], -1)
        self.assertEqual(head2['segments'][0]['count'], -1)
        self.assertEqual(head3['segments'][0]['count'], 2)
        if 1 in segments0:
            self.assertEqual(len(segments0[1]), 0)
        self.assertEqual(len(segments1[1]), 1)
        self.assertEqual(len(segments2[1]), 2)
        self.assertEqual(len(segments3[1]), 2)

    def test_dump_progressively(self):
        """streaming=True context-manager API: grains appear incrementally, count finalised on exit."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        grain1 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        file = BytesIO()
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                enc = GSFEncoder(file, streaming=True)
                enc.add_segment()
                self.assertEqual(len(file.getvalue()), 0)
                with enc:
                    dump0 = file.getvalue()
                    (head0, segments0) = loads(dump0)
                    enc.add_grain(grain0)
                    dump1 = file.getvalue()
                    (head1, segments1) = loads(dump1)
                    enc.add_grain(grain1, segment_local_id=1)
                    dump2 = file.getvalue()
                    (head2, segments2) = loads(dump2)
                # Leaving the context finalises the dump.
                dump3 = file.getvalue()
                (head3, segments3) = loads(dump3)
        self.assertEqual(head0['segments'][0]['count'], -1)
        self.assertEqual(head1['segments'][0]['count'], -1)
        self.assertEqual(head2['segments'][0]['count'], -1)
        self.assertEqual(head3['segments'][0]['count'], 2)
        if 1 in segments0:
            self.assertEqual(len(segments0[1]), 0)
        self.assertEqual(len(segments1[1]), 1)
        self.assertEqual(len(segments2[1]), 2)
        self.assertEqual(len(segments3[1]), 2)

    async def test_async_encode_progressively(self):
        """Async streaming encode: same incremental behaviour via the async context manager."""
        src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034')
        flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab')
        grain0 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        grain1 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        file = BytesIO()
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                # Segment supplied up front via the segments= kwarg rather than add_segment().
                enc = GSFEncoder(file, streaming=True, segments=[{}])
                self.assertEqual(len(file.getvalue()), 0)
                async with enc as enc:
                    dump0 = file.getvalue()
                    (head0, segments0) = loads(dump0)
                    await enc.add_grain(grain0)
                    dump1 = file.getvalue()
                    (head1, segments1) = loads(dump1)
                    await enc.add_grain(grain1, segment_local_id=1)
                    dump2 = file.getvalue()
                    (head2, segments2) = loads(dump2)
                dump3 = file.getvalue()
                (head3, segments3) = loads(dump3)
        self.assertEqual(head0['segments'][0]['count'], -1)
        self.assertEqual(head1['segments'][0]['count'], -1)
        self.assertEqual(head2['segments'][0]['count'], -1)
        self.assertEqual(head3['segments'][0]['count'], 2)
        if 1 in segments0:
            self.assertEqual(len(segments0[1]), 0)
        # (continuation) remaining progressive-encode checks of the async streaming test.
        self.assertEqual(len(segments1[1]), 1)
        self.assertEqual(len(segments2[1]), 2)
        self.assertEqual(len(segments3[1]), 2)

    @suppress_deprecation_warnings
    def test_end_dump_without_start_does_nothing(self):
        """end_dump() on an encoder that never started a dump must not write anything."""
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        file = BytesIO()
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                enc = GSFEncoder(file)
                enc.add_segment()
                dump0 = file.getvalue()
                enc.end_dump()
                dump1 = file.getvalue()
        self.assertEqual(dump0, dump1)

    def test_dumps_fails_with_invalid_tags(self):
        """dumps() raises GSFEncodeError when the tags list contains non-tag entries."""
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with self.assertRaises(GSFEncodeError):
            with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
                with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                    (head, segments) = loads(dumps([], tags=[None, None]))

    def test_dumps_can_set_tags(self):
        """File-level and segment-level tags supplied to dumps() appear in the decoded header."""
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                (head, segments) = loads(dumps([], tags=[('potato', 'harvest')], segment_tags=[('rainbow', 'dash')]))
        self.assertEqual(len(head['tags']), 1)
        self.assertIn(('potato', 'harvest'), head['tags'])
        self.assertEqual(len(head['segments'][0]['tags']), 1)
        self.assertIn(('rainbow', 'dash'), head['segments'][0]['tags'])

    def test_encoder_access_methods(self):
        """GSFEncoder exposes tags as a tuple and segments as a frozendict of segment objects."""
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                enc = GSFEncoder([], tags=[('potato', 'harvest')])
                enc.add_segment(tags=[('rainbow', 'dash')])
        self.assertEqual(enc.tags, (('potato', 'harvest'),))
        self.assertIsInstance(enc.segments, frozendict)
        self.assertEqual(enc.segments[1].tags, (('rainbow', 'dash'),))

    @suppress_deprecation_warnings
    def test_encoder_raises_when_adding_to_active_encode__deprecated(self):
        """Deprecated API: tags/segments cannot be added once start_dump() has run."""
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        file = BytesIO()
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                enc = GSFEncoder(file, tags=[('potato', 'harvest')])
                seg = enc.add_segment(tags=[('rainbow', 'dash')])
                # Duplicate local_id and malformed tags are rejected even before the dump.
                with self.assertRaises(GSFEncodeError):
                    enc.add_segment(local_id=1)
                with self.assertRaises(GSFEncodeError):
                    enc.add_segment(tags=[None])
                enc.start_dump()
                # Once active, structural additions raise GSFEncodeAddToActiveDump.
                with self.assertRaises(GSFEncodeAddToActiveDump):
                    enc.add_tag('upside', 'down')
                with self.assertRaises(GSFEncodeAddToActiveDump):
                    enc.add_segment()
                with self.assertRaises(GSFEncodeAddToActiveDump):
                    seg.add_tag('upside', 'down')

    def test_encoder_raises_when_adding_to_active_encode(self):
        """Streaming API: same restrictions apply while the encoder context is active."""
        uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'),
                 UUID('80af875c-1565-11e8-8f44-87ef081b48cd')]
        created = datetime(1983, 3, 29, 15, 15)
        file = BytesIO()
        with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)):
            with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids):
                enc = GSFEncoder(file, tags=[('potato', 'harvest')], streaming=True)
                seg = enc.add_segment(tags=[('rainbow', 'dash')])
                with self.assertRaises(GSFEncodeError):
                    enc.add_segment(local_id=1)
                with self.assertRaises(GSFEncodeError):
                    enc.add_segment(tags=[None])
                with enc:
                    with \
self.assertRaises(GSFEncodeAddToActiveDump): enc.add_tag('upside', 'down') with self.assertRaises(GSFEncodeAddToActiveDump): enc.add_segment() with self.assertRaises(GSFEncodeAddToActiveDump): seg.add_tag('upside', 'down') def test_encoder_can_add_grains_to_nonexistent_segment(self): src_id = UUID('e14e9d58-1567-11e8-8dd3-831a068eb034') flow_id = UUID('ee1eed58-1567-11e8-a971-3b901a2dd8ab') grain0 = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080) uuids = [UUID('7920b394-1565-11e8-86e0-8b42d4647ba8'), UUID('80af875c-1565-11e8-8f44-87ef081b48cd')] created = datetime(1983, 3, 29, 15, 15) file = BytesIO() with mock.patch('mediagrains.gsf.datetime', side_effect=datetime, now=mock.MagicMock(return_value=created)): with mock.patch('mediagrains.gsf.uuid1', side_effect=uuids): enc = GSFEncoder(file, tags=[('potato', 'harvest')]) enc.add_grain(grain0, segment_local_id=2) self.assertEqual(enc.segments[2]._grains[0], grain0) class TestGSFBlock(TestCase): """Test the GSF decoder block handler correctly parses various types""" async def test_read_uint(self): test_number = 4132 test_data = b"\x24\x10\x00\x00" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(test_number, await UUT.read_uint(4)) async def test_read_bool(self): test_data = b"\x00\x01\x02" # False, True (0x01 != 0), True (0x02 != 0) async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertFalse(await UUT.read_bool()) self.assertTrue(await UUT.read_bool()) self.assertTrue(await UUT.read_bool()) async def test_read_sint(self): test_number = -12856 test_data = b"\xC8\xCD\xFF" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(test_number, await UUT.read_sint(3)) async def test_read_string(self): """Test we can read a string, with Unicode characters""" test_string = u"Strings😁✔" test_data = b"Strings\xf0\x9f\x98\x81\xe2\x9c\x94" async with AsyncBytesIO(test_data) as fp: UUT = 
AsyncGSFBlock(fp) self.assertEqual(test_string, await UUT.read_string(14)) async def test_read_varstring(self): test_string = u"Strings😁✔" test_data = b"\x0e\x00Strings\xf0\x9f\x98\x81\xe2\x9c\x94" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(test_string, await UUT.read_varstring()) async def test_read_uuid(self): test_uuid = UUID("b06c65c8-51ac-4ad1-a839-2ef37107cc16") test_data = b"\xb0\x6c\x65\xc8\x51\xac\x4a\xd1\xa8\x39\x2e\xf3\x71\x07\xcc\x16" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(test_uuid, await UUT.read_uuid()) async def test_read_timestamp(self): test_timestamp = datetime(2018, 9, 8, 16, 0, 0) test_data = b"\xe2\x07\x09\x08\x10\x00\x00" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(test_timestamp, await UUT.read_timestamp()) async def test_read_ippts(self): test_timestamp = Timestamp(1536422400, 500) test_data = b"\x00\xf2\x93\x5b\x00\x00\xf4\x01\x00\x00" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(test_timestamp, await UUT.read_ippts()) async def test_read_rational(self): test_fraction = Fraction(4, 3) test_data = b"\x04\x00\x00\x00\x03\x00\x00\x00" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(test_fraction, await UUT.read_rational()) async def test_read_rational_zero_denominator(self): """Ensure the reader considers a Rational with zero denominator to be 0, not an error""" test_data = b"\x04\x00\x00\x00\x00\x00\x00\x00" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) self.assertEqual(Fraction(0), await UUT.read_rational()) async def test_read_uint_past_eof(self): """read_uint calls read() directly - test it raises EOFError correctly""" test_data = b"\x04\x00" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) with self.assertRaises(EOFError): await UUT.read_uint(4) async def test_read_string_past_eof(self): """read_string() 
calls read() directly - test it raises EOFError correctly""" test_data = b"Strin" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) with self.assertRaises(EOFError): await UUT.read_string(6) async def test_read_uuid_past_eof(self): """read_uuid() calls read() directly - test it raises EOFError correctly""" test_data = b"\xb0\x6c\x65\xc8\x51\xac\x4a\xd1\xa8\x39\x2e" async with AsyncBytesIO(test_data) as fp: UUT = AsyncGSFBlock(fp) with self.assertRaises(EOFError): await UUT.read_uuid() def _make_sample_stream(self, post_child_len=0): """Generate a stream of sample blocks for testing the context manager Structure looks like: blok (28 bytes + post_child_len) chil (12 bytes) chil (8 bytes) `post_child_len` additional bytes of data blok (8 bytes) :param post_child_len: Number of bytes to include after last child block - must be <256 :returns: AsyncBytesIO containing some blocks """ first_block_length = 28 + post_child_len test_stream = BytesIO() test_stream.write(b"blok") test_stream.write(bytes((first_block_length,))) test_stream.write(b"\x00\x00\x00") test_stream.write(b"chil\x0c\x00\x00\x00\x08\x09\x0a\x0b") test_stream.write(b"chil\x08\x00\x00\x00") for i in range(0, post_child_len): test_stream.write(b"\x42") test_stream.write(b"blk2\x08\x00\x00\x00") test_stream.seek(0, SEEK_SET) return AsyncBytesIO(test_stream.getvalue()) async def test_block_start_recorded(self): """Test that a AsyncGSFBlock records the block start point""" async with self._make_sample_stream() as test_stream: test_stream.seek(28, SEEK_SET) UUT = AsyncGSFBlock(test_stream) self.assertEqual(28, UUT.block_start) async def test_contextmanager_read_tag_size(self): """Test that the block tag and size are read when used in a context manager""" async with self._make_sample_stream() as test_stream: async with AsyncGSFBlock(test_stream) as UUT: self.assertEqual("blok", UUT.tag) self.assertEqual(28, UUT.size) async def test_contextmanager_skips_unwanted_blocks(self): """Test that 
AsyncGSFBlock() seeks over unwanted blocks""" async with self._make_sample_stream() as test_stream: async with AsyncGSFBlock(test_stream, want_tag="blk2") as UUT: self.assertEqual("blk2", UUT.tag) # blok is 28 bytes long, we should skip it, plus 8 bytes of blk2 self.assertEqual(28 + 8, test_stream.tell()) async def test_contextmanager_errors_unwanted_blocks(self): """Test that AsyncGSFBlock() raises GSFDecodeError when finding an unwanted block""" async with self._make_sample_stream() as test_stream: with self.assertRaises(GSFDecodeError): async with AsyncGSFBlock(test_stream, want_tag="chil", raise_on_wrong_tag=True): pass async def test_contextmanager_seeks_on_exit(self): """Test that the context manager seeks to the end of a block on exit""" async with self._make_sample_stream() as test_stream: async with AsyncGSFBlock(test_stream): pass # First block is 28 bytes long, so we should be zero-index position 28 afterwards self.assertEqual(28, test_stream.tell()) async def test_contextmanager_get_remaining(self): """Test the context manager gets the number of bytes left in the block correctly""" async with self._make_sample_stream() as test_stream: async with AsyncGSFBlock(test_stream) as UUT: await UUT.read_uint(4) # Use a read to skip ahead a bit self.assertEqual(28 - 8 - 4, UUT.get_remaining()) # Block was 28 bytes, 8 bytes header, 4 bytes read_uint async def test_contextmanager_has_child(self): """Test the context manager detects whether another child is present correctly""" async with self._make_sample_stream() as test_stream: async with AsyncGSFBlock(test_stream) as UUT: self.assertTrue(UUT.has_child_block()) await test_stream.read(12) self.assertTrue(UUT.has_child_block()) await test_stream.read(8) self.assertFalse(UUT.has_child_block()) async def test_contextmanager_has_child_strict_blocks(self): """Ensure that when strict mode is enabled, has_child errors on partial blocks""" async with self._make_sample_stream(post_child_len=4) as test_stream: async with 
AsyncGSFBlock(test_stream) as UUT: await test_stream.read(12 + 8) # Read to the end of the child blocks with self.assertRaises(GSFDecodeError): UUT.has_child_block(strict_blocks=True) async def test_contextmanager_has_child_no_strict_blocks(self): """Ensure that when strict mode is off, has_child doesn't error on partial blocks""" async with self._make_sample_stream(post_child_len=4) as test_stream: async with AsyncGSFBlock(test_stream) as UUT: await test_stream.read(12 + 8) # Read to the end of the child blocks self.assertFalse(UUT.has_child_block(strict_blocks=False)) async def test_contextmanager_child_blocks_generator(self): """Ensure the child blocks generator returns a block, and seeks afterwards""" async with self._make_sample_stream() as test_stream: async with AsyncGSFBlock(test_stream) as UUT: loop_count = 0 child_bytes_consumed = 0 async for block in UUT.child_blocks(): child_bytes_consumed += block.size loop_count += 1 # Did we get both child blocks? self.assertEqual(2, loop_count) # Did we seek on exit from each loop iteration self.assertEqual(child_bytes_consumed + UUT.block_start + 8, test_stream.tell()) class TestGSFDecoder(TestCase): """Tests for the GSFDecoder in its more object-oriented mode Note that very little testing of the decoded data happens here, that's handled by TestGSFLoads() """ def test_decode_headers(self): video_data_stream = BytesIO(VIDEO_DATA) with GSFDecoder(file_data=video_data_stream) as dec: head = dec.file_headers self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 38, 22)) self.assertEqual(head['id'], UUID('163fd9b7-bef4-4d92-8488-31f3819be008')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('c6a3d3ff-74c0-446d-b59e-de1041f27e8a')) def test_generate_grains(self): """Test that the generator yields each grain""" video_data_stream = BytesIO(VIDEO_DATA) with GSFDecoder(file_data=video_data_stream) as dec: grain_count = 0 for (grain, local_id) in dec.grains(): 
self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('6e55f251-f75a-4d56-b3af-edb8b7993c3c')) grain_count += 1 self.assertEqual(10, grain_count) # There are 10 grains in the file async def test_async_decode_headers(self): video_data_stream = AsyncBytesIO(VIDEO_DATA) async with GSFDecoder(file_data=video_data_stream) as dec: head = dec.file_headers self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 38, 22)) self.assertEqual(head['id'], UUID('163fd9b7-bef4-4d92-8488-31f3819be008')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('c6a3d3ff-74c0-446d-b59e-de1041f27e8a')) async def test_async_generate_grains(self): """Test that the generator yields each grain""" video_data_stream = AsyncBytesIO(VIDEO_DATA) async with GSFDecoder(file_data=video_data_stream) as dec: grain_count = 0 async for (grain, local_id) in dec.grains(loading_mode=GrainDataLoadingMode.LOAD_IMMEDIATELY): self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('6e55f251-f75a-4d56-b3af-edb8b7993c3c')) grain_count += 1 self.assertEqual(10, grain_count) # There are 10 grains in the file async def test_async_to_sync_generate_grains(self): """Test that the generator yields each grain when run snchronously from asynchronous code""" video_data_stream = BytesIO(VIDEO_DATA) with GSFDecoder(file_data=video_data_stream) as dec: grain_count = 0 for (grain, local_id) in dec.grains(loading_mode=GrainDataLoadingMode.LOAD_IMMEDIATELY): self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('6e55f251-f75a-4d56-b3af-edb8b7993c3c')) grain_count += 1 self.assertEqual(10, grain_count) # There are 10 grains in the file async def 
test_async_generate_grains_load_lazily(self): """Test that the generator yields each grain""" video_data_stream = AsyncBytesIO(VIDEO_DATA) async with GSFDecoder(file_data=video_data_stream) as dec: grain_count = 0 async for (grain, local_id) in dec.grains(loading_mode=GrainDataLoadingMode.ALWAYS_DEFER_LOAD_IF_POSSIBLE): self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('6e55f251-f75a-4d56-b3af-edb8b7993c3c')) self.assertIsNone(grain.data) await grain self.assertIsNotNone(grain.data) grain_count += 1 self.assertEqual(10, grain_count) # There are 10 grains in the file @suppress_deprecation_warnings def test_decode_headers__deprecated(self): video_data_stream = BytesIO(VIDEO_DATA) UUT = GSFDecoder(file_data=video_data_stream) head = UUT.decode_file_headers() self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 38, 22)) self.assertEqual(head['id'], UUID('163fd9b7-bef4-4d92-8488-31f3819be008')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('c6a3d3ff-74c0-446d-b59e-de1041f27e8a')) @suppress_deprecation_warnings def test_generate_grains__deprecated(self): """Test that the generator yields each grain""" video_data_stream = BytesIO(VIDEO_DATA) UUT = GSFDecoder(file_data=video_data_stream) UUT.decode_file_headers() grain_count = 0 for (grain, local_id) in UUT.grains(): self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('6e55f251-f75a-4d56-b3af-edb8b7993c3c')) grain_count += 1 self.assertEqual(10, grain_count) # There are 10 grains in the file async def test_async_comparison_of_lazy_loaded_grains(self): async with GSFDecoder(file_data=AsyncBytesIO(VIDEO_DATA)) as dec: grains = [grain async for (grain, local_id) in dec.grains(loading_mode=GrainDataLoadingMode.LOAD_IMMEDIATELY)] # Restart the decoder async with 
GSFDecoder(file_data=AsyncBytesIO(VIDEO_DATA)) as dec: # Annoyingly anext isn't a global in python 3.6 grain = (await dec.grains(loading_mode=GrainDataLoadingMode.ALWAYS_DEFER_LOAD_IF_POSSIBLE).__anext__())[0] await grain comp = compare_grain(grains[0], grain) self.assertTrue(comp, msg="{!r}".format(comp)) def test_comparison_of_lazy_loaded_grains(self): video_data_stream = BytesIO(VIDEO_DATA) with GSFDecoder(file_data=video_data_stream) as dec: grains = [grain for (grain, local_id) in dec.grains(loading_mode=GrainDataLoadingMode.LOAD_IMMEDIATELY)] # Restart the decoder video_data_stream.seek(0) with GSFDecoder(file_data=video_data_stream) as dec: comp = compare_grain(grains[0], next(dec.grains(loading_mode=GrainDataLoadingMode.ALWAYS_DEFER_LOAD_IF_POSSIBLE))[0]) self.assertTrue(comp, msg="{!s}".format(comp)) @suppress_deprecation_warnings def test_comparison_of_lazy_loaded_grains__deprecated(self): video_data_stream = BytesIO(VIDEO_DATA) UUT = GSFDecoder(file_data=video_data_stream) UUT.decode_file_headers() grains = [grain for (grain, local_id) in UUT.grains(loading_mode=GrainDataLoadingMode.LOAD_IMMEDIATELY)] # Restart the decoder video_data_stream.seek(0) UUT = GSFDecoder(file_data=video_data_stream) UUT.decode_file_headers() self.assertTrue(compare_grain(grains[0], next(UUT.grains(load_lazily=True))[0])) async def test_async_local_id_filtering(self): interleaved_data_stream = AsyncBytesIO(INTERLEAVED_DATA) async with GSFDecoder(file_data=interleaved_data_stream) as dec: local_ids = set() flow_ids = set() async for (grain, local_id) in dec.grains(): local_ids.add(local_id) flow_ids.add(grain.flow_id) self.assertEqual(local_ids, set([1, 2])) self.assertEqual(flow_ids, set([UUID('28e4e09e-3517-11e9-8da2-5065f34ed007'), UUID('2472f38e-3517-11e9-8da2-5065f34ed007')])) async with GSFDecoder(file_data=interleaved_data_stream) as dec: async for (grain, local_id) in dec.grains(local_ids=[1]): self.assertIsInstance(grain, AUDIOGRAIN) self.assertEqual(grain.source_id, 
UUID('1f8fd27e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(grain.flow_id, UUID('28e4e09e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(local_id, 1) async with GSFDecoder(file_data=interleaved_data_stream) as dec: async for (grain, local_id) in dec.grains(local_ids=[2]): self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('1f8fd27e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(grain.flow_id, UUID('2472f38e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(local_id, 2) def test_local_id_filtering(self): interleaved_data_stream = BytesIO(INTERLEAVED_DATA) with GSFDecoder(file_data=interleaved_data_stream) as dec: local_ids = set() flow_ids = set() for (grain, local_id) in dec.grains(): local_ids.add(local_id) flow_ids.add(grain.flow_id) self.assertEqual(local_ids, set([1, 2])) self.assertEqual(flow_ids, set([UUID('28e4e09e-3517-11e9-8da2-5065f34ed007'), UUID('2472f38e-3517-11e9-8da2-5065f34ed007')])) interleaved_data_stream.seek(0) with GSFDecoder(file_data=interleaved_data_stream) as dec: for (grain, local_id) in dec.grains(local_ids=[1]): self.assertIsInstance(grain, AUDIOGRAIN) self.assertEqual(grain.source_id, UUID('1f8fd27e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(grain.flow_id, UUID('28e4e09e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(local_id, 1) interleaved_data_stream.seek(0) with GSFDecoder(file_data=interleaved_data_stream) as dec: for (grain, local_id) in dec.grains(local_ids=[2]): self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('1f8fd27e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(grain.flow_id, UUID('2472f38e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(local_id, 2) @suppress_deprecation_warnings def test_local_id_filtering__deprecated(self): interleaved_data_stream = BytesIO(INTERLEAVED_DATA) UUT = GSFDecoder(file_data=interleaved_data_stream) UUT.decode_file_headers() local_ids = set() flow_ids = set() for (grain, local_id) in UUT.grains(): local_ids.add(local_id) 
flow_ids.add(grain.flow_id) self.assertEqual(local_ids, set([1, 2])) self.assertEqual(flow_ids, set([UUID('28e4e09e-3517-11e9-8da2-5065f34ed007'), UUID('2472f38e-3517-11e9-8da2-5065f34ed007')])) interleaved_data_stream.seek(0) UUT.decode_file_headers() for (grain, local_id) in UUT.grains(local_ids=[1]): self.assertIsInstance(grain, AUDIOGRAIN) self.assertEqual(grain.source_id, UUID('1f8fd27e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(grain.flow_id, UUID('28e4e09e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(local_id, 1) interleaved_data_stream.seek(0) UUT.decode_file_headers() for (grain, local_id) in UUT.grains(local_ids=[2]): self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.source_id, UUID('1f8fd27e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(grain.flow_id, UUID('2472f38e-3517-11e9-8da2-5065f34ed007')) self.assertEqual(local_id, 2) @suppress_deprecation_warnings def test_skip_grain_data__deprecated(self): """Test that the `skip_data` parameter causes grain data to be seeked over""" grain_size = 194612 # Parsed from examples/video.gsf hex dump grdt_block_size = 194408 # Parsed from examples/video.gsf hex dump grain_header_size = grain_size - grdt_block_size video_data_stream = BytesIO(VIDEO_DATA) UUT = GSFDecoder(file_data=video_data_stream) UUT.decode_file_headers() reader_mock = mock.MagicMock(side_effect=video_data_stream.read) with mock.patch.object(video_data_stream, "read", new=reader_mock): for (grain, local_id) in UUT.grains(skip_data=True): self.assertEqual(0, len(grain.data)) # Add up the bytes read for this grain, then reset the read counter bytes_read = 0 for args, _ in reader_mock.call_args_list: bytes_read += args[0] reader_mock.reset_mock() # No more than the number of bytes in the header should have been read # However some header bytes may be seeked over instead self.assertLessEqual(bytes_read, grain_header_size) def test_lazy_load_grain_data(self): """Test that the `load_lazily` parameter causes grain data to be seeked 
over, but then loaded invisibly when needed later""" grain_size = 194612 # Parsed from examples/video.gsf hex dump grdt_block_size = 194408 # Parsed from examples/video.gsf hex dump grain_header_size = grain_size - grdt_block_size grain_data_size = grdt_block_size - 8 video_data_stream = BytesIO(VIDEO_DATA) reader_mock = mock.MagicMock(side_effect=video_data_stream.read) with mock.patch.object(video_data_stream, "read", new=reader_mock): grains = [] with GSFDecoder(file_data=video_data_stream) as dec: reader_mock.reset_mock() for (grain, local_id) in dec.grains(loading_mode=GrainDataLoadingMode.ALWAYS_DEFER_LOAD_IF_POSSIBLE): grains.append(grain) # Add up the bytes read for this grain, then reset the read counter bytes_read = 0 for args, _ in reader_mock.call_args_list: bytes_read += args[0] reader_mock.reset_mock() # No more than the number of bytes in the header should have been read # However some header bytes may be seeked over instead self.assertGreater(bytes_read, 0) self.assertLessEqual(bytes_read, grain_header_size) for grain in grains: reader_mock.reset_mock() self.assertEqual(grain.length, grain_data_size) reader_mock.assert_not_called() x = grain.data[grain_data_size-1] # noqa: F841 bytes_read = 0 for (args, _) in reader_mock.call_args_list: bytes_read += args[0] self.assertEqual(bytes_read, grain_data_size) self.assertEqual(grain.length, grain_data_size) @suppress_deprecation_warnings def test_lazy_load_grain_data__deprecated(self): """Test that the `load_lazily` parameter causes grain data to be seeked over, but then loaded invisibly when needed later""" grain_size = 194612 # Parsed from examples/video.gsf hex dump grdt_block_size = 194408 # Parsed from examples/video.gsf hex dump grain_header_size = grain_size - grdt_block_size grain_data_size = grdt_block_size - 8 video_data_stream = BytesIO(VIDEO_DATA) UUT = GSFDecoder(file_data=video_data_stream) UUT.decode_file_headers() grains = [] reader_mock = mock.MagicMock(side_effect=video_data_stream.read) 
with mock.patch.object(video_data_stream, "read", new=reader_mock): for (grain, local_id) in UUT.grains(load_lazily=True): grains.append(grain) # Add up the bytes read for this grain, then reset the read counter bytes_read = 0 for args, _ in reader_mock.call_args_list: bytes_read += args[0] reader_mock.reset_mock() # No more than the number of bytes in the header should have been read # However some header bytes may be seeked over instead self.assertGreater(bytes_read, 0) self.assertLessEqual(bytes_read, grain_header_size) for grain in grains: reader_mock.reset_mock() self.assertEqual(grain.length, grain_data_size) reader_mock.assert_not_called() x = grain.data[grain_data_size-1] # noqa: F841 bytes_read = 0 for (args, _) in reader_mock.call_args_list: bytes_read += args[0] self.assertEqual(bytes_read, grain_data_size) self.assertEqual(grain.length, grain_data_size) class TestGSFLoads(TestCase): def test_loads_video(self): (head, segments) = loads(VIDEO_DATA) self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 38, 22)) self.assertEqual(head['id'], UUID('163fd9b7-bef4-4d92-8488-31f3819be008')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('c6a3d3ff-74c0-446d-b59e-de1041f27e8a')) self.assertIn(head['segments'][0]['local_id'], segments) self.assertEqual(len(segments[head['segments'][0]['local_id']]), head['segments'][0]['count']) ots = Timestamp(1420102800, 0) for grain in segments[head['segments'][0]['local_id']]: self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.grain_type, "video") self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('6e55f251-f75a-4d56-b3af-edb8b7993c3c')) self.assertEqual(grain.origin_timestamp, ots) ots += TimeOffset.from_nanosec(20000000) self.assertEqual(grain.format, CogFrameFormat.U8_420) self.assertEqual(grain.layout, CogFrameLayout.FULL_FRAME) self.assertEqual(grain.width, 480) self.assertEqual(grain.height, 
270) self.assertEqual(len(grain.components), 3) self.assertEqual(grain.components[0].width, 480) self.assertEqual(grain.components[0].height, 270) self.assertEqual(grain.components[0].stride, 480) self.assertEqual(grain.components[0].length, 480*270) self.assertEqual(grain.components[0].offset, 0) self.assertEqual(grain.components[1].width, 240) self.assertEqual(grain.components[1].height, 135) self.assertEqual(grain.components[1].stride, 240) self.assertEqual(grain.components[1].length, 240*135) self.assertEqual(grain.components[1].offset, 480*270) self.assertEqual(grain.components[2].width, 240) self.assertEqual(grain.components[2].height, 135) self.assertEqual(grain.components[2].stride, 240) self.assertEqual(grain.components[2].length, 240*135) self.assertEqual(grain.components[2].offset, 480*270 + 240*135) self.assertEqual(len(grain.data), grain.components[0].length + grain.components[1].length + grain.components[2].length) def test_load_video(self): file = BytesIO(VIDEO_DATA) (head, segments) = load(file) self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 38, 22)) self.assertEqual(head['id'], UUID('163fd9b7-bef4-4d92-8488-31f3819be008')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('c6a3d3ff-74c0-446d-b59e-de1041f27e8a')) self.assertIn(head['segments'][0]['local_id'], segments) self.assertEqual(len(segments[head['segments'][0]['local_id']]), head['segments'][0]['count']) ots = Timestamp(1420102800, 0) for grain in segments[head['segments'][0]['local_id']]: self.assertIsInstance(grain, VIDEOGRAIN) self.assertEqual(grain.grain_type, "video") self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('6e55f251-f75a-4d56-b3af-edb8b7993c3c')) self.assertEqual(grain.origin_timestamp, ots) ots += TimeOffset.from_nanosec(20000000) self.assertEqual(grain.format, CogFrameFormat.U8_420) self.assertEqual(grain.layout, CogFrameLayout.FULL_FRAME) 
self.assertEqual(grain.width, 480) self.assertEqual(grain.height, 270) self.assertEqual(len(grain.components), 3) self.assertEqual(grain.components[0].width, 480) self.assertEqual(grain.components[0].height, 270) self.assertEqual(grain.components[0].stride, 480) self.assertEqual(grain.components[0].length, 480*270) self.assertEqual(grain.components[1].width, 240) self.assertEqual(grain.components[1].height, 135) self.assertEqual(grain.components[1].stride, 240) self.assertEqual(grain.components[1].length, 240*135) self.assertEqual(grain.components[2].width, 240) self.assertEqual(grain.components[2].height, 135) self.assertEqual(grain.components[2].stride, 240) self.assertEqual(grain.components[2].length, 240*135) self.assertEqual(len(grain.data), grain.components[0].length + grain.components[1].length + grain.components[2].length) def test_load_uses_custom_grain_function(self): file = BytesIO(VIDEO_DATA) grain_parser = mock.MagicMock(name="grain_parser") (head, segments) = load(file, parse_grain=grain_parser) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), 10) self.assertEqual(grain_parser.call_count, 10) async def test_async_load_uses_custom_grain_function(self): file = AsyncBytesIO(VIDEO_DATA) grain_parser = mock.MagicMock(name="grain_parser") (head, segments) = await load(file, parse_grain=grain_parser) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), 10) self.assertEqual(grain_parser.call_count, 10) def test_loads_uses_custom_grain_function(self): s = VIDEO_DATA grain_parser = mock.MagicMock(name="grain_parser") (head, segments) = loads(s, parse_grain=grain_parser) self.assertEqual(len(segments), 1) self.assertIn(1, segments) self.assertEqual(len(segments[1]), 10) self.assertEqual(grain_parser.call_count, 10) def test_loads_audio(self): (head, segments) = loads(AUDIO_DATA) self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 37, 50)) 
self.assertEqual(head['id'], UUID('781fb6c5-d22f-4df5-ba69-69059efd5ced')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('fc4c5533-3fad-4437-93c0-8668cb876578')) self.assertIn(head['segments'][0]['local_id'], segments) self.assertEqual(len(segments[head['segments'][0]['local_id']]), head['segments'][0]['count']) start_ots = Timestamp(1420102800, 0) ots = start_ots total_samples = 0 for grain in segments[head['segments'][0]['local_id']]: self.assertIsInstance(grain, AUDIOGRAIN) self.assertEqual(grain.grain_type, "audio") self.assertEqual(grain.source_id, UUID('38bfd902-b35f-40d6-9ecf-dc95869130cf')) self.assertEqual(grain.flow_id, UUID('f1c8c095-5739-46f4-9bbc-3d7050c9ba23')) self.assertEqual(grain.origin_timestamp, ots) self.assertEqual(grain.format, CogAudioFormat.S24_INTERLEAVED) self.assertEqual(grain.channels, 2) self.assertEqual(grain.samples, 1024) self.assertEqual(grain.sample_rate, 48000) self.assertEqual(len(grain.data), 6144) total_samples += grain.samples ots = start_ots + TimeOffset.from_count(total_samples, grain.sample_rate) def test_loads_coded_video(self): (head, segments) = loads(CODED_VIDEO_DATA) self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 38, 41)) self.assertEqual(head['id'], UUID('8875f02c-2528-4566-9e9a-23efc3a9bbe5')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('bdfa1343-0a20-4a98-92f5-0f7f0eb75479')) self.assertIn(head['segments'][0]['local_id'], segments) self.assertEqual(len(segments[head['segments'][0]['local_id']]), head['segments'][0]['count']) ots = Timestamp(1420102800, 0) unit_offsets = [ ([0, 6, 34, 42, 711, 719], 36114), ([0, 6, 14], 380), ([0, 6, 14], 8277), ([0, 6, 14], 4914), ([0, 6, 14], 4961), ([0, 6, 14], 3777), ([0, 6, 14], 1950), ([0, 6, 14], 31), ([0, 6, 14], 25), ([0, 6, 14], 6241)] for grain in segments[head['segments'][0]['local_id']]: self.assertIsInstance(grain, CODEDVIDEOGRAIN) 
self.assertEqual(grain.grain_type, "coded_video") self.assertEqual(grain.source_id, UUID('49578552-fb9e-4d3e-a197-3e3c437a895d')) self.assertEqual(grain.flow_id, UUID('b6b05efb-6067-4ff8-afac-ec735a85674e')) self.assertEqual(grain.origin_timestamp, ots) ots += TimeOffset.from_nanosec(20000000) self.assertEqual(grain.format, CogFrameFormat.H264) self.assertEqual(grain.layout, CogFrameLayout.FULL_FRAME) self.assertEqual(grain.origin_width, 1920) self.assertEqual(grain.origin_height, 1080) self.assertEqual(grain.coded_width, 0) self.assertEqual(grain.coded_height, 0) self.assertEqual(grain.length, unit_offsets[0][1]) self.assertEqual(grain.temporal_offset, 0) self.assertEqual(grain.unit_offsets, unit_offsets[0][0]) unit_offsets.pop(0) def test_loads_rejects_incorrect_type_file(self): with self.assertRaises(GSFDecodeBadFileTypeError) as cm: loads(b"POTATO23\x07\x00\x00\x00") self.assertEqual(cm.exception.offset, 0) self.assertEqual(cm.exception.filetype, "POTATO23") def test_loads_rejects_incorrect_version_file(self): with self.assertRaises(GSFDecodeBadVersionError) as cm: loads(b"SSBBgrsg\x08\x00\x03\x00") self.assertEqual(cm.exception.offset, 0) self.assertEqual(cm.exception.major, 8) self.assertEqual(cm.exception.minor, 3) def test_loads_rejects_bad_head_tag(self): with self.assertRaises(GSFDecodeError) as cm: loads(b"SSBBgrsg\x07\x00\x00\x00" + b"\xff\xff\xff\xff\x00\x00\x00\x00") self.assertEqual(cm.exception.offset, 12) def test_loads_raises_exception_without_head(self): with self.assertRaises(GSFDecodeError) as cm: loads(b"SSBBgrsg\x07\x00\x00\x00") self.assertEqual(cm.exception.offset, 12) def test_loads_skips_unknown_block_before_head(self): (head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + b"dumy\x08\x00\x00\x00" + b"head\x1f\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f") self.assertEqual(head['id'], UUID('d19c0b91-1590-11e8-8580-dca904824eec')) self.assertEqual(head['created'], 
datetime(1983, 3, 29, 15, 15, 15)) self.assertEqual(head['segments'], []) self.assertEqual(head['tags'], []) def test_loads_skips_unknown_block_instead_of_segm(self): (head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + b"head\x27\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + b"dumy\x08\x00\x00\x00") self.assertEqual(head['id'], UUID('d19c0b91-1590-11e8-8580-dca904824eec')) self.assertEqual(head['created'], datetime(1983, 3, 29, 15, 15, 15)) self.assertEqual(head['segments'], []) self.assertEqual(head['tags'], []) def test_loads_skips_unknown_block_before_segm(self): (head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + (b"head\x49\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + (b"dumy\x08\x00\x00\x00") + (b"segm\x22\x00\x00\x00" + b"\x01\x00" + b"\xd3\xe1\x91\xf0\x15\x94\x11\xe8\x91\xac\xdc\xa9\x04\x82N\xec" + b"\x00\x00\x00\x00\x00\x00\x00\x00"))) self.assertEqual(head['id'], UUID('d19c0b91-1590-11e8-8580-dca904824eec')) self.assertEqual(head['created'], datetime(1983, 3, 29, 15, 15, 15)) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertEqual(head['segments'][0]['id'], UUID('d3e191f0-1594-11e8-91ac-dca904824eec')) self.assertEqual(head['segments'][0]['tags'], []) self.assertEqual(head['segments'][0]['count'], 0) self.assertEqual(head['tags'], []) def test_loads_raises_when_head_too_small(self): with self.assertRaises(GSFDecodeError) as cm: (head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + (b"head\x29\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + (b"dumy\x08\x00\x00\x00") + (b"segm\x22\x00\x00\x00" + b"\x01\x00" + b"\xd3\xe1\x91\xf0\x15\x94\x11\xe8\x91\xac\xdc\xa9\x04\x82N\xec" + b"\x00\x00\x00\x00\x00\x00\x00\x00"))) self.assertEqual(cm.exception.offset, 51) def 
test_loads_raises_when_segm_too_small(self): with self.assertRaises(GSFDecodeError) as cm: (head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + (b"head\x41\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + (b"segm\x21\x00\x00\x00" + b"\x01\x00" + b"\xd3\xe1\x91\xf0\x15\x94\x11\xe8\x91\xac\xdc\xa9\x04\x82N\xec" + b"\x00\x00\x00\x00\x00\x00\x00\x00"))) self.assertEqual(cm.exception.offset, 77) def test_loads_decodes_tils(self): src_id = UUID('c707d64c-1596-11e8-a3fb-dca904824eec') flow_id = UUID('da78668a-1596-11e8-a577-dca904824eec') (head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + (b"head\x41\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + (b"segm\x22\x00\x00\x00" + b"\x01\x00" + b"\xd3\xe1\x91\xf0\x15\x94\x11\xe8\x91\xac\xdc\xa9\x04\x82N\xec" + b"\x01\x00\x00\x00\x00\x00\x00\x00")) + (b"grai\x8d\x00\x00\x00" + b"\x01\x00" + (b"gbhd\x83\x00\x00\x00" + src_id.bytes + flow_id.bytes + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + (b"tils\x27\x00\x00\x00" + b"\x01\x00" + b"dummy timecode\x00\x00" + b"\x07\x00\x00\x00" + b"\x19\x00\x00\x00\x01\x00\x00\x00" + b"\x00"))) + (b"grai\x08\x00\x00\x00")) self.assertEqual(head['id'], UUID('d19c0b91-1590-11e8-8580-dca904824eec')) self.assertEqual(head['created'], datetime(1983, 3, 29, 15, 15, 15)) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertEqual(head['segments'][0]['id'], UUID('d3e191f0-1594-11e8-91ac-dca904824eec')) self.assertEqual(head['segments'][0]['tags'], []) self.assertEqual(head['segments'][0]['count'], 1) self.assertEqual(head['tags'], []) self.assertEqual(segments[1][0].timelabels, [{'tag': 'dummy timecode', 'timelabel': 
{'frames_since_midnight': 7, 'frame_rate_numerator': 25, 'frame_rate_denominator': 1, 'drop_frame': False}}]) async def test_async_load_decodes_tils(self): src_id = UUID('c707d64c-1596-11e8-a3fb-dca904824eec') flow_id = UUID('da78668a-1596-11e8-a577-dca904824eec') fp = AsyncBytesIO(b"SSBBgrsg\x07\x00\x00\x00" + (b"head\x41\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + (b"segm\x22\x00\x00\x00" + b"\x01\x00" + b"\xd3\xe1\x91\xf0\x15\x94\x11\xe8\x91\xac\xdc\xa9\x04\x82N\xec" + b"\x01\x00\x00\x00\x00\x00\x00\x00")) + (b"grai\x8d\x00\x00\x00" + b"\x01\x00" + (b"gbhd\x83\x00\x00\x00" + src_id.bytes + flow_id.bytes + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + (b"tils\x27\x00\x00\x00" + b"\x01\x00" + b"dummy timecode\x00\x00" + b"\x07\x00\x00\x00" + b"\x19\x00\x00\x00\x01\x00\x00\x00" + b"\x00"))) + (b"grai\x08\x00\x00\x00")) (head, segments) = await load(fp) self.assertEqual(head['id'], UUID('d19c0b91-1590-11e8-8580-dca904824eec')) self.assertEqual(head['created'], datetime(1983, 3, 29, 15, 15, 15)) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['local_id'], 1) self.assertEqual(head['segments'][0]['id'], UUID('d3e191f0-1594-11e8-91ac-dca904824eec')) self.assertEqual(head['segments'][0]['tags'], []) self.assertEqual(head['segments'][0]['count'], 1) self.assertEqual(head['tags'], []) self.assertEqual(segments[1][0].timelabels, [{'tag': 'dummy timecode', 'timelabel': {'frames_since_midnight': 7, 'frame_rate_numerator': 25, 'frame_rate_denominator': 1, 'drop_frame': False}}]) def test_loads_raises_when_grain_type_unknown(self): with self.assertRaises(GSFDecodeError) as cm: src_id = UUID('c707d64c-1596-11e8-a3fb-dca904824eec') flow_id = UUID('da78668a-1596-11e8-a577-dca904824eec') 
(head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + (b"head\x41\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + (b"segm\x22\x00\x00\x00" + b"\x01\x00" + b"\xd3\xe1\x91\xf0\x15\x94\x11\xe8\x91\xac\xdc\xa9\x04\x82N\xec" + b"\x01\x00\x00\x00\x00\x00\x00\x00")) + (b"grai\x8d\x00\x00\x00" + b"\x01\x00" + (b"gbhd\x83\x00\x00\x00" + src_id.bytes + flow_id.bytes + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + (b"dumy\x08\x00\x00\x00")))) self.assertEqual(cm.exception.offset, 179) def test_loads_decodes_empty_grains(self): src_id = UUID('c707d64c-1596-11e8-a3fb-dca904824eec') flow_id = UUID('da78668a-1596-11e8-a577-dca904824eec') (head, segments) = loads(b"SSBBgrsg\x07\x00\x00\x00" + (b"head\x41\x00\x00\x00" + b"\xd1\x9c\x0b\x91\x15\x90\x11\xe8\x85\x80\xdc\xa9\x04\x82N\xec" + b"\xbf\x07\x03\x1d\x0f\x0f\x0f" + (b"segm\x22\x00\x00\x00" + b"\x01\x00" + b"\xd3\xe1\x91\xf0\x15\x94\x11\xe8\x91\xac\xdc\xa9\x04\x82N\xec" + b"\x02\x00\x00\x00\x00\x00\x00\x00")) + (b"grai\x66\x00\x00\x00" + b"\x01\x00" + (b"gbhd\x5c\x00\x00\x00" + src_id.bytes + flow_id.bytes + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00")) + (b"dumy\x08\x00\x00\x00") + (b"grai\x6E\x00\x00\x00" + b"\x01\x00" + (b"gbhd\x5c\x00\x00\x00" + src_id.bytes + flow_id.bytes + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00") + (b"grdt\x08\x00\x00\x00")) + (b"dumy\x08\x00\x00\x00")) 
self.assertEqual(len(segments[1]), 2) self.assertEqual(segments[1][0].grain_type, "empty") self.assertIsNone(segments[1][0].data) self.assertEqual(segments[1][1].grain_type, "empty") self.assertIsNone(segments[1][1].data) def test_loads_coded_audio(self): (head, segments) = loads(CODED_AUDIO_DATA) self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 38, 5)) self.assertEqual(head['id'], UUID('2dbc5889-15f1-427c-b727-5201dd3b053c')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('6ca3a217-f2c2-4344-832b-6ea87bc5ddb8')) self.assertIn(head['segments'][0]['local_id'], segments) self.assertEqual(len(segments[head['segments'][0]['local_id']]), head['segments'][0]['count']) start_ots = Timestamp(1420102800, 0) ots = start_ots total_samples = 0 lengths = [603, 690, 690, 689, 690, 690, 689, 690, 690, 689] for grain in segments[head['segments'][0]['local_id']]: self.assertIsInstance(grain, CODEDAUDIOGRAIN) self.assertEqual(grain.grain_type, "coded_audio") self.assertEqual(grain.source_id, UUID('38bfd902-b35f-40d6-9ecf-dc95869130cf')) self.assertEqual(grain.flow_id, UUID('e615296b-ff40-4d95-8398-6a4082305f3a')) self.assertEqual(grain.origin_timestamp, ots) self.assertEqual(grain.format, CogAudioFormat.AAC) self.assertEqual(grain.channels, 2) self.assertEqual(grain.samples, 1024) self.assertEqual(grain.priming, 0) self.assertEqual(grain.remainder, 0) self.assertEqual(grain.sample_rate, 48000) self.assertEqual(len(grain.data), lengths[0]) lengths.pop(0) total_samples += grain.samples ots = start_ots + TimeOffset.from_count(total_samples, grain.sample_rate) def test_loads_event(self): self.maxDiff = None (head, segments) = loads(EVENT_DATA) self.assertEqual(head['created'], datetime(2018, 2, 7, 10, 37, 35)) self.assertEqual(head['id'], UUID('3c45f8b5-1853-4723-808a-ab5cbf598ccc')) self.assertEqual(len(head['segments']), 1) self.assertEqual(head['segments'][0]['id'], UUID('db095cb5-050b-4b8c-92e8-31351422e93a')) 
self.assertIn(head['segments'][0]['local_id'], segments) self.assertEqual(len(segments[head['segments'][0]['local_id']]), head['segments'][0]['count']) start_ots = Timestamp(1447176512, 400000000) ots = start_ots line = '' seqnum = 3107787894242499264 for grain in segments[head['segments'][0]['local_id']]: self.assertIsInstance(grain, EVENTGRAIN) self.assertEqual(grain.grain_type, "event") self.assertEqual(grain.source_id, UUID('2db4268e-82ef-49f9-bc0f-1726e8352d76')) self.assertEqual(grain.flow_id, UUID('5333bae9-0768-4e31-be1c-fbd5dc2e34ac')) self.assertEqual(grain.origin_timestamp, ots) self.assertEqual(grain.event_type, 'urn:x-ipstudio:format:event.ttext.ebuttlive') self.assertEqual(grain.topic, '') self.assertEqual(len(grain.event_data), 1) self.assertEqual(grain.event_data[0].path, 'Subs') self.assertEqual(grain.event_data[0].pre, line) line = '<?xml version="1.0" encoding="UTF-8"?>\n<tt:tt ttp:timeBase="clock" ttp:clockMode="utc" xml:lang="en" xmlns:tt="http://www.w3.org/ns/ttml" xmlns:ebuttExt="urn:ebu:tt:extension" xmlns:ttp="http://www.w3.org/ns/ttml#parameter" xmlns:tts="http://www.w3.org/ns/ttml#styling" ttp:cellResolution="50 30" xmlns:ebuttm="urn:ebu:tt:metadata" tts:extent="1920px 1080px" ttp:dropMode="nonDrop" ttp:markerMode="discontinuous" ebuttm:sequenceIdentifier="5333bae9-0768-4e31-be1c-fbd5dc2e34ac" ebuttm:sequenceNumber="' + str(seqnum) + '"><tt:head><tt:metadata><ebuttm:documentMetadata><ebuttm:documentEbuttVersion>v1.0</ebuttm:documentEbuttVersion><ebuttm:documentTotalNumberOfSubtitles>1</ebuttm:documentTotalNumberOfSubtitles><ebuttm:documentMaximumNumberOfDisplayableCharacterInAnyRow>40</ebuttm:documentMaximumNumberOfDisplayableCharacterInAnyRow><ebuttm:documentCountryOfOrigin>gb</ebuttm:documentCountryOfOrigin></ebuttm:documentMetadata></tt:metadata><tt:styling><tt:style xml:id="defaultStyle" tts:fontFamily="monospaceSansSerif" tts:fontSize="1c 1c" tts:lineHeight="normal" tts:textAlign="center" tts:color="white" 
tts:backgroundColor="transparent" tts:fontStyle="normal" tts:fontWeight="normal" tts:textDecoration="none" /><tt:style xml:id="WhiteOnBlack" tts:color="white" tts:backgroundColor="black" tts:fontSize="1c 2c"/><tt:style xml:id="textCenter" tts:textAlign="center"/></tt:styling><tt:layout><tt:region xml:id="bottom" tts:origin="10% 10%" tts:extent="80% 80%" tts:padding="0c" tts:displayAlign="after" tts:writingMode="lrtb"/></tt:layout></tt:head><tt:body dur="00:00:10"><tt:div style="defaultStyle"><tt:p xml:id="sub2" style="textCenter" region="bottom"><tt:span style="WhiteOnBlack">' + ots.to_iso8601_utc() + '</tt:span></tt:p></tt:div></tt:body></tt:tt>' # NOQA self.assertEqual(grain.event_data[0].post, line, msg="\n\nExpected:\n\n{!r}\n\nGot:\n\n{!r}\n\n".format(line, grain.event_data[0].post)) ots = ots + TimeOffset.from_nanosec(20000000) seqnum += 20000000
49.881497
1,794
0.611262
14,681
119,965
4.869627
0.059601
0.131345
0.039907
0.037096
0.88737
0.864305
0.850065
0.834539
0.830594
0.822929
0
0.098011
0.249865
119,965
2,404
1,795
49.902246
0.696377
0.025624
0
0.8111
0
0.013747
0.164749
0.110055
0
0
0.000901
0
0.455703
1
0.025967
false
0.001527
0.010183
0
0.038697
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
1d5e9f6612705f0d8efcb244956759377c128a5f
33,082
py
Python
mnisiscom/brain_cmaps.py
jeremymoreau/mnisiscom
52cc9bd1bdb5d54f1a61b6c1fdde4bc85e7652b2
[ "Apache-2.0", "CC-BY-4.0" ]
2
2021-08-23T20:19:48.000Z
2022-01-10T18:27:32.000Z
mnisiscom/brain_cmaps.py
jeremymoreau/mnisiscom
52cc9bd1bdb5d54f1a61b6c1fdde4bc85e7652b2
[ "Apache-2.0", "CC-BY-4.0" ]
4
2021-03-19T10:07:33.000Z
2022-01-13T02:02:25.000Z
mnisiscom/brain_cmaps.py
jeremymoreau/mnisiscom
52cc9bd1bdb5d54f1a61b6c1fdde4bc85e7652b2
[ "Apache-2.0", "CC-BY-4.0" ]
null
null
null
import matplotlib from matplotlib.colors import LinearSegmentedColormap pet_cm_data =[(0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.054901960784313725, 1.0), (0.0, 0.0, 0.06274509803921569, 1.0), (0.0, 0.0, 0.06274509803921569, 1.0), (0.0, 0.0, 0.11372549019607843, 1.0), (0.0, 0.0, 0.12549019607843137, 1.0), (0.0, 0.0, 0.12549019607843137, 1.0), (0.0, 0.0, 0.12549019607843137, 1.0), (0.0, 0.0, 0.17254901960784313, 1.0), (0.0, 0.0, 0.19607843137254902, 1.0), (0.0, 0.0, 0.19607843137254902, 1.0), (0.0, 0.0, 0.19607843137254902, 1.0), (0.0, 0.0, 0.25882352941176473, 1.0), (0.0, 0.0, 0.25882352941176473, 1.0), (0.0, 0.0, 0.25882352941176473, 1.0), (0.0, 0.0, 0.2784313725490196, 1.0), (0.0, 0.0, 0.3215686274509804, 1.0), (0.0, 0.0, 0.3215686274509804, 1.0), (0.0, 0.0, 0.3215686274509804, 1.0), (0.0, 0.0, 0.32941176470588235, 1.0), (0.0, 0.0, 0.3843137254901961, 1.0), (0.0, 0.0, 0.3843137254901961, 1.0), (0.0, 0.0, 0.3843137254901961, 1.0), (0.0, 0.0, 0.4470588235294118, 1.0), (0.0, 0.0, 0.4470588235294118, 1.0), (0.0, 0.0, 0.4470588235294118, 1.0), (0.0, 0.0, 0.4470588235294118, 1.0), (0.0, 0.01568627450980392, 0.5098039215686274, 1.0), (0.0, 0.01568627450980392, 0.5176470588235295, 1.0), (0.0, 0.01568627450980392, 0.5176470588235295, 1.0), (0.0, 0.01568627450980392, 0.5176470588235295, 1.0), (0.0, 0.047058823529411764, 0.5647058823529412, 1.0), (0.0, 0.054901960784313725, 0.5803921568627451, 1.0), (0.0, 0.054901960784313725, 0.5803921568627451, 1.0), (0.0, 0.08235294117647059, 0.6196078431372549, 1.0), (0.0, 0.09803921568627451, 0.6431372549019608, 1.0), (0.0, 
0.09803921568627451, 0.6431372549019608, 1.0), (0.0, 0.09803921568627451, 0.6431372549019608, 1.0), (0.0, 0.12156862745098039, 0.6745098039215687, 1.0), (0.0, 0.1411764705882353, 0.7058823529411765, 1.0), (0.0, 0.1411764705882353, 0.7058823529411765, 1.0), (0.0, 0.1411764705882353, 0.7058823529411765, 1.0), (0.0, 0.1607843137254902, 0.7294117647058823, 1.0), (0.0, 0.1843137254901961, 0.7686274509803922, 1.0), (0.0, 0.1843137254901961, 0.7686274509803922, 1.0), (0.0, 0.18823529411764706, 0.7764705882352941, 1.0), (0.0, 0.2235294117647059, 0.8352941176470589, 1.0), (0.0, 0.2235294117647059, 0.8352941176470589, 1.0), (0.0, 0.2235294117647059, 0.8352941176470589, 1.0), (0.0, 0.2235294117647059, 0.8352941176470589, 1.0), (0.0, 0.26666666666666666, 0.9019607843137255, 1.0), (0.0, 0.26666666666666666, 0.9019607843137255, 1.0), (0.0, 0.26666666666666666, 0.9019607843137255, 1.0), (0.0, 0.3058823529411765, 0.9568627450980393, 1.0), (0.0, 0.30980392156862746, 0.9647058823529412, 1.0), (0.0, 0.30980392156862746, 0.9647058823529412, 1.0), (0.0, 0.30980392156862746, 0.9647058823529412, 1.0), (0.0196078431372549, 0.3411764705882353, 0.9686274509803922, 1.0), (0.027450980392156862, 0.35294117647058826, 0.9725490196078431, 1.0), (0.027450980392156862, 0.35294117647058826, 0.9725490196078431, 1.0), (0.027450980392156862, 0.35294117647058826, 0.9725490196078431, 1.0), (0.07058823529411765, 0.3764705882352941, 0.9333333333333333, 1.0), (0.09019607843137255, 0.39215686274509803, 0.9098039215686274, 1.0), (0.09019607843137255, 0.39215686274509803, 0.9098039215686274, 1.0), (0.12549019607843137, 0.41568627450980394, 0.8784313725490196, 1.0), (0.15294117647058825, 0.4392156862745098, 0.8470588235294118, 1.0), (0.15294117647058825, 0.4392156862745098, 0.8470588235294118, 1.0), (0.15294117647058825, 0.4392156862745098, 0.8470588235294118, 1.0), (0.1803921568627451, 0.4549019607843137, 0.8235294117647058, 1.0), (0.2196078431372549, 0.47843137254901963, 0.7803921568627451, 1.0), 
(0.2196078431372549, 0.47843137254901963, 0.7803921568627451, 1.0), (0.2196078431372549, 0.47843137254901963, 0.7803921568627451, 1.0), (0.23921568627450981, 0.49019607843137253, 0.7647058823529411, 1.0), (0.2823529411764706, 0.5215686274509804, 0.7176470588235294, 1.0), (0.2823529411764706, 0.5215686274509804, 0.7176470588235294, 1.0), (0.2823529411764706, 0.5215686274509804, 0.7176470588235294, 1.0), (0.34509803921568627, 0.5607843137254902, 0.6549019607843137, 1.0), (0.34509803921568627, 0.5607843137254902, 0.6549019607843137, 1.0), (0.34509803921568627, 0.5607843137254902, 0.6549019607843137, 1.0), (0.34509803921568627, 0.5607843137254902, 0.6549019607843137, 1.0), (0.4, 0.6, 0.6, 1.0), (0.40784313725490196, 0.6039215686274509, 0.592156862745098, 1.0), (0.40784313725490196, 0.6039215686274509, 0.592156862745098, 1.0), (0.4549019607843137, 0.6392156862745098, 0.5450980392156862, 1.0), (0.47058823529411764, 0.6470588235294118, 0.5294117647058824, 1.0), (0.47058823529411764, 0.6470588235294118, 0.5294117647058824, 1.0), (0.47058823529411764, 0.6470588235294118, 0.5294117647058824, 1.0), (0.5098039215686274, 0.6705882352941176, 0.49019607843137253, 1.0), (0.5333333333333333, 0.6862745098039216, 0.4666666666666667, 1.0), (0.5333333333333333, 0.6862745098039216, 0.4666666666666667, 1.0), (0.5333333333333333, 0.6862745098039216, 0.4666666666666667, 1.0), (0.5647058823529412, 0.7098039215686275, 0.43529411764705883, 1.0), (0.596078431372549, 0.7333333333333333, 0.403921568627451, 1.0), (0.596078431372549, 0.7333333333333333, 0.403921568627451, 1.0), (0.6196078431372549, 0.7490196078431373, 0.3803921568627451, 1.0), (0.6588235294117647, 0.7725490196078432, 0.3411764705882353, 1.0), (0.6588235294117647, 0.7725490196078432, 0.3411764705882353, 1.0), (0.6588235294117647, 0.7725490196078432, 0.3411764705882353, 1.0), (0.6745098039215687, 0.7843137254901961, 0.3254901960784314, 1.0), (0.7215686274509804, 0.8156862745098039, 0.2784313725490196, 1.0), (0.7215686274509804, 
0.8156862745098039, 0.2784313725490196, 1.0), (0.7215686274509804, 0.8156862745098039, 0.2784313725490196, 1.0), (0.7294117647058823, 0.8196078431372549, 0.27058823529411763, 1.0), (0.7843137254901961, 0.8588235294117647, 0.21568627450980393, 1.0), (0.7843137254901961, 0.8588235294117647, 0.21568627450980393, 1.0), (0.7843137254901961, 0.8588235294117647, 0.21568627450980393, 1.0), (0.8431372549019608, 0.8941176470588236, 0.1568627450980392, 1.0), (0.8509803921568627, 0.8980392156862745, 0.14901960784313725, 1.0), (0.8509803921568627, 0.8980392156862745, 0.14901960784313725, 1.0), (0.8509803921568627, 0.8980392156862745, 0.14901960784313725, 1.0), (0.8980392156862745, 0.9294117647058824, 0.10588235294117647, 1.0), (0.9137254901960784, 0.9411764705882353, 0.08627450980392157, 1.0), (0.9137254901960784, 0.9411764705882353, 0.08627450980392157, 1.0), (0.9137254901960784, 0.9411764705882353, 0.08627450980392157, 1.0), (0.9764705882352941, 0.984313725490196, 0.023529411764705882, 1.0), (0.9764705882352941, 0.984313725490196, 0.023529411764705882, 1.0), (0.9764705882352941, 0.984313725490196, 0.023529411764705882, 1.0), (0.9882352941176471, 0.9725490196078431, 0.011764705882352941, 1.0), (1.0, 0.9607843137254902, 0.0, 1.0), (1.0, 0.9607843137254902, 0.0, 1.0), (1.0, 0.9607843137254902, 0.0, 1.0), (1.0, 0.9372549019607843, 0.0, 1.0), (1.0, 0.8980392156862745, 0.0, 1.0), (1.0, 0.8980392156862745, 0.0, 1.0), (1.0, 0.8823529411764706, 0.0, 1.0), (1.0, 0.8352941176470589, 0.0, 1.0), (1.0, 0.8352941176470589, 0.0, 1.0), (1.0, 0.8352941176470589, 0.0, 1.0), (1.0, 0.8274509803921568, 0.0, 1.0), (1.0, 0.7686274509803922, 0.0, 1.0), (1.0, 0.7686274509803922, 0.0, 1.0), (1.0, 0.7686274509803922, 0.0, 1.0), (1.0, 0.7686274509803922, 0.0, 1.0), (1.0, 0.7058823529411765, 0.0, 1.0), (1.0, 0.7058823529411765, 0.0, 1.0), (1.0, 0.7058823529411765, 0.0, 1.0), (1.0, 0.6588235294117647, 0.0, 1.0), (1.0, 0.6431372549019608, 0.0, 1.0), (1.0, 0.6431372549019608, 0.0, 1.0), (1.0, 
0.6431372549019608, 0.0, 1.0), (1.0, 0.6039215686274509, 0.0, 1.0), (1.0, 0.5803921568627451, 0.0, 1.0), (1.0, 0.5803921568627451, 0.0, 1.0), (1.0, 0.5803921568627451, 0.0, 1.0), (1.0, 0.5490196078431373, 0.0, 1.0), (1.0, 0.5176470588235295, 0.0, 1.0), (1.0, 0.5176470588235295, 0.0, 1.0), (1.0, 0.49411764705882355, 0.0, 1.0), (1.0, 0.4549019607843137, 0.0, 1.0), (1.0, 0.4549019607843137, 0.0, 1.0), (1.0, 0.4549019607843137, 0.0, 1.0), (1.0, 0.4392156862745098, 0.0, 1.0), (1.0, 0.39215686274509803, 0.0, 1.0), (1.0, 0.39215686274509803, 0.0, 1.0), (1.0, 0.39215686274509803, 0.0, 1.0), (1.0, 0.32941176470588235, 0.0, 1.0), (1.0, 0.32941176470588235, 0.0, 1.0), (1.0, 0.32941176470588235, 0.0, 1.0), (1.0, 0.32941176470588235, 0.0, 1.0), (1.0, 0.26666666666666666, 0.0, 1.0), (1.0, 0.26666666666666666, 0.0, 1.0), (1.0, 0.26666666666666666, 0.0, 1.0), (1.0, 0.26666666666666666, 0.0, 1.0), (1.0, 0.21176470588235294, 0.0, 1.0), (1.0, 0.20392156862745098, 0.0, 1.0), (1.0, 0.20392156862745098, 0.0, 1.0), (1.0, 0.16470588235294117, 0.0, 1.0), (1.0, 0.13725490196078433, 0.0, 1.0), (1.0, 0.13725490196078433, 0.0, 1.0), (1.0, 0.13725490196078433, 0.0, 1.0), (1.0, 0.10980392156862745, 0.0, 1.0), (1.0, 0.07450980392156863, 0.0, 1.0), (1.0, 0.07450980392156863, 0.0, 1.0), (1.0, 0.07450980392156863, 0.0, 1.0), (1.0, 0.054901960784313725, 0.0, 1.0), (1.0, 0.011764705882352941, 0.0, 1.0), (1.0, 0.011764705882352941, 0.0, 1.0), (1.0, 0.00784313725490196, 0.011764705882352941, 1.0), (1.0, 0.0, 0.050980392156862744, 1.0), (1.0, 0.0, 0.050980392156862744, 1.0), (1.0, 0.0, 0.050980392156862744, 1.0), (1.0, 0.0, 0.06274509803921569, 1.0), (1.0, 0.0, 0.11372549019607843, 1.0), (1.0, 0.0, 0.11372549019607843, 1.0), (1.0, 0.0, 0.11372549019607843, 1.0), (1.0, 0.0, 0.1803921568627451, 1.0), (1.0, 0.0, 0.1803921568627451, 1.0), (1.0, 0.0, 0.1803921568627451, 1.0), (1.0, 0.0, 0.1803921568627451, 1.0), (1.0, 0.0, 0.23529411764705882, 1.0), (1.0, 0.0, 0.24313725490196078, 1.0), (1.0, 0.0, 
0.24313725490196078, 1.0), (1.0, 0.0, 0.24313725490196078, 1.0), (1.0, 0.0, 0.2901960784313726, 1.0), (1.0, 0.0, 0.3058823529411765, 1.0), (1.0, 0.0, 0.3058823529411765, 1.0), (1.0, 0.0, 0.33725490196078434, 1.0), (1.0, 0.0, 0.3686274509803922, 1.0), (1.0, 0.0, 0.3686274509803922, 1.0), (1.0, 0.0, 0.3686274509803922, 1.0), (1.0, 0.0, 0.39215686274509803, 1.0), (1.0, 0.0, 0.43137254901960786, 1.0), (1.0, 0.0, 0.43137254901960786, 1.0), (1.0, 0.0, 0.43137254901960786, 1.0), (1.0, 0.0, 0.4470588235294118, 1.0), (1.0, 0.0, 0.49411764705882355, 1.0), (1.0, 0.0, 0.49411764705882355, 1.0), (1.0, 0.0, 0.5019607843137255, 1.0), (1.0, 0.0, 0.5568627450980392, 1.0), (1.0, 0.0, 0.5568627450980392, 1.0), (1.0, 0.0, 0.5568627450980392, 1.0), (1.0, 0.0, 0.5568627450980392, 1.0), (1.0, 0.0, 0.6196078431372549, 1.0), (1.0, 0.0, 0.6196078431372549, 1.0), (1.0, 0.0, 0.6196078431372549, 1.0), (1.0, 0.0, 0.6196078431372549, 1.0), (1.0, 0.0, 0.6823529411764706, 1.0), (1.0, 0.0, 0.6823529411764706, 1.0), (1.0, 0.0, 0.6823529411764706, 1.0), (1.0, 0.0, 0.7294117647058823, 1.0), (1.0, 0.0, 0.7450980392156863, 1.0), (1.0, 0.0, 0.7450980392156863, 1.0), (1.0, 0.0, 0.7450980392156863, 1.0), (1.0, 0.0, 0.7843137254901961, 1.0), (1.0, 0.0, 0.807843137254902, 1.0), (1.0, 0.0, 0.807843137254902, 1.0), (1.0, 0.0, 0.8352941176470589, 1.0), (1.0, 0.0, 0.8745098039215686, 1.0), (1.0, 0.0, 0.8745098039215686, 1.0), (1.0, 0.0, 0.8745098039215686, 1.0), (1.0, 0.0, 0.8901960784313725, 1.0), (1.0, 0.0, 0.9372549019607843, 1.0), (1.0, 0.0, 0.9372549019607843, 1.0), (1.0, 0.0, 0.9372549019607843, 1.0), (1.0, 0.0, 0.9450980392156862, 1.0), (1.0, 0.0, 1.0, 1.0), (1.0, 0.0, 1.0, 1.0)] ge_cm_data = [(0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0), (0.0, 
0.0, 0.0, 0.0), (0.0, 0.027450980392156862, 0.023529411764705882, 1.0), (0.0, 0.03137254901960784, 0.027450980392156862, 1.0), (0.0, 0.03137254901960784, 0.027450980392156862, 1.0), (0.0, 0.03137254901960784, 0.027450980392156862, 1.0), (0.0, 0.054901960784313725, 0.050980392156862744, 1.0), (0.0, 0.06274509803921569, 0.058823529411764705, 1.0), (0.0, 0.06274509803921569, 0.058823529411764705, 1.0), (0.0, 0.06274509803921569, 0.058823529411764705, 1.0), (0.0, 0.08235294117647059, 0.0784313725490196, 1.0), (0.0, 0.09411764705882353, 0.09019607843137255, 1.0), (0.0, 0.09411764705882353, 0.09019607843137255, 1.0), (0.0, 0.10980392156862745, 0.10588235294117647, 1.0), (0.0, 0.12549019607843137, 0.12156862745098039, 1.0), (0.0, 0.12549019607843137, 0.12156862745098039, 1.0), (0.0, 0.12549019607843137, 0.12156862745098039, 1.0), (0.0, 0.13333333333333333, 0.12941176470588237, 1.0), (0.0, 0.1568627450980392, 0.15294117647058825, 1.0), (0.0, 0.1568627450980392, 0.15294117647058825, 1.0), (0.0, 0.1568627450980392, 0.15294117647058825, 1.0), (0.0, 0.1607843137254902, 0.1568627450980392, 1.0), (0.0, 0.19215686274509805, 0.18823529411764706, 1.0), (0.0, 0.19215686274509805, 0.18823529411764706, 1.0), (0.0, 0.19215686274509805, 0.18823529411764706, 1.0), (0.0, 0.2235294117647059, 0.2196078431372549, 1.0), (0.0, 0.2235294117647059, 0.2196078431372549, 1.0), (0.0, 0.2235294117647059, 0.2196078431372549, 1.0), (0.0, 0.2235294117647059, 0.2196078431372549, 1.0), (0.0, 0.2549019607843137, 0.24705882352941178, 1.0), (0.0, 0.25882352941176473, 0.25098039215686274, 1.0), (0.0, 0.25882352941176473, 0.25098039215686274, 1.0), (0.0, 0.25882352941176473, 0.25098039215686274, 1.0), (0.0, 0.2823529411764706, 0.27450980392156865, 1.0), (0.0, 0.2901960784313726, 0.2823529411764706, 1.0), (0.0, 0.2901960784313726, 0.2823529411764706, 1.0), (0.0, 0.30980392156862746, 0.30196078431372547, 1.0), (0.0, 0.3215686274509804, 0.3137254901960784, 1.0), (0.0, 0.3215686274509804, 0.3137254901960784, 1.0), 
(0.0, 0.3215686274509804, 0.3137254901960784, 1.0), (0.0, 0.33725490196078434, 0.32941176470588235, 1.0), (0.0, 0.35294117647058826, 0.34509803921568627, 1.0), (0.0, 0.35294117647058826, 0.34509803921568627, 1.0), (0.0, 0.35294117647058826, 0.34509803921568627, 1.0), (0.0, 0.36470588235294116, 0.3568627450980392, 1.0), (0.0, 0.3843137254901961, 0.3764705882352941, 1.0), (0.0, 0.3843137254901961, 0.3764705882352941, 1.0), (0.0, 0.38823529411764707, 0.3803921568627451, 1.0), (0.0, 0.41568627450980394, 0.40784313725490196, 1.0), (0.0, 0.41568627450980394, 0.40784313725490196, 1.0), (0.0, 0.41568627450980394, 0.40784313725490196, 1.0), (0.0, 0.41568627450980394, 0.40784313725490196, 1.0), (0.0, 0.4470588235294118, 0.4392156862745098, 1.0), (0.0, 0.4470588235294118, 0.4392156862745098, 1.0), (0.0, 0.4470588235294118, 0.4392156862745098, 1.0), (0.0, 0.4470588235294118, 0.4392156862745098, 1.0), (0.0, 0.4745098039215686, 0.4666666666666667, 1.0), (0.0, 0.47843137254901963, 0.47058823529411764, 1.0), (0.0, 0.47843137254901963, 0.47058823529411764, 1.0), (0.00784313725490196, 0.4823529411764706, 0.4980392156862745, 1.0), (0.011764705882352941, 0.48627450980392156, 0.5058823529411764, 1.0), (0.011764705882352941, 0.48627450980392156, 0.5058823529411764, 1.0), (0.011764705882352941, 0.48627450980392156, 0.5058823529411764, 1.0), (0.03137254901960784, 0.4666666666666667, 0.5254901960784314, 1.0), (0.043137254901960784, 0.4549019607843137, 0.5372549019607843, 1.0), (0.043137254901960784, 0.4549019607843137, 0.5372549019607843, 1.0), (0.043137254901960784, 0.4549019607843137, 0.5372549019607843, 1.0), (0.058823529411764705, 0.4392156862745098, 0.5529411764705883, 1.0), (0.07450980392156863, 0.4235294117647059, 0.5686274509803921, 1.0), (0.07450980392156863, 0.4235294117647059, 0.5686274509803921, 1.0), (0.08627450980392157, 0.4117647058823529, 0.5803921568627451, 1.0), (0.10588235294117647, 0.39215686274509803, 0.6, 1.0), (0.10588235294117647, 0.39215686274509803, 0.6, 1.0), 
(0.10588235294117647, 0.39215686274509803, 0.6, 1.0), (0.11372549019607843, 0.3843137254901961, 0.6078431372549019, 1.0), (0.13725490196078433, 0.3607843137254902, 0.6313725490196078, 1.0), (0.13725490196078433, 0.3607843137254902, 0.6313725490196078, 1.0), (0.13725490196078433, 0.3607843137254902, 0.6313725490196078, 1.0), (0.13725490196078433, 0.3607843137254902, 0.6313725490196078, 1.0), (0.16862745098039217, 0.32941176470588235, 0.6627450980392157, 1.0), (0.16862745098039217, 0.32941176470588235, 0.6627450980392157, 1.0), (0.16862745098039217, 0.32941176470588235, 0.6627450980392157, 1.0), (0.19607843137254902, 0.30196078431372547, 0.6901960784313725, 1.0), (0.2, 0.2980392156862745, 0.6941176470588235, 1.0), (0.2, 0.2980392156862745, 0.6941176470588235, 1.0), (0.2, 0.2980392156862745, 0.6941176470588235, 1.0), (0.2235294117647059, 0.27450980392156865, 0.7176470588235294, 1.0), (0.23137254901960785, 0.26666666666666666, 0.7254901960784313, 1.0), (0.23137254901960785, 0.26666666666666666, 0.7254901960784313, 1.0), (0.23137254901960785, 0.26666666666666666, 0.7254901960784313, 1.0), (0.25098039215686274, 0.25098039215686274, 0.7450980392156863, 1.0), (0.2627450980392157, 0.23921568627450981, 0.7568627450980392, 1.0), (0.2627450980392157, 0.23921568627450981, 0.7568627450980392, 1.0), (0.2784313725490196, 0.2235294117647059, 0.7725490196078432, 1.0), (0.29411764705882354, 0.20784313725490197, 0.788235294117647, 1.0), (0.29411764705882354, 0.20784313725490197, 0.788235294117647, 1.0), (0.29411764705882354, 0.20784313725490197, 0.788235294117647, 1.0), (0.3058823529411765, 0.19607843137254902, 0.8, 1.0), (0.3254901960784314, 0.17647058823529413, 0.8196078431372549, 1.0), (0.3254901960784314, 0.17647058823529413, 0.8196078431372549, 1.0), (0.3254901960784314, 0.17647058823529413, 0.8196078431372549, 1.0), (0.3333333333333333, 0.16862745098039217, 0.8274509803921568, 1.0), (0.3568627450980392, 0.1411764705882353, 0.8549019607843137, 1.0), (0.3568627450980392, 
0.1411764705882353, 0.8549019607843137, 1.0), (0.3607843137254902, 0.13725490196078433, 0.8588235294117647, 1.0), (0.38823529411764707, 0.10980392156862745, 0.8862745098039215, 1.0), (0.38823529411764707, 0.10980392156862745, 0.8862745098039215, 1.0), (0.38823529411764707, 0.10980392156862745, 0.8862745098039215, 1.0), (0.38823529411764707, 0.10980392156862745, 0.8862745098039215, 1.0), (0.41568627450980394, 0.08235294117647059, 0.9137254901960784, 1.0), (0.4196078431372549, 0.0784313725490196, 0.9176470588235294, 1.0), (0.4196078431372549, 0.0784313725490196, 0.9176470588235294, 1.0), (0.4196078431372549, 0.0784313725490196, 0.9176470588235294, 1.0), (0.44313725490196076, 0.054901960784313725, 0.9411764705882353, 1.0), (0.45098039215686275, 0.047058823529411764, 0.9490196078431372, 1.0), (0.45098039215686275, 0.047058823529411764, 0.9490196078431372, 1.0), (0.47058823529411764, 0.027450980392156862, 0.9686274509803922, 1.0), (0.4823529411764706, 0.01568627450980392, 0.9803921568627451, 1.0), (0.4823529411764706, 0.01568627450980392, 0.9803921568627451, 1.0), (0.4823529411764706, 0.01568627450980392, 0.9803921568627451, 1.0), (0.4980392156862745, 0.01568627450980392, 0.9803921568627451, 1.0), (0.5137254901960784, 0.011764705882352941, 0.9803921568627451, 1.0), (0.5137254901960784, 0.011764705882352941, 0.9803921568627451, 1.0), (0.5137254901960784, 0.011764705882352941, 0.9803921568627451, 1.0), (0.5254901960784314, 0.023529411764705882, 0.9568627450980393, 1.0), (0.5450980392156862, 0.043137254901960784, 0.9176470588235294, 1.0), (0.5450980392156862, 0.043137254901960784, 0.9176470588235294, 1.0), (0.5529411764705883, 0.050980392156862744, 0.9019607843137255, 1.0), (0.5764705882352941, 0.07450980392156863, 0.8549019607843137, 1.0), (0.5764705882352941, 0.07450980392156863, 0.8549019607843137, 1.0), (0.5764705882352941, 0.07450980392156863, 0.8549019607843137, 1.0), (0.5803921568627451, 0.0784313725490196, 0.8470588235294118, 1.0), (0.6078431372549019, 
0.10588235294117647, 0.788235294117647, 1.0), (0.6078431372549019, 0.10588235294117647, 0.788235294117647, 1.0), (0.6078431372549019, 0.10588235294117647, 0.788235294117647, 1.0), (0.6078431372549019, 0.10588235294117647, 0.788235294117647, 1.0), (0.6392156862745098, 0.13725490196078433, 0.7254901960784313, 1.0), (0.6392156862745098, 0.13725490196078433, 0.7254901960784313, 1.0), (0.6392156862745098, 0.13725490196078433, 0.7254901960784313, 1.0), (0.6627450980392157, 0.16470588235294117, 0.6784313725490196, 1.0), (0.6705882352941176, 0.17254901960784313, 0.6627450980392157, 1.0), (0.6705882352941176, 0.17254901960784313, 0.6627450980392157, 1.0), (0.6705882352941176, 0.17254901960784313, 0.6627450980392157, 1.0), (0.6901960784313725, 0.19215686274509805, 0.6235294117647059, 1.0), (0.7019607843137254, 0.20392156862745098, 0.6, 1.0), (0.7019607843137254, 0.20392156862745098, 0.6, 1.0), (0.7019607843137254, 0.20392156862745098, 0.6, 1.0), (0.7176470588235294, 0.2196078431372549, 0.5686274509803921, 1.0), (0.7333333333333333, 0.23529411764705882, 0.5372549019607843, 1.0), (0.7333333333333333, 0.23529411764705882, 0.5372549019607843, 1.0), (0.7450980392156863, 0.24705882352941178, 0.5137254901960784, 1.0), (0.7647058823529411, 0.26666666666666666, 0.47058823529411764, 1.0), (0.7647058823529411, 0.26666666666666666, 0.47058823529411764, 1.0), (0.7647058823529411, 0.26666666666666666, 0.47058823529411764, 1.0), (0.7725490196078432, 0.27450980392156865, 0.4549019607843137, 1.0), (0.796078431372549, 0.2980392156862745, 0.40784313725490196, 1.0), (0.796078431372549, 0.2980392156862745, 0.40784313725490196, 1.0), (0.796078431372549, 0.2980392156862745, 0.40784313725490196, 1.0), (0.8, 0.30196078431372547, 0.4, 1.0), (0.8274509803921568, 0.32941176470588235, 0.34509803921568627, 1.0), (0.8274509803921568, 0.32941176470588235, 0.34509803921568627, 1.0), (0.8274509803921568, 0.32941176470588235, 0.34509803921568627, 1.0), (0.8588235294117647, 0.3607843137254902, 
0.2823529411764706, 1.0), (0.8588235294117647, 0.3607843137254902, 0.2823529411764706, 1.0), (0.8588235294117647, 0.3607843137254902, 0.2823529411764706, 1.0), (0.8588235294117647, 0.3607843137254902, 0.2823529411764706, 1.0), (0.8862745098039215, 0.38823529411764707, 0.22745098039215686, 1.0), (0.8901960784313725, 0.39215686274509803, 0.2196078431372549, 1.0), (0.8901960784313725, 0.39215686274509803, 0.2196078431372549, 1.0), (0.8901960784313725, 0.39215686274509803, 0.2196078431372549, 1.0), (0.9098039215686274, 0.4117647058823529, 0.1803921568627451, 1.0), (0.9215686274509803, 0.4235294117647059, 0.15294117647058825, 1.0), (0.9215686274509803, 0.4235294117647059, 0.15294117647058825, 1.0), (0.9372549019607843, 0.4392156862745098, 0.12549019607843137, 1.0), (0.9529411764705882, 0.4549019607843137, 0.09019607843137255, 1.0), (0.9529411764705882, 0.4549019607843137, 0.09019607843137255, 1.0), (0.9529411764705882, 0.4549019607843137, 0.09019607843137255, 1.0), (0.9607843137254902, 0.4666666666666667, 0.07058823529411765, 1.0), (0.984313725490196, 0.48627450980392156, 0.027450980392156862, 1.0), (0.984313725490196, 0.48627450980392156, 0.027450980392156862, 1.0), (0.984313725490196, 0.48627450980392156, 0.027450980392156862, 1.0), (0.9882352941176471, 0.49411764705882355, 0.03137254901960784, 1.0), (1.0, 0.5215686274509804, 0.03529411764705882, 1.0), (1.0, 0.5215686274509804, 0.03529411764705882, 1.0), (1.0, 0.5215686274509804, 0.03529411764705882, 1.0), (1.0, 0.5529411764705883, 0.09803921568627451, 1.0), (1.0, 0.5529411764705883, 0.09803921568627451, 1.0), (1.0, 0.5529411764705883, 0.09803921568627451, 1.0), (1.0, 0.5529411764705883, 0.09803921568627451, 1.0), (1.0, 0.5843137254901961, 0.16470588235294117, 1.0), (1.0, 0.5843137254901961, 0.16470588235294117, 1.0), (1.0, 0.5843137254901961, 0.16470588235294117, 1.0), (1.0, 0.5843137254901961, 0.16470588235294117, 1.0), (1.0, 0.611764705882353, 0.2196078431372549, 1.0), (1.0, 0.615686274509804, 0.22745098039215686, 
1.0), (1.0, 0.615686274509804, 0.22745098039215686, 1.0), (1.0, 0.6392156862745098, 0.27450980392156865, 1.0), (1.0, 0.6470588235294118, 0.2901960784313726, 1.0), (1.0, 0.6470588235294118, 0.2901960784313726, 1.0), (1.0, 0.6470588235294118, 0.2901960784313726, 1.0), (1.0, 0.6627450980392157, 0.3254901960784314, 1.0), (1.0, 0.6784313725490196, 0.3568627450980392, 1.0), (1.0, 0.6784313725490196, 0.3568627450980392, 1.0), (1.0, 0.6784313725490196, 0.3568627450980392, 1.0), (1.0, 0.6901960784313725, 0.3803921568627451, 1.0), (1.0, 0.7098039215686275, 0.4196078431372549, 1.0), (1.0, 0.7098039215686275, 0.4196078431372549, 1.0), (1.0, 0.7098039215686275, 0.4196078431372549, 1.0), (1.0, 0.7411764705882353, 0.4823529411764706, 1.0), (1.0, 0.7411764705882353, 0.4823529411764706, 1.0), (1.0, 0.7411764705882353, 0.4823529411764706, 1.0), (1.0, 0.7450980392156863, 0.49019607843137253, 1.0), (1.0, 0.7725490196078432, 0.5490196078431373, 1.0), (1.0, 0.7725490196078432, 0.5490196078431373, 1.0), (1.0, 0.7725490196078432, 0.5490196078431373, 1.0), (1.0, 0.7725490196078432, 0.5490196078431373, 1.0), (1.0, 0.803921568627451, 0.611764705882353, 1.0), (1.0, 0.803921568627451, 0.611764705882353, 1.0), (1.0, 0.803921568627451, 0.611764705882353, 1.0), (1.0, 0.8352941176470589, 0.6705882352941176, 1.0), (1.0, 0.8392156862745098, 0.6784313725490196, 1.0), (1.0, 0.8392156862745098, 0.6784313725490196, 1.0), (1.0, 0.8392156862745098, 0.6784313725490196, 1.0), (1.0, 0.8627450980392157, 0.7254901960784313, 1.0), (1.0, 0.8705882352941177, 0.7411764705882353, 1.0), (1.0, 0.8705882352941177, 0.7411764705882353, 1.0), (1.0, 0.8705882352941177, 0.7411764705882353, 1.0), (1.0, 0.8901960784313725, 0.7803921568627451, 1.0), (1.0, 0.9019607843137255, 0.803921568627451, 1.0), (1.0, 0.9019607843137255, 0.803921568627451, 1.0), (1.0, 0.9019607843137255, 0.803921568627451, 1.0), (1.0, 0.9333333333333333, 0.8705882352941177, 1.0), (1.0, 0.9333333333333333, 0.8705882352941177, 1.0), (1.0, 
0.9333333333333333, 0.8705882352941177, 1.0), (1.0, 0.9411764705882353, 0.8862745098039215, 1.0), (1.0, 0.9647058823529412, 0.9333333333333333, 1.0), (1.0, 0.9647058823529412, 0.9333333333333333, 1.0), (1.0, 0.9647058823529412, 0.9333333333333333, 1.0), (1.0, 0.9686274509803922, 0.9411764705882353, 1.0), (1.0, 1.0, 1.0, 1.0), (1.0, 1.0, 1.0, 1.0), (1.0, 1.0, 1.0, 1.0), (1.0, 1.0, 1.0, 1.0)] def pet_cmap(): cmap = matplotlib.colors.ListedColormap(pet_cm_data) return cmap def ge_cmap(): cmap = matplotlib.colors.ListedColormap(ge_cm_data) return cmap
62.774194
82
0.58092
4,133
33,082
4.647472
0.056859
0.125364
0.104956
0.104123
0.856414
0.81414
0.811537
0.79597
0.79597
0.79597
0
0.780081
0.263678
33,082
526
83
62.893536
0.008457
0
0
0.776923
0
0
0
0
0
0
0
0
0
1
0.003846
false
0
0.003846
0
0.011538
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
1d5fdb079c2c02cfe086cc5cba7853c3141c3c89
892
py
Python
llvm/utils/lit/tests/xfail-cl.py
val-verde/llvm-project
a5d4e884dad341ff80fbbdec6e7516b9c58c9eb0
[ "Apache-2.0" ]
null
null
null
llvm/utils/lit/tests/xfail-cl.py
val-verde/llvm-project
a5d4e884dad341ff80fbbdec6e7516b9c58c9eb0
[ "Apache-2.0" ]
null
null
null
llvm/utils/lit/tests/xfail-cl.py
val-verde/llvm-project
a5d4e884dad341ff80fbbdec6e7516b9c58c9eb0
[ "Apache-2.0" ]
null
null
null
# Check that XFAILing works via command line or env var. # RUN: %{lit} --xfail 'false.txt;false2.txt;top-level-suite :: b :: test.txt' \ # RUN: %{inputs}/xfail-cl \ # RUN: | FileCheck --check-prefix=CHECK-FILTER %s # RUN: env LIT_XFAIL='false.txt;false2.txt;top-level-suite :: b :: test.txt' \ # RUN: %{lit} %{inputs}/xfail-cl \ # RUN: | FileCheck --check-prefix=CHECK-FILTER %s # END. # CHECK-FILTER: Testing: 7 tests, {{[1-7]}} workers # CHECK-FILTER-DAG: {{^}}PASS: top-level-suite :: a :: test.txt # CHECK-FILTER-DAG: {{^}}XFAIL: top-level-suite :: b :: test.txt # CHECK-FILTER-DAG: {{^}}XFAIL: top-level-suite :: a :: false.txt # CHECK-FILTER-DAG: {{^}}XFAIL: top-level-suite :: b :: false.txt # CHECK-FILTER-DAG: {{^}}XFAIL: top-level-suite :: false.txt # CHECK-FILTER-DAG: {{^}}XFAIL: top-level-suite :: false2.txt # CHECK-FILTER-DAG: {{^}}PASS: top-level-suite :: true.txt
42.47619
79
0.63565
136
892
4.161765
0.272059
0.194346
0.206714
0.180212
0.805654
0.805654
0.805654
0.696113
0.696113
0.342756
0
0.007782
0.13565
892
20
80
44.6
0.726329
0.95852
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
7
1d7bfb6f0cbf948854ca2ce687022208ce9a4fd8
92
py
Python
parameters_8000.py
jolivaresc/corpus
1d2f3885778c29cb56dd1447140376e3e7cd5831
[ "BSD-3-Clause" ]
1
2017-07-25T20:15:56.000Z
2017-07-25T20:15:56.000Z
parameters_8000.py
jolivaresc/corpus
1d2f3885778c29cb56dd1447140376e3e7cd5831
[ "BSD-3-Clause" ]
null
null
null
parameters_8000.py
jolivaresc/corpus
1d2f3885778c29cb56dd1447140376e3e7cd5831
[ "BSD-3-Clause" ]
null
null
null
password="pbkdf2(1000,20,sha512)$9a97de4fea5fd4a8$4f92995b84d1521961581e0c3418a40ebdabd9bb"
46
91
0.891304
7
92
11.714286
1
0
0
0
0
0
0
0
0
0
0
0.472527
0.01087
92
1
92
92
0.428571
0
0
0
0
0
0.869565
0.869565
0
0
0
0
0
1
0
false
1
0
0
0
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
d554fbdba8aac92fdcc8f75d4f9c59521a9bb175
141
py
Python
bigcommerce/__init__.py
Anmol-Gulati/bigcommerce-api-python
a20e9bdb27fb6357426da0be91b9b9fb21da755f
[ "MIT" ]
69
2015-01-09T03:00:19.000Z
2022-03-30T07:27:20.000Z
bigcommerce/__init__.py
Anmol-Gulati/bigcommerce-api-python
a20e9bdb27fb6357426da0be91b9b9fb21da755f
[ "MIT" ]
70
2015-01-04T20:27:45.000Z
2022-02-14T23:34:10.000Z
bigcommerce/__init__.py
CUCWD/bigcommerce-api-python
e8fc7df64dfe776bb49895d2bcdec853e30f3134
[ "MIT" ]
71
2015-01-08T06:30:53.000Z
2022-03-11T11:58:22.000Z
import bigcommerce.resources import bigcommerce.api from bigcommerce.customer_login_token import CustomerLoginTokens as customer_login_token
35.25
88
0.907801
17
141
7.294118
0.588235
0.274194
0.290323
0
0
0
0
0
0
0
0
0
0.070922
141
3
89
47
0.946565
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
893483f56372cdde9e667615dc74899710ac51ff
71
py
Python
common/spaces/__init__.py
HaxThePlanet/nexus-wars-training
2f784264afc1297a205b5c1849fbda7c3d87d9c3
[ "Apache-2.0" ]
813
2017-08-20T14:22:23.000Z
2022-02-11T19:03:54.000Z
common/spaces/__init__.py
HaxThePlanet/nexus-wars-training
2f784264afc1297a205b5c1849fbda7c3d87d9c3
[ "Apache-2.0" ]
32
2017-08-25T00:22:08.000Z
2021-07-29T20:48:49.000Z
common/spaces/__init__.py
HaxThePlanet/nexus-wars-training
2f784264afc1297a205b5c1849fbda7c3d87d9c3
[ "Apache-2.0" ]
451
2017-08-21T14:43:30.000Z
2022-03-23T08:18:30.000Z
from common.spaces import prng from common.spaces import multi_discrete
35.5
40
0.873239
11
71
5.545455
0.636364
0.327869
0.52459
0.721311
0
0
0
0
0
0
0
0
0.098592
71
2
40
35.5
0.953125
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
896cde04711f770c7cf725112f19fecad52d49a7
131
py
Python
pytest_test.py
s1990i/tdd
5746234bb5603c96be050b0213b968500fd7c4a1
[ "Unlicense" ]
null
null
null
pytest_test.py
s1990i/tdd
5746234bb5603c96be050b0213b968500fd7c4a1
[ "Unlicense" ]
null
null
null
pytest_test.py
s1990i/tdd
5746234bb5603c96be050b0213b968500fd7c4a1
[ "Unlicense" ]
null
null
null
""" This mdule is meant to test functions """ def test_assert_true(): """ This is not a real test """ assert True
13.1
37
0.59542
19
131
4
0.684211
0.263158
0.368421
0
0
0
0
0
0
0
0
0
0.290076
131
9
38
14.555556
0.817204
0.465649
0
0
0
0
0
0
0
0
0
0
1
1
0.5
true
0
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
7
983d9152859e69ad397572b21e6c8989a3f022d5
197
py
Python
turkey/errors.py
geokala/quizify
1096423880dd50b935fcd31fc6ffbbe1cca65834
[ "BSD-3-Clause" ]
null
null
null
turkey/errors.py
geokala/quizify
1096423880dd50b935fcd31fc6ffbbe1cca65834
[ "BSD-3-Clause" ]
null
null
null
turkey/errors.py
geokala/quizify
1096423880dd50b935fcd31fc6ffbbe1cca65834
[ "BSD-3-Clause" ]
null
null
null
from turkey.utils import render_turkey def not_found_view(error): return render_turkey('errors/404.html'), 404 def not_allowed_view(error): return render_turkey('errors/403.html'), 403
19.7
48
0.761421
30
197
4.766667
0.533333
0.251748
0.20979
0.293706
0.461538
0.461538
0
0
0
0
0
0.070175
0.13198
197
9
49
21.888889
0.766082
0
0
0
0
0
0.152284
0
0
0
0
0
0
1
0.4
false
0
0.2
0.4
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
7
983e8493789609a395a481a71d9fd8457ff2f249
31,306
py
Python
gnosis/eth/tests/mocks/mock_internal_txs.py
titandac/gnosis-py
cf0af4f25e64b22256eabb415d0f3fe3a6180b14
[ "MIT" ]
64
2018-09-26T19:56:50.000Z
2022-03-18T21:45:59.000Z
gnosis/eth/tests/mocks/mock_internal_txs.py
zhanghao-ic/gnosis-py
d2a5912547b7d1b576c826909f4c1d0155db536f
[ "MIT" ]
151
2018-09-10T21:42:05.000Z
2022-03-31T12:33:31.000Z
gnosis/eth/tests/mocks/mock_internal_txs.py
zhanghao-ic/gnosis-py
d2a5912547b7d1b576c826909f4c1d0155db536f
[ "MIT" ]
50
2018-12-13T20:43:46.000Z
2022-03-30T09:32:32.000Z
from hexbytes import HexBytes internal_txs_errored = [ { "action": { "from": "0x667dEb5A98f77052cf561658575cF1530Ee42C7a", "gas": 60066, "value": 0, "callType": "call", "input": HexBytes( "0x6a76120200000000000000000000000090c6e02acc0ff725c0127feac32a53c4b10b03b700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000092000000000000000000000000000000000000000000000000000000000000007a44f84885b000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000007600000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006200000000000000000000000002eaa9d77ae4d8f9cdd9faacd44016e746485bddb00000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b1400000000000000000000000052201ff1720134bbbbb2f6bc97bf3715490ec19b0
00000000000000000000000ebf1a11532b93a529b5bc942b4baa98647913002000000000000000000000000ebe09eb3411d18f4ff8d859e096c533cac5c6b60000000000000000000000000d6801a1dffcd0a410336ef88def4320d6df1883e0000000000000000000000005b281a6dda0b271e91ae35de655ad301c976edb10000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000002448ee2641d78cc42d7ad76498917359d961a78300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b14000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000048e13085b1efda7283d958100e4cbfacb0ce501200000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000012c00000000000000000000000000000000000000000000000000000000000000020000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000041000000000000000000000000667deb5a98f77052cf561658575cf1530ee42c7a00000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000" ), "to": "0x47F61944efdB020829caead65AfF8AC024600580", }, "blockHash": "0x4c49052fc99be82b91f8a35320826304dfe278dbd7d756edde000d331606358f", "blockNumber": 4735890, "result": { "gasUsed": 21838, "output": HexBytes( "0x0000000000000000000000000000000000000000000000000000000000000000" ), }, "subtraces": 1, "traceAddress": [], "transactionHash": "0xf097d5e5dd39a6799fc13dfa49732a115b457386520dc92f99f0135a1d196851", "transactionPosition": 3, "type": "call", }, { "action": { "from": "0x47F61944efdB020829caead65AfF8AC024600580", "gas": 57726, "value": 0, "callType": "delegatecall", "input": HexBytes( 
"0x6a76120200000000000000000000000090c6e02acc0ff725c0127feac32a53c4b10b03b700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000092000000000000000000000000000000000000000000000000000000000000007a44f84885b000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000007600000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006200000000000000000000000002eaa9d77ae4d8f9cdd9faacd44016e746485bddb00000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b1400000000000000000000000052201ff1720134bbbbb2f6bc97bf3715490ec19b000000000000000000000000ebf1a11532b93a529b5bc942b4baa98647913002000000000000000000000000ebe09eb3411d18f4ff8d859e096c533cac5c6b60000000000000000000000000d6801a1dffcd0a410336ef88def4320d6df18
83e0000000000000000000000005b281a6dda0b271e91ae35de655ad301c976edb10000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000002448ee2641d78cc42d7ad76498917359d961a78300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b14000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000048e13085b1efda7283d958100e4cbfacb0ce50120000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000012c00000000000000000000000000000000000000000000000000000000000000020000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000041000000000000000000000000667deb5a98f77052cf561658575cf1530ee42c7a00000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000" ), "to": "0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", }, "blockHash": "0x4c49052fc99be82b91f8a35320826304dfe278dbd7d756edde000d331606358f", "blockNumber": 4735890, "result": { "gasUsed": 20369, "output": HexBytes( "0x0000000000000000000000000000000000000000000000000000000000000000" ), }, "subtraces": 1, "traceAddress": [0], "transactionHash": "0xf097d5e5dd39a6799fc13dfa49732a115b457386520dc92f99f0135a1d196851", "transactionPosition": 3, "type": "call", }, { "action": { "from": "0x47F61944efdB020829caead65AfF8AC024600580", "gas": 44984, "value": 0, "callType": "call", "input": HexBytes( 
"0x4f84885b000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000007600000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006200000000000000000000000002eaa9d77ae4d8f9cdd9faacd44016e746485bddb00000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b1400000000000000000000000052201ff1720134bbbbb2f6bc97bf3715490ec19b000000000000000000000000ebf1a11532b93a529b5bc942b4baa98647913002000000000000000000000000ebe09eb3411d18f4ff8d859e096c533cac5c6b60000000000000000000000000d6801a1dffcd0a410336ef88def4320d6df1883e0000000000000000000000005b281a6dda0b271e91ae35de655ad301c976edb10000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000002448ee2641d78cc42d7ad76498917359d961a783000000000000000000000000000000000000000000000000000000000000000400000
00000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b14000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000048e13085b1efda7283d958100e4cbfacb0ce5012000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000012c00000000000000000000000000000000000000000000000000000000000000020000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000" ), "to": "0x90c6e02ACC0fF725C0127fEAc32a53c4B10b03b7", }, "blockHash": 
"0x4c49052fc99be82b91f8a35320826304dfe278dbd7d756edde000d331606358f", "blockNumber": 4735890, "error": "Reverted", "subtraces": 1, "traceAddress": [0, 0], "transactionHash": "0xf097d5e5dd39a6799fc13dfa49732a115b457386520dc92f99f0135a1d196851", "transactionPosition": 3, "type": "call", }, { "action": { "from": "0x90c6e02ACC0fF725C0127fEAc32a53c4B10b03b7", "gas": 42978, "value": 0, "callType": "delegatecall", "input": HexBytes( "0x4f84885b000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000007600000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006200000000000000000000000002eaa9d77ae4d8f9cdd9faacd44016e746485bddb00000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b1400000000000000000000000052201ff1720134bbbbb2f6bc97bf3715490ec19b000000000000000000000000ebf1a11532b93a529b5bc942b4baa98647913002000000000000000000000000ebe09eb3411d18f4ff8d859e096c533cac5c6b60000000000000000000000000d6801a1dffcd0a410336ef88def4320d6df1883e0000000000000000000000005b281a6dda0b271e91ae35de655ad301c976edb10000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000002448ee2641d78cc42d7ad76498917359d961a78300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000006d7f0754ffeb405d23c51ce938289d4835be3b14000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000012c0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000048e13085b1efda7283d958100e4cbfacb0ce5012000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000012c00000000000000000000000000000000000000000000000000000000000000020000000000000000000000001a32b1734d964b039320c7712aa65b43c826d4dd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000" ), "to": "0x40e2688D8f60d4504b3Ea3bE4e8187CFF702b561", }, "blockHash": "0x4c49052fc99be82b91f8a35320826304dfe278dbd7d756edde000d331606358f", "blockNumber": 4735890, "error": "Reverted", "subtraces": 1, "traceAddress": [0, 0, 0], "transactionHash": "0xf097d5e5dd39a6799fc13dfa49732a115b457386520dc92f99f0135a1d196851", "transactionPosition": 3, "type": "call", }, { "action": { "from": "0x90c6e02ACC0fF725C0127fEAc32a53c4B10b03b7", "gas": 39238, "value": 0, "callType": "staticcall", "input": HexBytes( "0x2f54bf6e00000000000000000000000047f61944efdb020829caead65aff8ac024600580" ), "to": "0x47F61944efdB020829caead65AfF8AC024600580", }, "blockHash": "0x4c49052fc99be82b91f8a35320826304dfe278dbd7d756edde000d331606358f", "blockNumber": 4735890, "result": { "gasUsed": 1793, "output": HexBytes( "0x0000000000000000000000000000000000000000000000000000000000000000" ), }, "subtraces": 1, "traceAddress": [0, 0, 0, 0], "transactionHash": "0xf097d5e5dd39a6799fc13dfa49732a115b457386520dc92f99f0135a1d196851", "transactionPosition": 3, "type": "call", }, { "action": { "from": "0x47F61944efdB020829caead65AfF8AC024600580", "gas": 37680, "value": 0, "callType": "delegatecall", "input": HexBytes( "0x2f54bf6e00000000000000000000000047f61944efdb020829caead65aff8ac024600580" ), "to": "0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", }, "blockHash": "0x4c49052fc99be82b91f8a35320826304dfe278dbd7d756edde000d331606358f", "blockNumber": 4735890, "result": { "gasUsed": 788, "output": HexBytes( "0x0000000000000000000000000000000000000000000000000000000000000000" ), }, "subtraces": 0, "traceAddress": [0, 0, 0, 0, 0], "transactionHash": 
"0xf097d5e5dd39a6799fc13dfa49732a115b457386520dc92f99f0135a1d196851", "transactionPosition": 3, "type": "call", }, ] creation_internal_txs = [ { "action": { "from": "0xcA9b328e2dc54cf4d92dc5eB4af8D8CCfa30D28c", "gas": 301835, "value": 0, "init": HexBytes( "0x608060405234801561001057600080fd5b5060405161060a38038061060a833981018060405281019080805190602001909291908051820192919060200180519060200190929190805190602001909291908051906020019092919050505084848160008173ffffffffffffffffffffffffffffffffffffffff1614151515610116576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260248152602001807f496e76616c6964206d617374657220636f707920616464726573732070726f7681526020017f696465640000000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000815111156101a35773ffffffffffffffffffffffffffffffffffffffff60005416600080835160208501846127105a03f46040513d6000823e600082141561019f573d81fd5b5050505b5050600081111561036d57600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614156102b7578273ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051600060405180830381858888f1935050505015156102b2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f206574686572000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b61036c565b6102d1828483610377640100000000026401000000009004565b151561036b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f20746f6b656e00000000000000000000000000000000000000000000000000008152506040019150506040518
0910390fd5b5b5b5050505050610490565b600060608383604051602401808373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040527fa9059cbb000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505090506000808251602084016000896127105a03f16040513d6000823e3d60008114610473576020811461047b5760009450610485565b829450610485565b8151158315171594505b505050509392505050565b61016b8061049f6000396000f30060806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d0410800290000000000000000000000008942595a2dc5181df0465af0d7be08c8f23c93af00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000007f455f30e862e13e3e3d960762cb11c4f744d5200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010ee50b34f319300000000000000000000000000000000000000000000000000000000000001640ec78d9e00000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200
000000000000000000000000000000000000000000000000000000000000004000000000000000000000000d3c45affc8dc980b38f699dae37f0c9e15f60167000000000000000000000000b16b692679532b6f6836182ca668e5be9c5dff4e00000000000000000000000008f8c46f9f71e301ba41f59c253c412f1a129dad000000000000000000000000fd311da6e2304e01a2c1e0768e6b4fa18a6ee6d90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" ), }, "blockHash": "0x14113f6fe5c2f017a027f19e5a4f8cd3ae8d76c138b50d3137a90ee7752734b2", "blockNumber": 7567042, "result": { "gasUsed": 288831, "code": HexBytes( "0x60806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d041080029" ), "address": "0x84443F61efc60D10DA9F9a2398980CD5748394BB", }, "subtraces": 2, "traceAddress": [], "transactionHash": "0xa77bdf0ad7b96aadd332257af6557ee2fa760c0c6e7338c5f7dce9963d66a3e8", "transactionPosition": 23, "type": "create", }, { "action": { "from": "0x84443F61efc60D10DA9F9a2398980CD5748394BB", "gas": 270906, "value": 0, "callType": "delegatecall", "input": HexBytes( 
"0x0ec78d9e00000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000d3c45affc8dc980b38f699dae37f0c9e15f60167000000000000000000000000b16b692679532b6f6836182ca668e5be9c5dff4e00000000000000000000000008f8c46f9f71e301ba41f59c253c412f1a129dad000000000000000000000000fd311da6e2304e01a2c1e0768e6b4fa18a6ee6d900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" ), "to": "0x8942595A2dC5181Df0465AF0D7be08c8f23C93af", }, "blockHash": "0x14113f6fe5c2f017a027f19e5a4f8cd3ae8d76c138b50d3137a90ee7752734b2", "blockNumber": 7567042, "result": {"gasUsed": 186895, "output": HexBytes("0x")}, "subtraces": 0, "traceAddress": [0], "transactionHash": "0xa77bdf0ad7b96aadd332257af6557ee2fa760c0c6e7338c5f7dce9963d66a3e8", "transactionPosition": 23, "type": "call", }, { "action": { "from": "0x84443F61efc60D10DA9F9a2398980CD5748394BB", "gas": 2300, "value": 4765630000476563, "callType": "call", "input": HexBytes("0x"), "to": "0x07F455F30e862E13E3E3D960762cB11c4F744d52", }, "blockHash": "0x14113f6fe5c2f017a027f19e5a4f8cd3ae8d76c138b50d3137a90ee7752734b2", "blockNumber": 7567042, "result": {"gasUsed": 0, "output": HexBytes("0x")}, "subtraces": 0, "traceAddress": [1], "transactionHash": "0xa77bdf0ad7b96aadd332257af6557ee2fa760c0c6e7338c5f7dce9963d66a3e8", "transactionPosition": 23, "type": "call", }, { "action": { "from": "0x84443F61efc60D10DA9F9a2398980CD5748394BB", "gas": 2300, "value": 4765630000476563, "callType": "call", "input": HexBytes("0x"), "to": "0x07F455F30e862E13E3E3D960762cB11c4F744d52", }, "blockHash": "0x14113f6fe5c2f017a027f19e5a4f8cd3ae8d76c138b50d3137a90ee7752734b2", "blockNumber": 7567042, "result": {"gasUsed": 0, "output": 
HexBytes("0x")}, "subtraces": 0, "traceAddress": [1, 0], "transactionHash": "0xa77bdf0ad7b96aadd332257af6557ee2fa760c0c6e7338c5f7dce9963d66a3e8", "transactionPosition": 23, "type": "call", }, ]
137.307018
4,956
0.888871
349
31,306
79.722063
0.217765
0.003594
0.003522
0.004529
0.130036
0.125759
0.105273
0.093915
0.07235
0.07235
0
0.809875
0.080655
31,306
227
4,957
137.911894
0.15684
0
0
0.671111
0
0
0.874657
0.829362
0
1
0.829362
0
0
1
0
false
0
0.004444
0
0.004444
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
1
1
null
1
1
0
0
0
0
0
0
0
0
0
0
0
9
7f7bbee887437d9defb8472e28c5da5a67fcedc7
185
py
Python
python/ctranslate2/converters/__init__.py
scotfang/CTranslate2
6f1bfb229a7785987d632947f373e145ed601e05
[ "MIT" ]
null
null
null
python/ctranslate2/converters/__init__.py
scotfang/CTranslate2
6f1bfb229a7785987d632947f373e145ed601e05
[ "MIT" ]
null
null
null
python/ctranslate2/converters/__init__.py
scotfang/CTranslate2
6f1bfb229a7785987d632947f373e145ed601e05
[ "MIT" ]
null
null
null
from ctranslate2.converters.converter import Converter from ctranslate2.converters.opennmt_py import OpenNMTPyConverter from ctranslate2.converters.opennmt_tf import OpenNMTTFConverter
46.25
64
0.902703
20
185
8.25
0.5
0.272727
0.454545
0.387879
0
0
0
0
0
0
0
0.017341
0.064865
185
3
65
61.666667
0.936416
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
7f7d6a287f24e5a730eabe1a87eb5ab59b645782
22,089
py
Python
python/speech/python-client/swagger_client/api/voice_models_api.py
harvinchou/cognitive-services-test
51a342bf4e05871e0d04bc5e1cdf714c7c2701ac
[ "MIT" ]
null
null
null
python/speech/python-client/swagger_client/api/voice_models_api.py
harvinchou/cognitive-services-test
51a342bf4e05871e0d04bc5e1cdf714c7c2701ac
[ "MIT" ]
null
null
null
python/speech/python-client/swagger_client/api/voice_models_api.py
harvinchou/cognitive-services-test
51a342bf4e05871e0d04bc5e1cdf714c7c2701ac
[ "MIT" ]
null
null
null
# coding: utf-8 """ Speech Services API v2.0 Speech Services API v2.0. # noqa: E501 OpenAPI spec version: v2.0 Contact: crservice@microsoft.com Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from swagger_client.api_client import ApiClient class VoiceModelsApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def create_voice_model(self, model_definition, **kwargs): # noqa: E501 """Creates a new voice model object. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_voice_model(model_definition, async_req=True) >>> result = thread.get() :param async_req bool :param IModelDefinitionV2 model_definition: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_voice_model_with_http_info(model_definition, **kwargs) # noqa: E501 else: (data) = self.create_voice_model_with_http_info(model_definition, **kwargs) # noqa: E501 return data def create_voice_model_with_http_info(self, model_definition, **kwargs): # noqa: E501 """Creates a new voice model object. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_voice_model_with_http_info(model_definition, async_req=True) >>> result = thread.get() :param async_req bool :param IModelDefinitionV2 model_definition: (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['model_definition'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_voice_model" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'model_definition' is set if ('model_definition' not in params or params['model_definition'] is None): raise ValueError("Missing the required parameter `model_definition` when calling `create_voice_model`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'model_definition' in params: body_params = params['model_definition'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['subscription_key', 'token'] # noqa: E501 return self.api_client.call_api( '/api/texttospeech/v2.0/models', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_voice_model(self, id, **kwargs): # noqa: E501 """Deletes the voice model with the given id. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_voice_model(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: The identifier of the voice model. (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_voice_model_with_http_info(id, **kwargs) # noqa: E501 else: (data) = self.delete_voice_model_with_http_info(id, **kwargs) # noqa: E501 return data def delete_voice_model_with_http_info(self, id, **kwargs): # noqa: E501 """Deletes the voice model with the given id. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_voice_model_with_http_info(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: The identifier of the voice model. (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_voice_model" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'id' is set if ('id' not in params or params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `delete_voice_model`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in params: path_params['id'] = params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = ['subscription_key', 'token'] # noqa: E501 return self.api_client.call_api( '/api/texttospeech/v2.0/models/{id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_supported_locales_for_voice_models(self, **kwargs): # noqa: E501 """Gets a list of supported locales for custom voice models. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_supported_locales_for_voice_models(async_req=True) >>> result = thread.get() :param async_req bool :return: list[str] If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_supported_locales_for_voice_models_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_supported_locales_for_voice_models_with_http_info(**kwargs) # noqa: E501 return data def get_supported_locales_for_voice_models_with_http_info(self, **kwargs): # noqa: E501 """Gets a list of supported locales for custom voice models. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_supported_locales_for_voice_models_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: list[str] If the method is called asynchronously, returns the request thread. """ all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_supported_locales_for_voice_models" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['subscription_key', 'token'] # noqa: E501 return self.api_client.call_api( '/api/texttospeech/v2.0/models/locales', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[str]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), 
_request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_voice_model(self, id, **kwargs): # noqa: E501 """Gets specified voice model details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_voice_model(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :return: Model If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_voice_model_with_http_info(id, **kwargs) # noqa: E501 else: (data) = self.get_voice_model_with_http_info(id, **kwargs) # noqa: E501 return data def get_voice_model_with_http_info(self, id, **kwargs): # noqa: E501 """Gets specified voice model details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_voice_model_with_http_info(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :return: Model If the method is called asynchronously, returns the request thread. 
""" all_params = ['id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_voice_model" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'id' is set if ('id' not in params or params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `get_voice_model`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in params: path_params['id'] = params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['subscription_key', 'token'] # noqa: E501 return self.api_client.call_api( '/api/texttospeech/v2.0/models/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Model', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_voice_models(self, **kwargs): # noqa: E501 """Gets a list of voice model details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_voice_models(async_req=True) >>> result = thread.get() :param async_req bool :return: list[Model] If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_voice_models_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_voice_models_with_http_info(**kwargs) # noqa: E501 return data def get_voice_models_with_http_info(self, **kwargs): # noqa: E501 """Gets a list of voice model details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_voice_models_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: list[Model] If the method is called asynchronously, returns the request thread. """ all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_voice_models" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['subscription_key', 'token'] # noqa: E501 return self.api_client.call_api( '/api/texttospeech/v2.0/models', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[Model]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_voice_model(self, id, model_update, **kwargs): # noqa: E501 """Updates the 
metadata of the voice model identified by the given ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_voice_model(id, model_update, async_req=True) >>> result = thread.get() :param async_req bool :param str id: The identifier of the voice model. (required) :param ModelUpdate model_update: The updated values for the voice model. (required) :return: Model If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_voice_model_with_http_info(id, model_update, **kwargs) # noqa: E501 else: (data) = self.update_voice_model_with_http_info(id, model_update, **kwargs) # noqa: E501 return data def update_voice_model_with_http_info(self, id, model_update, **kwargs): # noqa: E501 """Updates the metadata of the voice model identified by the given ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_voice_model_with_http_info(id, model_update, async_req=True) >>> result = thread.get() :param async_req bool :param str id: The identifier of the voice model. (required) :param ModelUpdate model_update: The updated values for the voice model. (required) :return: Model If the method is called asynchronously, returns the request thread. 
""" all_params = ['id', 'model_update'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_voice_model" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'id' is set if ('id' not in params or params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `update_voice_model`") # noqa: E501 # verify the required parameter 'model_update' is set if ('model_update' not in params or params['model_update'] is None): raise ValueError("Missing the required parameter `model_update` when calling `update_voice_model`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in params: path_params['id'] = params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'model_update' in params: body_params = params['model_update'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['subscription_key', 'token'] # noqa: E501 return self.api_client.call_api( '/api/texttospeech/v2.0/models/{id}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Model', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
37.758974
129
0.606592
2,571
22,089
4.94788
0.068067
0.044651
0.026413
0.03396
0.950318
0.941828
0.925556
0.911957
0.909362
0.898593
0
0.015525
0.303047
22,089
584
130
37.82363
0.810783
0.316175
0
0.78481
1
0
0.179006
0.045181
0
0
0
0
0
1
0.041139
false
0
0.012658
0
0.113924
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
7f876d55316d943107b2195873b36e81403b9c05
1,433
py
Python
backend/auth1/serializers.py
NurlykhanKairly/bus-for-everyone
04f57d73ceeeef0e90e969683409ac177f20955b
[ "MIT" ]
null
null
null
backend/auth1/serializers.py
NurlykhanKairly/bus-for-everyone
04f57d73ceeeef0e90e969683409ac177f20955b
[ "MIT" ]
null
null
null
backend/auth1/serializers.py
NurlykhanKairly/bus-for-everyone
04f57d73ceeeef0e90e969683409ac177f20955b
[ "MIT" ]
1
2021-09-08T04:42:23.000Z
2021-09-08T04:42:23.000Z
from rest_framework import serializers from auth1.models import * from django.db import transaction import logging class UserRegisterSerializer(serializers.ModelSerializer): password = serializers.CharField(write_only=True) class Meta: model = User fields = ('username', 'email', 'password', 'token') def create(self, validated_data): user = User.objects.create_user(**validated_data) return user class UserSerializer(serializers.ModelSerializer): password = serializers.CharField(write_only=True) class Meta: model = User fields = ('username', 'email', 'password') class DriverGetSerializer(serializers.ModelSerializer): password = serializers.CharField(write_only=True) class Meta: model = User fields = UserSerializer.Meta.fields + ('years',) read_only_fields = UserSerializer.Meta.fields + ('years',) def create(self, validated_data): user = User.objects.create_user(**validated_data) return user class PassengerGetSerializer(serializers.ModelSerializer): password = serializers.CharField(write_only=True) class Meta: model = User fields = UserSerializer.Meta.fields + ('isSpecial',) read_only_fields = UserSerializer.Meta.fields + ('isSpecial',) def create(self, validated_data): user = User.objects.create_user(**validated_data) return user
28.66
70
0.695743
147
1,433
6.659864
0.265306
0.079673
0.138917
0.183861
0.806946
0.778345
0.700715
0.700715
0.700715
0.700715
0
0.000879
0.205862
1,433
50
71
28.66
0.859402
0
0
0.6
0
0
0.052301
0
0
0
0
0
0
1
0.085714
false
0.2
0.114286
0
0.628571
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
9
f695957281eece26b2895737be3a430f9cae8b75
3,972
py
Python
tests/test_setImp.py
EthanArbuckle/TweakInspect
aae567b3b67df3a062ca2521b04d1d997dfe54b2
[ "Unlicense" ]
9
2022-03-03T17:09:29.000Z
2022-03-31T15:49:06.000Z
tests/test_setImp.py
EthanArbuckle/TweakInspect
aae567b3b67df3a062ca2521b04d1d997dfe54b2
[ "Unlicense" ]
null
null
null
tests/test_setImp.py
EthanArbuckle/TweakInspect
aae567b3b67df3a062ca2521b04d1d997dfe54b2
[ "Unlicense" ]
null
null
null
from tweakinspect.executable import Executable from tests.compiler import SnippetCompiler class TestSetImplementation: def test_one_hook_no_args_nsselectorfromstring(self) -> None: source_code = """ #import <Foundation/Foundation.h> void new_viewDidLoad(id _self, SEL __cmd) {} %ctor { Class viewClass = objc_getClass("UIView"); Method methodToHook = class_getInstanceMethod(viewClass, NSSelectorFromString(@"viewDidLoad")); method_setImplementation(methodToHook, (IMP)new_viewDidLoad); } """ with SnippetCompiler(source_code=source_code, generator="internal") as compiled_binary: exec = Executable(file_path=compiled_binary) assert exec.get_hooks() == ["%hook [UIView viewDidLoad]"] def test_one_hook_no_args_sel_registername(self) -> None: source_code = """ void new_method(id _self, SEL __cmd) {} %ctor { Class viewClass = objc_getClass("UIView"); Method methodToHook = class_getInstanceMethod(viewClass, sel_registerName("removeFromSuperview")); method_setImplementation(methodToHook, (IMP)new_method); } """ with SnippetCompiler(source_code=source_code, generator="internal") as compiled_binary: exec = Executable(file_path=compiled_binary) assert exec.get_hooks() == ["%hook [UIView removeFromSuperview]"] def test_multiple_hooks_no_args_nsselectorfromstring(self) -> None: source_code = """ #import <Foundation/Foundation.h> void new_viewDidLoad(id _self, SEL __cmd) {} void new_removeFromSuperview(id _self, SEL __cmd) {} %ctor { Class viewClass = objc_getClass("UIView"); Method methodToHook = class_getInstanceMethod(viewClass, NSSelectorFromString(@"viewDidLoad")); method_setImplementation(methodToHook, (IMP)new_viewDidLoad); Method methodToHook2 = class_getInstanceMethod(viewClass, NSSelectorFromString(@"removeFromSuperview")); method_setImplementation(methodToHook2, (IMP)new_removeFromSuperview); Class SBClass = objc_getClass("SpringBoard"); methodToHook = class_getInstanceMethod(SBClass, NSSelectorFromString(@"init")); method_setImplementation(methodToHook, 
(IMP)new_removeFromSuperview); } """ with SnippetCompiler(source_code=source_code, generator="internal") as compiled_binary: exec = Executable(file_path=compiled_binary) assert set(exec.get_hooks()) == set( ["%hook [UIView viewDidLoad]", "%hook [UIView removeFromSuperview]", "%hook [SpringBoard init]"] ) def test_multiple_hooks_no_args_selregistername(self) -> None: source_code = """ void new_viewDidLoad(id _self, SEL __cmd) {} void new_removeFromSuperview(id _self, SEL __cmd) {} %ctor { Class viewClass = objc_getClass("UIView"); Method methodToHook = class_getInstanceMethod(viewClass, sel_registerName("viewDidLoad")); method_setImplementation(methodToHook, (IMP)new_viewDidLoad); Method methodToHook2 = class_getInstanceMethod(viewClass, sel_registerName("removeFromSuperview")); method_setImplementation(methodToHook2, (IMP)new_removeFromSuperview); Class SBClass = objc_getClass("SpringBoard"); methodToHook = class_getInstanceMethod(SBClass, sel_registerName("init")); method_setImplementation(methodToHook, (IMP)new_removeFromSuperview); } """ with SnippetCompiler(source_code=source_code, generator="internal") as compiled_binary: exec = Executable(file_path=compiled_binary) assert set(exec.get_hooks()) == set( ["%hook [UIView viewDidLoad]", "%hook [UIView removeFromSuperview]", "%hook [SpringBoard init]"] )
49.65
116
0.672457
359
3,972
7.142061
0.164345
0.046802
0.021061
0.028081
0.909126
0.902106
0.854134
0.854134
0.831903
0.831903
0
0.001306
0.229104
3,972
79
117
50.278481
0.836055
0
0
0.666667
0
0
0.657603
0.329809
0
0
0
0
0.057971
1
0.057971
false
0
0.057971
0
0.130435
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f6b2354ca0730cc559376e4f20706a7ba6f0639a
84,613
py
Python
src/conductor/client/http/api/workflow_resource_api.py
conductor-sdk/conductor-python
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
[ "Apache-2.0" ]
3
2022-03-10T18:24:46.000Z
2022-03-22T20:49:30.000Z
src/conductor/client/http/api/workflow_resource_api.py
conductor-sdk/conductor-python
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
[ "Apache-2.0" ]
6
2022-03-08T17:48:28.000Z
2022-03-30T00:39:22.000Z
src/conductor/client/http/api/workflow_resource_api.py
conductor-sdk/conductor-python
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from conductor.client.http.api_client import ApiClient class WorkflowResourceApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def decide(self, workflow_id, **kwargs): # noqa: E501 """Starts the decision task for a workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.decide(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.decide_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.decide_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def decide_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Starts the decision task for a workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.decide_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['workflow_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method decide" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `decide`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/decide/{workflowId}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete(self, workflow_id, **kwargs): # noqa: E501 """Removes the workflow from the system # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool archive_workflow: :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.delete_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def delete_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Removes the workflow from the system # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool archive_workflow: :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['workflow_id', 'archive_workflow'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `delete`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] if 'archive_workflow' in params: query_params.append(('archiveWorkflow', params['archive_workflow'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/remove', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, 
response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_execution_status(self, workflow_id, **kwargs): # noqa: E501 """Gets the workflow by workflow id # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_execution_status(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool include_tasks: :return: Workflow If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_execution_status_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.get_execution_status_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def get_execution_status_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Gets the workflow by workflow id # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_execution_status_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool include_tasks: :return: Workflow If the method is called asynchronously, returns the request thread. 
""" all_params = ['workflow_id', 'include_tasks'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_execution_status" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `get_execution_status`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] if 'include_tasks' in params: query_params.append(('includeTasks', params['include_tasks'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Workflow', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_external_storage_location(self, path, operation, payload_type, **kwargs): # noqa: E501 """Get the uri and path of the external storage where the workflow payload is to be stored # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_external_storage_location(path, operation, payload_type, async_req=True) >>> result = thread.get() :param async_req bool :param str path: (required) :param str operation: (required) :param str payload_type: (required) :return: ExternalStorageLocation If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_external_storage_location_with_http_info(path, operation, payload_type, **kwargs) # noqa: E501 else: (data) = self.get_external_storage_location_with_http_info(path, operation, payload_type, **kwargs) # noqa: E501 return data def get_external_storage_location_with_http_info(self, path, operation, payload_type, **kwargs): # noqa: E501 """Get the uri and path of the external storage where the workflow payload is to be stored # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_external_storage_location_with_http_info(path, operation, payload_type, async_req=True) >>> result = thread.get() :param async_req bool :param str path: (required) :param str operation: (required) :param str payload_type: (required) :return: ExternalStorageLocation If the method is called asynchronously, returns the request thread. 
""" all_params = ['path', 'operation', 'payload_type'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_external_storage_location" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'path' is set if ('path' not in params or params['path'] is None): raise ValueError("Missing the required parameter `path` when calling `get_external_storage_location`") # noqa: E501 # verify the required parameter 'operation' is set if ('operation' not in params or params['operation'] is None): raise ValueError("Missing the required parameter `operation` when calling `get_external_storage_location`") # noqa: E501 # verify the required parameter 'payload_type' is set if ('payload_type' not in params or params['payload_type'] is None): raise ValueError("Missing the required parameter `payload_type` when calling `get_external_storage_location`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'path' in params: query_params.append(('path', params['path'])) # noqa: E501 if 'operation' in params: query_params.append(('operation', params['operation'])) # noqa: E501 if 'payload_type' in params: query_params.append(('payloadType', params['payload_type'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/externalstoragelocation', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ExternalStorageLocation', # noqa: E501 
auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_running_workflow(self, name, **kwargs): # noqa: E501 """Retrieve all the running workflows # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_running_workflow(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: (required) :param int version: :param int start_time: :param int end_time: :return: list[str] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_running_workflow_with_http_info(name, **kwargs) # noqa: E501 else: (data) = self.get_running_workflow_with_http_info(name, **kwargs) # noqa: E501 return data def get_running_workflow_with_http_info(self, name, **kwargs): # noqa: E501 """Retrieve all the running workflows # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_running_workflow_with_http_info(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: (required) :param int version: :param int start_time: :param int end_time: :return: list[str] If the method is called asynchronously, returns the request thread. 
""" all_params = ['name', 'version', 'start_time', 'end_time'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_running_workflow" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `get_running_workflow`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'version' in params: query_params.append(('version', params['version'])) # noqa: E501 if 'start_time' in params: query_params.append(('startTime', params['start_time'])) # noqa: E501 if 'end_time' in params: query_params.append(('endTime', params['end_time'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/running/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[str]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_workflows(self, body, name, **kwargs): # noqa: E501 """Lists workflows for the given correlation id list # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_workflows(body, name, async_req=True) >>> result = thread.get() :param async_req bool :param list[str] body: (required) :param str name: (required) :param bool include_closed: :param bool include_tasks: :return: dict(str, list[Workflow]) If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_workflows_with_http_info(body, name, **kwargs) # noqa: E501 else: (data) = self.get_workflows_with_http_info(body, name, **kwargs) # noqa: E501 return data def get_workflows_with_http_info(self, body, name, **kwargs): # noqa: E501 """Lists workflows for the given correlation id list # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_workflows_with_http_info(body, name, async_req=True) >>> result = thread.get() :param async_req bool :param list[str] body: (required) :param str name: (required) :param bool include_closed: :param bool include_tasks: :return: dict(str, list[Workflow]) If the method is called asynchronously, returns the request thread. 
""" all_params = ['body', 'name', 'include_closed', 'include_tasks'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_workflows" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `get_workflows`") # noqa: E501 # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `get_workflows`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'include_closed' in params: query_params.append(('includeClosed', params['include_closed'])) # noqa: E501 if 'include_tasks' in params: query_params.append(('includeTasks', params['include_tasks'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{name}/correlated', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='dict(str, list[Workflow])', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), 
_return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_workflows1(self, name, correlation_id, **kwargs): # noqa: E501 """Lists workflows for the given correlation id # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_workflows1(name, correlation_id, async_req=True) >>> result = thread.get() :param async_req bool :param str name: (required) :param str correlation_id: (required) :param bool include_closed: :param bool include_tasks: :return: list[Workflow] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_workflows1_with_http_info(name, correlation_id, **kwargs) # noqa: E501 else: (data) = self.get_workflows1_with_http_info(name, correlation_id, **kwargs) # noqa: E501 return data def get_workflows1_with_http_info(self, name, correlation_id, **kwargs): # noqa: E501 """Lists workflows for the given correlation id # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_workflows1_with_http_info(name, correlation_id, async_req=True) >>> result = thread.get() :param async_req bool :param str name: (required) :param str correlation_id: (required) :param bool include_closed: :param bool include_tasks: :return: list[Workflow] If the method is called asynchronously, returns the request thread. 
""" all_params = ['name', 'correlation_id', 'include_closed', 'include_tasks'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_workflows1" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `get_workflows1`") # noqa: E501 # verify the required parameter 'correlation_id' is set if ('correlation_id' not in params or params['correlation_id'] is None): raise ValueError("Missing the required parameter `correlation_id` when calling `get_workflows1`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'correlation_id' in params: path_params['correlationId'] = params['correlation_id'] # noqa: E501 query_params = [] if 'include_closed' in params: query_params.append(('includeClosed', params['include_closed'])) # noqa: E501 if 'include_tasks' in params: query_params.append(('includeTasks', params['include_tasks'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{name}/correlated/{correlationId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[Workflow]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), 
_preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def pause_workflow(self, workflow_id, **kwargs): # noqa: E501 """Pauses the workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.pause_workflow(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.pause_workflow_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.pause_workflow_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def pause_workflow_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Pauses the workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.pause_workflow_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['workflow_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method pause_workflow" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `pause_workflow`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/pause', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def rerun(self, body, workflow_id, **kwargs): # noqa: E501 """Reruns the workflow from a specific task # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.rerun(body, workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param RerunWorkflowRequest body: (required) :param str workflow_id: (required) :return: str If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.rerun_with_http_info(body, workflow_id, **kwargs) # noqa: E501 else: (data) = self.rerun_with_http_info(body, workflow_id, **kwargs) # noqa: E501 return data def rerun_with_http_info(self, body, workflow_id, **kwargs): # noqa: E501 """Reruns the workflow from a specific task # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.rerun_with_http_info(body, workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param RerunWorkflowRequest body: (required) :param str workflow_id: (required) :return: str If the method is called asynchronously, returns the request thread. """ all_params = ['body', 'workflow_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method rerun" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `rerun`") # noqa: E501 # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `rerun`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['text/plain']) # noqa: 
E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/rerun', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='str', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def reset_workflow(self, workflow_id, **kwargs): # noqa: E501 """Resets callback times of all non-terminal SIMPLE tasks to 0 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.reset_workflow(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.reset_workflow_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.reset_workflow_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def reset_workflow_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Resets callback times of all non-terminal SIMPLE tasks to 0 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.reset_workflow_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['workflow_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method reset_workflow" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `reset_workflow`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/resetcallbacks', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def restart(self, workflow_id, **kwargs): # noqa: E501 """Restarts a completed workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.restart(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool use_latest_definitions: :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.restart_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.restart_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def restart_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Restarts a completed workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.restart_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool use_latest_definitions: :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['workflow_id', 'use_latest_definitions'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method restart" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `restart`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] if 'use_latest_definitions' in params: query_params.append(('useLatestDefinitions', params['use_latest_definitions'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/restart', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, 
files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def resume_workflow(self, workflow_id, **kwargs): # noqa: E501 """Resumes the workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.resume_workflow(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.resume_workflow_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.resume_workflow_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def resume_workflow_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Resumes the workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.resume_workflow_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['workflow_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method resume_workflow" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `resume_workflow`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/resume', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def retry(self, workflow_id, **kwargs): # noqa: E501 """Retries the last failed task # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.retry(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool resume_subworkflow_tasks: :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.retry_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.retry_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def retry_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Retries the last failed task # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.retry_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param bool resume_subworkflow_tasks: :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['workflow_id', 'resume_subworkflow_tasks'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method retry" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `retry`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] if 'resume_subworkflow_tasks' in params: query_params.append(('resumeSubworkflowTasks', params['resume_subworkflow_tasks'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/retry', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, 
files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def search(self, **kwargs): # noqa: E501 """Search for workflows based on payload and other parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflowSummary If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.search_with_http_info(**kwargs) # noqa: E501 else: (data) = self.search_with_http_info(**kwargs) # noqa: E501 return data def search_with_http_info(self, **kwargs): # noqa: E501 """Search for workflows based on payload and other parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflowSummary If the method is called asynchronously, returns the request thread. 
""" all_params = ['start', 'size', 'sort', 'free_text', 'query'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method search" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'start' in params: query_params.append(('start', params['start'])) # noqa: E501 if 'size' in params: query_params.append(('size', params['size'])) # noqa: E501 if 'sort' in params: query_params.append(('sort', params['sort'])) # noqa: E501 if 'free_text' in params: query_params.append(('freeText', params['free_text'])) # noqa: E501 if 'query' in params: query_params.append(('query', params['query'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/search', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SearchResultWorkflowSummary', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def search_v2(self, **kwargs): # noqa: E501 """Search for workflows based on payload and other parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_v2(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflow If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.search_v2_with_http_info(**kwargs) # noqa: E501 else: (data) = self.search_v2_with_http_info(**kwargs) # noqa: E501 return data def search_v2_with_http_info(self, **kwargs): # noqa: E501 """Search for workflows based on payload and other parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_v2_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflow If the method is called asynchronously, returns the request thread. 
""" all_params = ['start', 'size', 'sort', 'free_text', 'query'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method search_v2" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'start' in params: query_params.append(('start', params['start'])) # noqa: E501 if 'size' in params: query_params.append(('size', params['size'])) # noqa: E501 if 'sort' in params: query_params.append(('sort', params['sort'])) # noqa: E501 if 'free_text' in params: query_params.append(('freeText', params['free_text'])) # noqa: E501 if 'query' in params: query_params.append(('query', params['query'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/search-v2', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SearchResultWorkflow', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def search_workflows_by_tasks(self, **kwargs): # noqa: E501 """Search for workflows based on task parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_workflows_by_tasks(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflowSummary If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.search_workflows_by_tasks_with_http_info(**kwargs) # noqa: E501 else: (data) = self.search_workflows_by_tasks_with_http_info(**kwargs) # noqa: E501 return data def search_workflows_by_tasks_with_http_info(self, **kwargs): # noqa: E501 """Search for workflows based on task parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_workflows_by_tasks_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflowSummary If the method is called asynchronously, returns the request thread. 
""" all_params = ['start', 'size', 'sort', 'free_text', 'query'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method search_workflows_by_tasks" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'start' in params: query_params.append(('start', params['start'])) # noqa: E501 if 'size' in params: query_params.append(('size', params['size'])) # noqa: E501 if 'sort' in params: query_params.append(('sort', params['sort'])) # noqa: E501 if 'free_text' in params: query_params.append(('freeText', params['free_text'])) # noqa: E501 if 'query' in params: query_params.append(('query', params['query'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/search-by-tasks', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SearchResultWorkflowSummary', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def search_workflows_by_tasks_v2(self, **kwargs): # noqa: E501 """Search for workflows based on task parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. 
If order is not specified, defaults to ASC # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_workflows_by_tasks_v2(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflow If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.search_workflows_by_tasks_v2_with_http_info(**kwargs) # noqa: E501 else: (data) = self.search_workflows_by_tasks_v2_with_http_info(**kwargs) # noqa: E501 return data def search_workflows_by_tasks_v2_with_http_info(self, **kwargs): # noqa: E501 """Search for workflows based on task parameters # noqa: E501 use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_workflows_by_tasks_v2_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param int start: :param int size: :param str sort: :param str free_text: :param str query: :return: SearchResultWorkflow If the method is called asynchronously, returns the request thread. 
""" all_params = ['start', 'size', 'sort', 'free_text', 'query'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method search_workflows_by_tasks_v2" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'start' in params: query_params.append(('start', params['start'])) # noqa: E501 if 'size' in params: query_params.append(('size', params['size'])) # noqa: E501 if 'sort' in params: query_params.append(('sort', params['sort'])) # noqa: E501 if 'free_text' in params: query_params.append(('freeText', params['free_text'])) # noqa: E501 if 'query' in params: query_params.append(('query', params['query'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/search-by-tasks-v2', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SearchResultWorkflow', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def skip_task_from_workflow(self, workflow_id, task_reference_name, skip_task_request, **kwargs): # noqa: E501 """Skips a given task from a current running workflow # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.skip_task_from_workflow(workflow_id, task_reference_name, skip_task_request, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param str task_reference_name: (required) :param SkipTaskRequest skip_task_request: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.skip_task_from_workflow_with_http_info(workflow_id, task_reference_name, skip_task_request, **kwargs) # noqa: E501 else: (data) = self.skip_task_from_workflow_with_http_info(workflow_id, task_reference_name, skip_task_request, **kwargs) # noqa: E501 return data def skip_task_from_workflow_with_http_info(self, workflow_id, task_reference_name, skip_task_request, **kwargs): # noqa: E501 """Skips a given task from a current running workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.skip_task_from_workflow_with_http_info(workflow_id, task_reference_name, skip_task_request, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param str task_reference_name: (required) :param SkipTaskRequest skip_task_request: (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['workflow_id', 'task_reference_name', 'skip_task_request'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method skip_task_from_workflow" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `skip_task_from_workflow`") # noqa: E501 # verify the required parameter 'task_reference_name' is set if ('task_reference_name' not in params or params['task_reference_name'] is None): raise ValueError("Missing the required parameter `task_reference_name` when calling `skip_task_from_workflow`") # noqa: E501 # verify the required parameter 'skip_task_request' is set if ('skip_task_request' not in params or params['skip_task_request'] is None): raise ValueError("Missing the required parameter `skip_task_request` when calling `skip_task_from_workflow`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 if 'task_reference_name' in params: path_params['taskReferenceName'] = params['task_reference_name'] # noqa: E501 query_params = [] if 'skip_task_request' in params: query_params.append(('skipTaskRequest', params['skip_task_request'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}/skiptask/{taskReferenceName}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 
auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def start_workflow(self, body, name, **kwargs): # noqa: E501 """Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.start_workflow(body, name, async_req=True) >>> result = thread.get() :param async_req bool :param dict(str, object) body: (required) :param str name: (required) :param int version: :param str correlation_id: :param int priority: :return: str If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.start_workflow_with_http_info(body, name, **kwargs) # noqa: E501 else: (data) = self.start_workflow_with_http_info(body, name, **kwargs) # noqa: E501 return data def start_workflow_with_http_info(self, body, name, **kwargs): # noqa: E501 """Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.start_workflow_with_http_info(body, name, async_req=True) >>> result = thread.get() :param async_req bool :param dict(str, object) body: (required) :param str name: (required) :param int version: :param str correlation_id: :param int priority: :return: str If the method is called asynchronously, returns the request thread. 
""" all_params = ['body', 'name', 'version', 'correlation_id', 'priority'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method start_workflow" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `start_workflow`") # noqa: E501 # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `start_workflow`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'version' in params: query_params.append(('version', params['version'])) # noqa: E501 if 'correlation_id' in params: query_params.append(('correlationId', params['correlation_id'])) # noqa: E501 if 'priority' in params: query_params.append(('priority', params['priority'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['text/plain']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{name}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='str', # noqa: E501 auth_settings=auth_settings, 
async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def start_workflow1(self, body, **kwargs): # noqa: E501 """Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.start_workflow1(body, async_req=True) >>> result = thread.get() :param async_req bool :param StartWorkflowRequest body: (required) :return: str If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.start_workflow1_with_http_info(body, **kwargs) # noqa: E501 else: (data) = self.start_workflow1_with_http_info(body, **kwargs) # noqa: E501 return data def start_workflow1_with_http_info(self, body, **kwargs): # noqa: E501 """Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.start_workflow1_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool :param StartWorkflowRequest body: (required) :return: str If the method is called asynchronously, returns the request thread. 
""" all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method start_workflow1" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `start_workflow1`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['text/plain']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='str', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def terminate1(self, workflow_id, **kwargs): # noqa: E501 """Terminate workflow execution # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.terminate1(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param str reason: :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.terminate1_with_http_info(workflow_id, **kwargs) # noqa: E501 else: (data) = self.terminate1_with_http_info(workflow_id, **kwargs) # noqa: E501 return data def terminate1_with_http_info(self, workflow_id, **kwargs): # noqa: E501 """Terminate workflow execution # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.terminate1_with_http_info(workflow_id, async_req=True) >>> result = thread.get() :param async_req bool :param str workflow_id: (required) :param str reason: :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['workflow_id', 'reason'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method terminate1" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'workflow_id' is set if ('workflow_id' not in params or params['workflow_id'] is None): raise ValueError("Missing the required parameter `workflow_id` when calling `terminate1`") # noqa: E501 collection_formats = {} path_params = {} if 'workflow_id' in params: path_params['workflowId'] = params['workflow_id'] # noqa: E501 query_params = [] if 'reason' in params: query_params.append(('reason', params['reason'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/api/workflow/{workflowId}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
39.631382
144
0.603134
9,678
84,613
5.028208
0.026968
0.04899
0.024166
0.031071
0.965292
0.952901
0.943283
0.938639
0.929351
0.925282
0
0.015985
0.303535
84,613
2,134
145
39.649953
0.809791
0.314467
0
0.799323
0
0
0.194698
0.048056
0
0
0
0
0
1
0.03641
false
0
0.003387
0
0.093988
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f6b75b014f8ec52444b83df3cfe3b0ffd4fbabf4
51,963
py
Python
src/elarian/utils/generated/messaging_state_pb2.py
ElarianLtd/python-sdk
f603688dffba4b46c5a9f208a75b3dc3d75ed565
[ "MIT" ]
4
2021-05-27T23:15:21.000Z
2021-12-29T11:40:02.000Z
src/elarian/utils/generated/messaging_state_pb2.py
ElarianLtd/python-sdk
f603688dffba4b46c5a9f208a75b3dc3d75ed565
[ "MIT" ]
2
2021-05-20T05:51:02.000Z
2021-07-13T11:25:54.000Z
src/elarian/utils/generated/messaging_state_pb2.py
ElarianLtd/python-sdk
f603688dffba4b46c5a9f208a75b3dc3d75ed565
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: messaging_state.proto """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 import elarian.utils.generated.common_model_pb2 as common__model__pb2 import elarian.utils.generated.messaging_model_pb2 as messaging__model__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='messaging_state.proto', package='com.elarian.hera.proto', syntax='proto3', serialized_options=None, create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x15messaging_state.proto\x12\x16\x63om.elarian.hera.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x12\x63ommon_model.proto\x1a\x15messaging_model.proto\"\xd4\x04\n\x0fReceivedMessage\x12?\n\x0f\x63ustomer_number\x18\x01 \x01(\x0b\x32&.com.elarian.hera.proto.CustomerNumber\x12\x46\n\x0e\x63hannel_number\x18\x02 \x01(\x0b\x32..com.elarian.hera.proto.MessagingChannelNumber\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12.\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\nsession_id\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x31\n\x0bin_reply_to\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12?\n\x08provider\x18\x07 \x01(\x0e\x32-.com.elarian.hera.proto.ChannelNumberProvider\x12,\n\x06\x61pp_id\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x05parts\x18\t 
\x03(\x0b\x32*.com.elarian.hera.proto.InboundMessageBody\x12*\n\x04\x63ost\x18\n \x01(\x0b\x32\x1c.com.elarian.hera.proto.Cash\x12\x39\n\x13provider_message_id\x18\x0b \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xff\x05\n\x0bSentMessage\x12?\n\x0f\x63ustomer_number\x18\x01 \x01(\x0b\x32&.com.elarian.hera.proto.CustomerNumber\x12\x46\n\x0e\x63hannel_number\x18\x02 \x01(\x0b\x32..com.elarian.hera.proto.MessagingChannelNumber\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12.\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\nsession_id\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x31\n\x0bin_reply_to\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12?\n\x08provider\x18\x07 \x01(\x0e\x32-.com.elarian.hera.proto.ChannelNumberProvider\x12,\n\x06\x61pp_id\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\nupdated_at\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x06status\x18\n \x01(\x0e\x32-.com.elarian.hera.proto.MessageDeliveryStatus\x12?\n\treactions\x18\x0b \x03(\x0b\x32,.com.elarian.hera.proto.MessageReactionState\x12\x38\n\x07message\x18\x0c \x01(\x0b\x32\'.com.elarian.hera.proto.OutboundMessage\x12*\n\x04\x63ost\x18\r \x01(\x0b\x32\x1c.com.elarian.hera.proto.Cash\x12\x39\n\x13provider_message_id\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x81\x01\n\x14MessageReactionState\x12.\n\ncreated_at\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x39\n\x08reaction\x18\x02 \x01(\x0e\x32\'.com.elarian.hera.proto.MessageReaction\"R\n\x11MessageReplyToken\x12\r\n\x05token\x18\x01 \x01(\t\x12.\n\nexpires_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x8b\x01\n\x0e\x43hannelMessage\x12;\n\x08received\x18\x01 \x01(\x0b\x32\'.com.elarian.hera.proto.ReceivedMessageH\x00\x12\x33\n\x04sent\x18\x02 \x01(\x0b\x32#.com.elarian.hera.proto.SentMessageH\x00\x42\x07\n\x05\x65ntry\"\xd3\x03\n\x18\x43ompleteMessagingSession\x12?\n\x0f\x63ustomer_number\x18\x01 
\x01(\x0b\x32&.com.elarian.hera.proto.CustomerNumber\x12\x46\n\x0e\x63hannel_number\x18\x02 \x01(\x0b\x32..com.elarian.hera.proto.MessagingChannelNumber\x12\x12\n\nsession_id\x18\x03 \x01(\t\x12.\n\nstarted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x08\x64uration\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0f\n\x07\x61pp_ids\x18\x06 \x03(\t\x12\x45\n\nend_reason\x18\x07 \x01(\x0e\x32\x31.com.elarian.hera.proto.MessagingSessionEndReason\x12*\n\x04\x63ost\x18\x08 \x01(\x0b\x32\x1c.com.elarian.hera.proto.Cash\x12\x39\n\x13provider_session_id\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x97\x02\n\x1c\x42lockedMessagingChannelState\x12?\n\x0f\x63ustomer_number\x18\x01 \x01(\x0b\x32&.com.elarian.hera.proto.CustomerNumber\x12\x46\n\x0e\x63hannel_number\x18\x02 \x01(\x0b\x32..com.elarian.hera.proto.MessagingChannelNumber\x12>\n\x0breply_token\x18\x03 \x01(\x0b\x32).com.elarian.hera.proto.MessageReplyToken\x12.\n\nblocked_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x96\x02\n\x1b\x41\x63tiveMessagingChannelState\x12?\n\x0f\x63ustomer_number\x18\x01 \x01(\x0b\x32&.com.elarian.hera.proto.CustomerNumber\x12\x46\n\x0e\x63hannel_number\x18\x02 \x01(\x0b\x32..com.elarian.hera.proto.MessagingChannelNumber\x12>\n\x0breply_token\x18\x03 \x01(\x0b\x32).com.elarian.hera.proto.MessageReplyToken\x12.\n\nallowed_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xd9\x03\n\x1eInSessionMessagingChannelState\x12?\n\x0f\x63ustomer_number\x18\x01 \x01(\x0b\x32&.com.elarian.hera.proto.CustomerNumber\x12\x46\n\x0e\x63hannel_number\x18\x02 \x01(\x0b\x32..com.elarian.hera.proto.MessagingChannelNumber\x12>\n\x0breply_token\x18\x03 \x01(\x0b\x32).com.elarian.hera.proto.MessageReplyToken\x12.\n\nallowed_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nsession_id\x18\x05 \x01(\t\x12.\n\nstarted_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nexpires_at\x18\x07 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x61pp_ids\x18\x08 \x03(\t\x12\x39\n\x13provider_session_id\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xfe\x01\n\x15MessagingChannelState\x12G\n\x07\x62locked\x18\x01 \x01(\x0b\x32\x34.com.elarian.hera.proto.BlockedMessagingChannelStateH\x00\x12\x45\n\x06\x61\x63tive\x18\x02 \x01(\x0b\x32\x33.com.elarian.hera.proto.ActiveMessagingChannelStateH\x00\x12L\n\nin_session\x18\x03 \x01(\x0b\x32\x36.com.elarian.hera.proto.InSessionMessagingChannelStateH\x00\x42\x07\n\x05state\"\xcf\x01\n\x0eMessagingState\x12?\n\x08\x63hannels\x18\x01 \x03(\x0b\x32-.com.elarian.hera.proto.MessagingChannelState\x12\x38\n\x08messages\x18\x02 \x03(\x0b\x32&.com.elarian.hera.proto.ChannelMessage\x12\x42\n\x08sessions\x18\x03 \x03(\x0b\x32\x30.com.elarian.hera.proto.CompleteMessagingSessionb\x06proto3' , dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,common__model__pb2.DESCRIPTOR,messaging__model__pb2.DESCRIPTOR,]) _RECEIVEDMESSAGE = _descriptor.Descriptor( name='ReceivedMessage', full_name='com.elarian.hera.proto.ReceivedMessage', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='customer_number', full_name='com.elarian.hera.proto.ReceivedMessage.customer_number', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='channel_number', full_name='com.elarian.hera.proto.ReceivedMessage.channel_number', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message_id', full_name='com.elarian.hera.proto.ReceivedMessage.message_id', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='created_at', full_name='com.elarian.hera.proto.ReceivedMessage.created_at', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='session_id', full_name='com.elarian.hera.proto.ReceivedMessage.session_id', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='in_reply_to', full_name='com.elarian.hera.proto.ReceivedMessage.in_reply_to', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='provider', full_name='com.elarian.hera.proto.ReceivedMessage.provider', index=6, number=7, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='app_id', full_name='com.elarian.hera.proto.ReceivedMessage.app_id', index=7, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='parts', full_name='com.elarian.hera.proto.ReceivedMessage.parts', index=8, number=9, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cost', full_name='com.elarian.hera.proto.ReceivedMessage.cost', index=9, number=10, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='provider_message_id', full_name='com.elarian.hera.proto.ReceivedMessage.provider_message_id', index=10, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=190, serialized_end=786, ) _SENTMESSAGE = _descriptor.Descriptor( name='SentMessage', full_name='com.elarian.hera.proto.SentMessage', filename=None, 
file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='customer_number', full_name='com.elarian.hera.proto.SentMessage.customer_number', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='channel_number', full_name='com.elarian.hera.proto.SentMessage.channel_number', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message_id', full_name='com.elarian.hera.proto.SentMessage.message_id', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='created_at', full_name='com.elarian.hera.proto.SentMessage.created_at', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='session_id', full_name='com.elarian.hera.proto.SentMessage.session_id', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='in_reply_to', full_name='com.elarian.hera.proto.SentMessage.in_reply_to', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='provider', full_name='com.elarian.hera.proto.SentMessage.provider', index=6, number=7, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='app_id', full_name='com.elarian.hera.proto.SentMessage.app_id', index=7, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='updated_at', full_name='com.elarian.hera.proto.SentMessage.updated_at', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='status', full_name='com.elarian.hera.proto.SentMessage.status', index=9, number=10, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reactions', full_name='com.elarian.hera.proto.SentMessage.reactions', index=10, number=11, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='com.elarian.hera.proto.SentMessage.message', index=11, number=12, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cost', full_name='com.elarian.hera.proto.SentMessage.cost', index=12, number=13, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='provider_message_id', full_name='com.elarian.hera.proto.SentMessage.provider_message_id', index=13, number=14, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=789, serialized_end=1556, ) _MESSAGEREACTIONSTATE = _descriptor.Descriptor( name='MessageReactionState', full_name='com.elarian.hera.proto.MessageReactionState', filename=None, file=DESCRIPTOR, containing_type=None, 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='created_at', full_name='com.elarian.hera.proto.MessageReactionState.created_at', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reaction', full_name='com.elarian.hera.proto.MessageReactionState.reaction', index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1559, serialized_end=1688, ) _MESSAGEREPLYTOKEN = _descriptor.Descriptor( name='MessageReplyToken', full_name='com.elarian.hera.proto.MessageReplyToken', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='token', full_name='com.elarian.hera.proto.MessageReplyToken.token', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expires_at', full_name='com.elarian.hera.proto.MessageReplyToken.expires_at', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, 
file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1690, serialized_end=1772, ) _CHANNELMESSAGE = _descriptor.Descriptor( name='ChannelMessage', full_name='com.elarian.hera.proto.ChannelMessage', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='received', full_name='com.elarian.hera.proto.ChannelMessage.received', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='sent', full_name='com.elarian.hera.proto.ChannelMessage.sent', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='entry', full_name='com.elarian.hera.proto.ChannelMessage.entry', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=1775, serialized_end=1914, ) _COMPLETEMESSAGINGSESSION = _descriptor.Descriptor( name='CompleteMessagingSession', full_name='com.elarian.hera.proto.CompleteMessagingSession', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='customer_number', 
full_name='com.elarian.hera.proto.CompleteMessagingSession.customer_number', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='channel_number', full_name='com.elarian.hera.proto.CompleteMessagingSession.channel_number', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='session_id', full_name='com.elarian.hera.proto.CompleteMessagingSession.session_id', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='started_at', full_name='com.elarian.hera.proto.CompleteMessagingSession.started_at', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='duration', full_name='com.elarian.hera.proto.CompleteMessagingSession.duration', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), 
_descriptor.FieldDescriptor( name='app_ids', full_name='com.elarian.hera.proto.CompleteMessagingSession.app_ids', index=5, number=6, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='end_reason', full_name='com.elarian.hera.proto.CompleteMessagingSession.end_reason', index=6, number=7, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cost', full_name='com.elarian.hera.proto.CompleteMessagingSession.cost', index=7, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='provider_session_id', full_name='com.elarian.hera.proto.CompleteMessagingSession.provider_session_id', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1917, serialized_end=2384, ) _BLOCKEDMESSAGINGCHANNELSTATE = _descriptor.Descriptor( name='BlockedMessagingChannelState', full_name='com.elarian.hera.proto.BlockedMessagingChannelState', filename=None, file=DESCRIPTOR, 
containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='customer_number', full_name='com.elarian.hera.proto.BlockedMessagingChannelState.customer_number', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='channel_number', full_name='com.elarian.hera.proto.BlockedMessagingChannelState.channel_number', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reply_token', full_name='com.elarian.hera.proto.BlockedMessagingChannelState.reply_token', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='blocked_at', full_name='com.elarian.hera.proto.BlockedMessagingChannelState.blocked_at', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2387, serialized_end=2666, ) _ACTIVEMESSAGINGCHANNELSTATE = _descriptor.Descriptor( 
name='ActiveMessagingChannelState', full_name='com.elarian.hera.proto.ActiveMessagingChannelState', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='customer_number', full_name='com.elarian.hera.proto.ActiveMessagingChannelState.customer_number', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='channel_number', full_name='com.elarian.hera.proto.ActiveMessagingChannelState.channel_number', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reply_token', full_name='com.elarian.hera.proto.ActiveMessagingChannelState.reply_token', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='allowed_at', full_name='com.elarian.hera.proto.ActiveMessagingChannelState.allowed_at', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], 
serialized_start=2669, serialized_end=2947, ) _INSESSIONMESSAGINGCHANNELSTATE = _descriptor.Descriptor( name='InSessionMessagingChannelState', full_name='com.elarian.hera.proto.InSessionMessagingChannelState', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='customer_number', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.customer_number', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='channel_number', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.channel_number', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reply_token', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.reply_token', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='allowed_at', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.allowed_at', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( 
name='session_id', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.session_id', index=4, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='started_at', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.started_at', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expires_at', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.expires_at', index=6, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='app_ids', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.app_ids', index=7, number=8, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='provider_session_id', full_name='com.elarian.hera.proto.InSessionMessagingChannelState.provider_session_id', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2950, serialized_end=3423, ) _MESSAGINGCHANNELSTATE = _descriptor.Descriptor( name='MessagingChannelState', full_name='com.elarian.hera.proto.MessagingChannelState', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='blocked', full_name='com.elarian.hera.proto.MessagingChannelState.blocked', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='active', full_name='com.elarian.hera.proto.MessagingChannelState.active', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='in_session', full_name='com.elarian.hera.proto.MessagingChannelState.in_session', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='state', full_name='com.elarian.hera.proto.MessagingChannelState.state', index=0, containing_type=None, create_key=_descriptor._internal_create_key, 
fields=[]), ], serialized_start=3426, serialized_end=3680, ) _MESSAGINGSTATE = _descriptor.Descriptor( name='MessagingState', full_name='com.elarian.hera.proto.MessagingState', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='channels', full_name='com.elarian.hera.proto.MessagingState.channels', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='messages', full_name='com.elarian.hera.proto.MessagingState.messages', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='sessions', full_name='com.elarian.hera.proto.MessagingState.sessions', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=3683, serialized_end=3890, ) _RECEIVEDMESSAGE.fields_by_name['customer_number'].message_type = common__model__pb2._CUSTOMERNUMBER _RECEIVEDMESSAGE.fields_by_name['channel_number'].message_type = messaging__model__pb2._MESSAGINGCHANNELNUMBER _RECEIVEDMESSAGE.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
_RECEIVEDMESSAGE.fields_by_name['session_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _RECEIVEDMESSAGE.fields_by_name['in_reply_to'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _RECEIVEDMESSAGE.fields_by_name['provider'].enum_type = common__model__pb2._CHANNELNUMBERPROVIDER _RECEIVEDMESSAGE.fields_by_name['app_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _RECEIVEDMESSAGE.fields_by_name['parts'].message_type = messaging__model__pb2._INBOUNDMESSAGEBODY _RECEIVEDMESSAGE.fields_by_name['cost'].message_type = common__model__pb2._CASH _RECEIVEDMESSAGE.fields_by_name['provider_message_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _SENTMESSAGE.fields_by_name['customer_number'].message_type = common__model__pb2._CUSTOMERNUMBER _SENTMESSAGE.fields_by_name['channel_number'].message_type = messaging__model__pb2._MESSAGINGCHANNELNUMBER _SENTMESSAGE.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _SENTMESSAGE.fields_by_name['session_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _SENTMESSAGE.fields_by_name['in_reply_to'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _SENTMESSAGE.fields_by_name['provider'].enum_type = common__model__pb2._CHANNELNUMBERPROVIDER _SENTMESSAGE.fields_by_name['app_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _SENTMESSAGE.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _SENTMESSAGE.fields_by_name['status'].enum_type = messaging__model__pb2._MESSAGEDELIVERYSTATUS _SENTMESSAGE.fields_by_name['reactions'].message_type = _MESSAGEREACTIONSTATE _SENTMESSAGE.fields_by_name['message'].message_type = messaging__model__pb2._OUTBOUNDMESSAGE _SENTMESSAGE.fields_by_name['cost'].message_type = common__model__pb2._CASH _SENTMESSAGE.fields_by_name['provider_message_id'].message_type = 
google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _MESSAGEREACTIONSTATE.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _MESSAGEREACTIONSTATE.fields_by_name['reaction'].enum_type = messaging__model__pb2._MESSAGEREACTION _MESSAGEREPLYTOKEN.fields_by_name['expires_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _CHANNELMESSAGE.fields_by_name['received'].message_type = _RECEIVEDMESSAGE _CHANNELMESSAGE.fields_by_name['sent'].message_type = _SENTMESSAGE _CHANNELMESSAGE.oneofs_by_name['entry'].fields.append( _CHANNELMESSAGE.fields_by_name['received']) _CHANNELMESSAGE.fields_by_name['received'].containing_oneof = _CHANNELMESSAGE.oneofs_by_name['entry'] _CHANNELMESSAGE.oneofs_by_name['entry'].fields.append( _CHANNELMESSAGE.fields_by_name['sent']) _CHANNELMESSAGE.fields_by_name['sent'].containing_oneof = _CHANNELMESSAGE.oneofs_by_name['entry'] _COMPLETEMESSAGINGSESSION.fields_by_name['customer_number'].message_type = common__model__pb2._CUSTOMERNUMBER _COMPLETEMESSAGINGSESSION.fields_by_name['channel_number'].message_type = messaging__model__pb2._MESSAGINGCHANNELNUMBER _COMPLETEMESSAGINGSESSION.fields_by_name['started_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _COMPLETEMESSAGINGSESSION.fields_by_name['duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _COMPLETEMESSAGINGSESSION.fields_by_name['end_reason'].enum_type = messaging__model__pb2._MESSAGINGSESSIONENDREASON _COMPLETEMESSAGINGSESSION.fields_by_name['cost'].message_type = common__model__pb2._CASH _COMPLETEMESSAGINGSESSION.fields_by_name['provider_session_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _BLOCKEDMESSAGINGCHANNELSTATE.fields_by_name['customer_number'].message_type = common__model__pb2._CUSTOMERNUMBER _BLOCKEDMESSAGINGCHANNELSTATE.fields_by_name['channel_number'].message_type = messaging__model__pb2._MESSAGINGCHANNELNUMBER 
_BLOCKEDMESSAGINGCHANNELSTATE.fields_by_name['reply_token'].message_type = _MESSAGEREPLYTOKEN _BLOCKEDMESSAGINGCHANNELSTATE.fields_by_name['blocked_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _ACTIVEMESSAGINGCHANNELSTATE.fields_by_name['customer_number'].message_type = common__model__pb2._CUSTOMERNUMBER _ACTIVEMESSAGINGCHANNELSTATE.fields_by_name['channel_number'].message_type = messaging__model__pb2._MESSAGINGCHANNELNUMBER _ACTIVEMESSAGINGCHANNELSTATE.fields_by_name['reply_token'].message_type = _MESSAGEREPLYTOKEN _ACTIVEMESSAGINGCHANNELSTATE.fields_by_name['allowed_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _INSESSIONMESSAGINGCHANNELSTATE.fields_by_name['customer_number'].message_type = common__model__pb2._CUSTOMERNUMBER _INSESSIONMESSAGINGCHANNELSTATE.fields_by_name['channel_number'].message_type = messaging__model__pb2._MESSAGINGCHANNELNUMBER _INSESSIONMESSAGINGCHANNELSTATE.fields_by_name['reply_token'].message_type = _MESSAGEREPLYTOKEN _INSESSIONMESSAGINGCHANNELSTATE.fields_by_name['allowed_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _INSESSIONMESSAGINGCHANNELSTATE.fields_by_name['started_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _INSESSIONMESSAGINGCHANNELSTATE.fields_by_name['expires_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _INSESSIONMESSAGINGCHANNELSTATE.fields_by_name['provider_session_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE _MESSAGINGCHANNELSTATE.fields_by_name['blocked'].message_type = _BLOCKEDMESSAGINGCHANNELSTATE _MESSAGINGCHANNELSTATE.fields_by_name['active'].message_type = _ACTIVEMESSAGINGCHANNELSTATE _MESSAGINGCHANNELSTATE.fields_by_name['in_session'].message_type = _INSESSIONMESSAGINGCHANNELSTATE _MESSAGINGCHANNELSTATE.oneofs_by_name['state'].fields.append( _MESSAGINGCHANNELSTATE.fields_by_name['blocked']) _MESSAGINGCHANNELSTATE.fields_by_name['blocked'].containing_oneof = 
_MESSAGINGCHANNELSTATE.oneofs_by_name['state'] _MESSAGINGCHANNELSTATE.oneofs_by_name['state'].fields.append( _MESSAGINGCHANNELSTATE.fields_by_name['active']) _MESSAGINGCHANNELSTATE.fields_by_name['active'].containing_oneof = _MESSAGINGCHANNELSTATE.oneofs_by_name['state'] _MESSAGINGCHANNELSTATE.oneofs_by_name['state'].fields.append( _MESSAGINGCHANNELSTATE.fields_by_name['in_session']) _MESSAGINGCHANNELSTATE.fields_by_name['in_session'].containing_oneof = _MESSAGINGCHANNELSTATE.oneofs_by_name['state'] _MESSAGINGSTATE.fields_by_name['channels'].message_type = _MESSAGINGCHANNELSTATE _MESSAGINGSTATE.fields_by_name['messages'].message_type = _CHANNELMESSAGE _MESSAGINGSTATE.fields_by_name['sessions'].message_type = _COMPLETEMESSAGINGSESSION DESCRIPTOR.message_types_by_name['ReceivedMessage'] = _RECEIVEDMESSAGE DESCRIPTOR.message_types_by_name['SentMessage'] = _SENTMESSAGE DESCRIPTOR.message_types_by_name['MessageReactionState'] = _MESSAGEREACTIONSTATE DESCRIPTOR.message_types_by_name['MessageReplyToken'] = _MESSAGEREPLYTOKEN DESCRIPTOR.message_types_by_name['ChannelMessage'] = _CHANNELMESSAGE DESCRIPTOR.message_types_by_name['CompleteMessagingSession'] = _COMPLETEMESSAGINGSESSION DESCRIPTOR.message_types_by_name['BlockedMessagingChannelState'] = _BLOCKEDMESSAGINGCHANNELSTATE DESCRIPTOR.message_types_by_name['ActiveMessagingChannelState'] = _ACTIVEMESSAGINGCHANNELSTATE DESCRIPTOR.message_types_by_name['InSessionMessagingChannelState'] = _INSESSIONMESSAGINGCHANNELSTATE DESCRIPTOR.message_types_by_name['MessagingChannelState'] = _MESSAGINGCHANNELSTATE DESCRIPTOR.message_types_by_name['MessagingState'] = _MESSAGINGSTATE _sym_db.RegisterFileDescriptor(DESCRIPTOR) ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), { 'DESCRIPTOR' : _RECEIVEDMESSAGE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.ReceivedMessage) }) _sym_db.RegisterMessage(ReceivedMessage) SentMessage = 
_reflection.GeneratedProtocolMessageType('SentMessage', (_message.Message,), { 'DESCRIPTOR' : _SENTMESSAGE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.SentMessage) }) _sym_db.RegisterMessage(SentMessage) MessageReactionState = _reflection.GeneratedProtocolMessageType('MessageReactionState', (_message.Message,), { 'DESCRIPTOR' : _MESSAGEREACTIONSTATE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.MessageReactionState) }) _sym_db.RegisterMessage(MessageReactionState) MessageReplyToken = _reflection.GeneratedProtocolMessageType('MessageReplyToken', (_message.Message,), { 'DESCRIPTOR' : _MESSAGEREPLYTOKEN, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.MessageReplyToken) }) _sym_db.RegisterMessage(MessageReplyToken) ChannelMessage = _reflection.GeneratedProtocolMessageType('ChannelMessage', (_message.Message,), { 'DESCRIPTOR' : _CHANNELMESSAGE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.ChannelMessage) }) _sym_db.RegisterMessage(ChannelMessage) CompleteMessagingSession = _reflection.GeneratedProtocolMessageType('CompleteMessagingSession', (_message.Message,), { 'DESCRIPTOR' : _COMPLETEMESSAGINGSESSION, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.CompleteMessagingSession) }) _sym_db.RegisterMessage(CompleteMessagingSession) BlockedMessagingChannelState = _reflection.GeneratedProtocolMessageType('BlockedMessagingChannelState', (_message.Message,), { 'DESCRIPTOR' : _BLOCKEDMESSAGINGCHANNELSTATE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.BlockedMessagingChannelState) }) _sym_db.RegisterMessage(BlockedMessagingChannelState) ActiveMessagingChannelState = _reflection.GeneratedProtocolMessageType('ActiveMessagingChannelState', (_message.Message,), { 
'DESCRIPTOR' : _ACTIVEMESSAGINGCHANNELSTATE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.ActiveMessagingChannelState) }) _sym_db.RegisterMessage(ActiveMessagingChannelState) InSessionMessagingChannelState = _reflection.GeneratedProtocolMessageType('InSessionMessagingChannelState', (_message.Message,), { 'DESCRIPTOR' : _INSESSIONMESSAGINGCHANNELSTATE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.InSessionMessagingChannelState) }) _sym_db.RegisterMessage(InSessionMessagingChannelState) MessagingChannelState = _reflection.GeneratedProtocolMessageType('MessagingChannelState', (_message.Message,), { 'DESCRIPTOR' : _MESSAGINGCHANNELSTATE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.MessagingChannelState) }) _sym_db.RegisterMessage(MessagingChannelState) MessagingState = _reflection.GeneratedProtocolMessageType('MessagingState', (_message.Message,), { 'DESCRIPTOR' : _MESSAGINGSTATE, '__module__' : 'messaging_state_pb2' # @@protoc_insertion_point(class_scope:com.elarian.hera.proto.MessagingState) }) _sym_db.RegisterMessage(MessagingState) # @@protoc_insertion_point(module_scope)
56.359002
5,681
0.785617
6,535
51,963
5.893191
0.043917
0.041961
0.063643
0.060189
0.821692
0.786015
0.773136
0.714894
0.694745
0.686747
0
0.033783
0.100514
51,963
921
5,682
56.420195
0.79018
0.021496
0
0.680604
1
0.069686
0.198642
0.155042
0
0
0
0
0
1
0
false
0
0.010453
0
0.010453
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f6ff0457345c56cfef202075f7cdd6268f00208b
2,045
py
Python
app/demo/countries/migrations/0017_country1_country2_country3_country4_country5_country6.py
sesostris/django-material-admin
f6678e57286bd871a820b235f868873d5f86d649
[ "MIT" ]
270
2018-09-14T07:55:04.000Z
2022-03-31T13:12:41.000Z
app/demo/countries/migrations/0017_country1_country2_country3_country4_country5_country6.py
sesostris/django-material-admin
f6678e57286bd871a820b235f868873d5f86d649
[ "MIT" ]
107
2019-03-26T20:35:23.000Z
2022-03-15T15:34:38.000Z
app/demo/countries/migrations/0017_country1_country2_country3_country4_country5_country6.py
sesostris/django-material-admin
f6678e57286bd871a820b235f868873d5f86d649
[ "MIT" ]
66
2018-11-05T13:07:14.000Z
2022-03-31T17:17:22.000Z
# Generated by Django 3.0 on 2019-12-12 20:35 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('countries', '0016_person_smart'), ] operations = [ migrations.CreateModel( name='Country1', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, verbose_name='Name')), ], ), migrations.CreateModel( name='Country2', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, verbose_name='Name')), ], ), migrations.CreateModel( name='Country3', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, verbose_name='Name')), ], ), migrations.CreateModel( name='Country4', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, verbose_name='Name')), ], ), migrations.CreateModel( name='Country5', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, verbose_name='Name')), ], ), migrations.CreateModel( name='Country6', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, verbose_name='Name')), ], ), ]
36.517857
114
0.547677
195
2,045
5.579487
0.235897
0.121324
0.137868
0.126838
0.798713
0.798713
0.798713
0.798713
0.798713
0.798713
0
0.02555
0.311002
2,045
55
115
37.181818
0.746629
0.021027
0
0.734694
1
0
0.073
0
0
0
0
0
0
1
0
false
0
0.020408
0
0.081633
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
121d4d555b6d898b4ac327ea5819c50ff4d52f5d
135,668
py
Python
test/augmentables/test_normalization.py
MinhTran0311/imgaug_Minh
0266ef52d61a3a892009ae5718fd5d41d8156d6a
[ "MIT" ]
12,895
2016-11-26T05:59:21.000Z
2022-03-31T07:03:40.000Z
test/augmentables/test_normalization.py
MinhTran0311/imgaug_Minh
0266ef52d61a3a892009ae5718fd5d41d8156d6a
[ "MIT" ]
783
2016-11-24T12:54:26.000Z
2022-03-25T11:27:10.000Z
test/augmentables/test_normalization.py
MinhTran0311/imgaug_Minh
0266ef52d61a3a892009ae5718fd5d41d8156d6a
[ "MIT" ]
2,504
2016-12-10T09:55:43.000Z
2022-03-31T14:29:34.000Z
from __future__ import print_function, division, absolute_import import sys # unittest only added in 3.4 self.subTest() if sys.version_info[0] < 3 or sys.version_info[1] < 4: import unittest2 as unittest else: import unittest # unittest.mock is not available in 2.7 (though unittest2 might contain it?) try: import unittest.mock as mock except ImportError: import mock import numpy as np import six.moves as sm import imgaug as ia import imgaug.augmentables.normalization as normalization from imgaug.testutils import reseed # TODO split up tests here class TestNormalization(unittest.TestCase): def setUp(self): reseed() def test_invert_normalize_images(self): assert normalization.invert_normalize_images(None, None) is None arr = np.zeros((1, 4, 4, 3), dtype=np.uint8) arr_old = np.zeros((1, 4, 4, 3), dtype=np.uint8) observed = normalization.invert_normalize_images(arr, arr_old) assert ia.is_np_array(observed) assert observed.shape == (1, 4, 4, 3) assert observed.dtype.name == "uint8" arr = np.zeros((1, 4, 4, 1), dtype=np.uint8) arr_old = np.zeros((4, 4), dtype=np.uint8) observed = normalization.invert_normalize_images(arr, arr_old) assert ia.is_np_array(observed) assert observed.shape == (4, 4) assert observed.dtype.name == "uint8" arr = np.zeros((1, 4, 4, 1), dtype=np.uint8) arr_old = np.zeros((1, 4, 4), dtype=np.uint8) observed = normalization.invert_normalize_images(arr, arr_old) assert ia.is_np_array(observed) assert observed.shape == (1, 4, 4) assert observed.dtype.name == "uint8" images = [] images_old = [] observed = normalization.invert_normalize_images(images, images_old) assert isinstance(observed, list) assert len(observed) == 0 arr1 = np.zeros((4, 4, 1), dtype=np.uint8) arr2 = np.zeros((5, 5, 3), dtype=np.uint8) arr1_old = np.zeros((4, 4), dtype=np.uint8) arr2_old = np.zeros((5, 5, 3), dtype=np.uint8) observed = normalization.invert_normalize_images([arr1, arr2], [arr1_old, arr2_old]) assert isinstance(observed, list) assert len(observed) == 2 assert 
ia.is_np_array(observed[0]) assert ia.is_np_array(observed[1]) assert observed[0].shape == (4, 4) assert observed[1].shape == (5, 5, 3) assert observed[0].dtype.name == "uint8" assert observed[1].dtype.name == "uint8" # --------- # images turned to list during augmentation # --------- # different shapes, each 3D images = [np.zeros((3, 4, 1), dtype=np.uint8), np.zeros((4, 3, 1), dtype=np.uint8)] images_old = np.zeros((2, 4, 4, 1), dtype=np.uint8) observed = normalization.invert_normalize_images(images, images_old) assert isinstance(observed, list) assert len(observed) == 2 assert observed[0] is images[0] assert observed[1] is images[1] # different shapes, each 2D images = [np.zeros((3, 4, 1), dtype=np.uint8), np.zeros((4, 3, 1), dtype=np.uint8)] images_old = np.zeros((2, 4, 4), dtype=np.uint8) observed = normalization.invert_normalize_images(images, images_old) assert isinstance(observed, list) assert len(observed) == 2 assert observed[0].shape == (3, 4) assert observed[1].shape == (4, 3) # same shapes, each 3D images = [np.zeros((3, 4, 1), dtype=np.uint8), np.zeros((3, 4, 1), dtype=np.uint8)] images_old = np.zeros((2, 4, 4, 1), dtype=np.uint8) observed = normalization.invert_normalize_images(images, images_old) # assert ia.is_np_array(observed) # assert observed.shape == (2, 3, 4, 1) assert isinstance(observed, list) assert len(observed) == 2 assert observed[0] is images[0] assert observed[1] is images[1] # same shapes, each 2D images = [np.zeros((3, 4, 1), dtype=np.uint8), np.zeros((3, 4, 1), dtype=np.uint8)] images_old = np.zeros((2, 4, 4), dtype=np.uint8) observed = normalization.invert_normalize_images(images, images_old) # assert ia.is_np_array(observed) # assert observed.shape == (2, 3, 4) assert isinstance(observed, list) assert len(observed) == 2 assert observed[0].shape == (3, 4) assert observed[1].shape == (3, 4) # single item in list images = [np.zeros((3, 4, 1), dtype=np.uint8)] images_old = np.zeros((1, 4, 4), dtype=np.uint8) observed = 
normalization.invert_normalize_images(images, images_old) # assert ia.is_np_array(observed) # assert observed.shape == (1, 3, 4) assert isinstance(observed, list) assert len(observed) == 1 assert observed[0].shape == (3, 4) # single item in list, original was 2D images = [np.zeros((3, 4, 1), dtype=np.uint8)] images_old = np.zeros((4, 4), dtype=np.uint8) observed = normalization.invert_normalize_images(images, images_old) # assert ia.is_np_array(observed) # assert observed.shape == (3, 4) assert isinstance(observed, list) assert len(observed) == 1 assert observed[0].shape == (3, 4) with self.assertRaises(ValueError): normalization.invert_normalize_images(False, False) def test_invert_normalize_heatmaps(self): def _norm_and_invert(heatmaps, images): return normalization.invert_normalize_heatmaps( normalization.normalize_heatmaps(heatmaps, shapes=images), heatmaps ) # ---- # None # ---- observed = normalization.invert_normalize_heatmaps(None, None) assert observed is None # ---- # array # ---- for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = np.zeros((1, 1, 1, 1), dtype=np.float32) + 0.1 after = _norm_and_invert(before, images=images) assert ia.is_np_array(after) assert after.shape == (1, 1, 1, 1) assert after.dtype.name == "float32" assert np.allclose(after, before) # ---- # single HeatmapsOnImage # ---- before = ia.HeatmapsOnImage( np.zeros((1, 1, 1), dtype=np.float32) + 0.1, shape=(1, 1, 3)) after = _norm_and_invert(before, images=None) assert isinstance(after, ia.HeatmapsOnImage) assert after.shape == before.shape assert np.allclose(after.arr_0to1, before.arr_0to1) # ---- # empty iterable # ---- before = [] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert len(after) == 0 # ---- # iterable of arrays # ---- for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = [np.zeros((1, 1, 1), dtype=np.float32) + 0.1] after = 
_norm_and_invert(before, images=images) assert isinstance(after, list) assert len(after) == 1 assert after[0].shape == (1, 1, 1) assert after[0].dtype.name == "float32" assert np.allclose(after[0], before[0]) # ---- # iterable of HeatmapsOnImage # ---- before = [ia.HeatmapsOnImage( np.zeros((1, 1, 1), dtype=np.float32) + 0.1, shape=(1, 1, 3))] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert isinstance(after[0], ia.HeatmapsOnImage) assert after[0].shape == before[0].shape assert np.allclose(after[0].arr_0to1, before[0].arr_0to1) def test_invert_normalize_segmentation_maps(self): def _norm_and_invert(segmaps, images): return normalization.invert_normalize_segmentation_maps( normalization.normalize_segmentation_maps( segmaps, shapes=images), segmaps ) # ---- # None # ---- observed = normalization.invert_normalize_segmentation_maps(None, None) assert observed is None # ---- # array # ---- for dt in [np.dtype("int32"), np.dtype("uint16"), np.dtype(bool)]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 3), dtype=np.uint8)]: before = np.ones((1, 1, 1, 1), dtype=dt) after = _norm_and_invert(before, images=images) assert ia.is_np_array(after) assert after.shape == (1, 1, 1, 1) assert after.dtype.name == dt.name assert np.array_equal(after, before) # ---- # single SegmentationMapsOnImage # ---- before = ia.SegmentationMapsOnImage( np.zeros((1, 1, 1), dtype=np.int32) + 1, shape=(1, 1, 3)) after = _norm_and_invert(before, images=None) assert isinstance(after, ia.SegmentationMapsOnImage) assert after.shape == before.shape assert np.array_equal(after.arr, before.arr) # ---- # empty iterable # ---- before = [] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert len(after) == 0 # ---- # iterable of arrays # ---- for dt in [np.dtype("int32"), np.dtype("uint16"), np.dtype(bool)]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = 
[np.ones((1, 1, 1), dtype=dt)] after = _norm_and_invert(before, images=images) assert isinstance(after, list) assert len(after) == 1 assert after[0].shape == (1, 1, 1) assert after[0].dtype.name == dt.name assert np.array_equal(after[0], before[0]) # ---- # iterable of SegmentationMapsOnImage # ---- before = [ia.SegmentationMapsOnImage( np.zeros((1, 1, 1), dtype=np.int32) + 1, shape=(1, 1, 3))] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert isinstance(after[0], ia.SegmentationMapsOnImage) assert after[0].shape == before[0].shape assert np.allclose(after[0].arr, before[0].arr) def test_invert_normalize_keypoints(self): def _norm_and_invert(kps, images): return normalization.invert_normalize_keypoints( normalization.normalize_keypoints( kps, shapes=images), kps ) # ---- # None # ---- observed = normalization.invert_normalize_keypoints(None, None) assert observed is None # ---- # array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = np.zeros((1, 1, 2), dtype=dt) + 1 after = _norm_and_invert(before, images=images) assert ia.is_np_array(after) assert after.shape == (1, 1, 2) assert after.dtype.name == dt.name assert np.allclose(after, 1) # ---- # (x,y) # ---- before = (1, 2) after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, tuple) assert after == (1, 2) # ---- # single Keypoint instance # ---- before = ia.Keypoint(x=1, y=2) after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, ia.Keypoint) assert after.x == 1 assert after.y == 2 # ---- # single KeypointsOnImage instance # ---- before = ia.KeypointsOnImage([ia.Keypoint(x=1, y=2)], shape=(1, 1, 3)) after = _norm_and_invert(before, images=None) assert isinstance(after, ia.KeypointsOnImage) assert len(after.keypoints) == 1 assert after.keypoints[0].x == 
1 assert after.keypoints[0].y == 2 assert after.shape == (1, 1, 3) # ---- # empty iterable # ---- before = [] after = _norm_and_invert(before, images=None) assert after == [] # ---- # iterable of array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = np.zeros((1, 1, 2), dtype=dt) + 1 after = _norm_and_invert(before, images=images) assert ia.is_np_array(after) assert after.shape == (1, 1, 2) assert after.dtype.name == dt.name assert np.allclose(after, 1) # ---- # iterable of (x,y) # ---- before = [(1, 2), (3, 4)] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert after == [(1, 2), (3, 4)] # ---- # iterable of Keypoint # ---- before = [ia.Keypoint(x=1, y=2), ia.Keypoint(x=3, y=4)] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert isinstance(after[0], ia.Keypoint) assert isinstance(after[1], ia.Keypoint) assert after[0].x == 1 assert after[0].y == 2 assert after[1].x == 3 assert after[1].y == 4 # ---- # iterable of KeypointsOnImage # ---- before = [ ia.KeypointsOnImage([ia.Keypoint(x=1, y=2)], shape=(1, 1, 3)), ia.KeypointsOnImage([ia.Keypoint(x=3, y=4)], shape=(1, 1, 3)), ] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert len(after) == 2 assert isinstance(after[0], ia.KeypointsOnImage) assert isinstance(after[1], ia.KeypointsOnImage) assert after[0].keypoints[0].x == 1 assert after[0].keypoints[0].y == 2 assert after[1].keypoints[0].x == 3 assert after[1].keypoints[0].y == 4 # ---- # iterable of empty interables # ---- before = [[]] after = _norm_and_invert(before, [np.zeros((1, 1, 3), dtype=np.uint8)]) assert after == [[]] # ---- # iterable of iterable of (x,y) # ---- before = [ [(1, 2), (3, 4)], [(5, 6), (7, 8)] ] after = _norm_and_invert(before, 
images=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert isinstance(after[0], list) assert isinstance(after[1], list) assert after[0][0][0] == 1 assert after[0][0][1] == 2 assert after[0][1][0] == 3 assert after[0][1][1] == 4 assert after[1][0][0] == 5 assert after[1][0][1] == 6 assert after[1][1][0] == 7 assert after[1][1][1] == 8 # ---- # iterable of iterable of Keypoint # ---- before = [ [ia.Keypoint(x=1, y=2), ia.Keypoint(x=3, y=4)], [ia.Keypoint(x=5, y=6), ia.Keypoint(x=7, y=8)] ] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert isinstance(after[0], list) assert isinstance(after[1], list) assert after[0][0].x == 1 assert after[0][0].y == 2 assert after[0][1].x == 3 assert after[0][1].y == 4 assert after[1][0].x == 5 assert after[1][0].y == 6 assert after[1][1].x == 7 assert after[1][1].y == 8 def test_invert_normalize_bounding_boxes(self): def _norm_and_invert(bbs, images): return normalization.invert_normalize_bounding_boxes( normalization.normalize_bounding_boxes( bbs, shapes=images), bbs ) # ---- # None # ---- observed = normalization.invert_normalize_bounding_boxes(None, None) assert observed is None # ---- # array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = np.zeros((1, 1, 4), dtype=dt) + 1 after = _norm_and_invert(before, images=images) assert ia.is_np_array(after) assert after.shape == (1, 1, 4) assert after.dtype.name == dt.name assert np.allclose(after, 1) # ---- # (x1,y1,x2,y2) # ---- before = (1, 2, 3, 4) after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, tuple) assert after == (1, 2, 3, 4) # ---- # single BoundingBox instance # ---- before = 
ia.BoundingBox(x1=1, y1=2, x2=3, y2=4) after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, ia.BoundingBox) assert after.x1 == 1 assert after.y1 == 2 assert after.x2 == 3 assert after.y2 == 4 # ---- # single BoundingBoxesOnImage instance # ---- before = ia.BoundingBoxesOnImage( [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)], shape=(1, 1, 3)) after = _norm_and_invert(before, images=None) assert isinstance(after, ia.BoundingBoxesOnImage) assert len(after.bounding_boxes) == 1 assert after.bounding_boxes[0].x1 == 1 assert after.bounding_boxes[0].y1 == 2 assert after.bounding_boxes[0].x2 == 3 assert after.bounding_boxes[0].y2 == 4 assert after.shape == (1, 1, 3) # ---- # empty iterable # ---- before = [] after = _norm_and_invert(before, images=None) assert after == [] # ---- # iterable of array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = [np.zeros((1, 4), dtype=dt) + 1] after = _norm_and_invert(before, images=images) assert isinstance(after, list) assert len(after) == 1 assert ia.is_np_array(after[0]) assert after[0].shape == (1, 4) assert after[0].dtype.name == dt.name assert np.allclose(after[0], 1) # ---- # iterable of (x1,y1,x2,y2) # ---- before = [(1, 2, 3, 4), (5, 6, 7, 8)] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert after == [(1, 2, 3, 4), (5, 6, 7, 8)] # ---- # iterable of BoundingBox # ---- before = [ ia.BoundingBox(x1=1, y1=2, x2=3, y2=4), ia.BoundingBox(x1=5, y1=6, x2=7, y2=8) ] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert isinstance(after[0], ia.BoundingBox) assert isinstance(after[1], ia.BoundingBox) assert after[0].x1 == 1 assert after[0].y1 == 2 assert after[0].x2 == 3 assert after[0].y2 == 4 assert 
after[1].x1 == 5 assert after[1].y1 == 6 assert after[1].x2 == 7 assert after[1].y2 == 8 # ---- # iterable of BoundingBoxesOnImage # ---- before = [ ia.BoundingBoxesOnImage( [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)], shape=(1, 1, 3)), ia.BoundingBoxesOnImage( [ia.BoundingBox(x1=5, y1=6, x2=7, y2=8)], shape=(1, 1, 3)) ] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert len(after) == 2 assert isinstance(after[0], ia.BoundingBoxesOnImage) assert isinstance(after[1], ia.BoundingBoxesOnImage) assert isinstance(after[0].bounding_boxes[0], ia.BoundingBox) assert isinstance(after[1].bounding_boxes[0], ia.BoundingBox) assert after[0].bounding_boxes[0].x1 == 1 assert after[0].bounding_boxes[0].y1 == 2 assert after[0].bounding_boxes[0].x2 == 3 assert after[0].bounding_boxes[0].y2 == 4 assert after[1].bounding_boxes[0].x1 == 5 assert after[1].bounding_boxes[0].y1 == 6 assert after[1].bounding_boxes[0].x2 == 7 assert after[1].bounding_boxes[0].y2 == 8 assert after[0].shape == (1, 1, 3) assert after[1].shape == (1, 1, 3) # ---- # iterable of empty interables # ---- before = [[]] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert after == [[]] # ---- # iterable of iterable of (x1,y1,x2,y2) # ---- before = [ [(1, 2, 3, 4)], [(5, 6, 7, 8), (9, 10, 11, 12)] ] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert after == [ [(1, 2, 3, 4)], [(5, 6, 7, 8), (9, 10, 11, 12)] ] # ---- # iterable of iterable of Keypoint # ---- before = [ [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4), ia.BoundingBox(x1=5, y1=6, x2=7, y2=8)], [ia.BoundingBox(x1=9, y1=10, x2=11, y2=12), ia.BoundingBox(x1=13, y1=14, x2=15, y2=16)] ] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert isinstance(after[0], list) assert isinstance(after[1], list) assert 
len(after[0]) == 2 assert len(after[1]) == 2 assert after[0][0].x1 == 1 assert after[0][0].y1 == 2 assert after[0][0].x2 == 3 assert after[0][0].y2 == 4 assert after[0][1].x1 == 5 assert after[0][1].y1 == 6 assert after[0][1].x2 == 7 assert after[0][1].y2 == 8 assert after[1][0].x1 == 9 assert after[1][0].y1 == 10 assert after[1][0].x2 == 11 assert after[1][0].y2 == 12 assert after[1][1].x1 == 13 assert after[1][1].y1 == 14 assert after[1][1].x2 == 15 assert after[1][1].y2 == 16 def test_invert_normalize_polygons(self): def _norm_and_invert(polys, images): return normalization.invert_normalize_polygons( normalization.normalize_polygons( polys, shapes=images), polys ) coords1 = [(0, 0), (10, 0), (10, 10)] coords2 = [(5, 5), (15, 5), (15, 15)] coords3 = [(0, 0), (10, 0), (10, 10), (0, 10)] coords4 = [(5, 5), (15, 5), (15, 15), (5, 15)] coords1_kps = [ia.Keypoint(x=x, y=y) for x, y in coords1] coords2_kps = [ia.Keypoint(x=x, y=y) for x, y in coords2] coords3_kps = [ia.Keypoint(x=x, y=y) for x, y in coords3] coords4_kps = [ia.Keypoint(x=x, y=y) for x, y in coords4] coords1_arr = np.float32(coords1) coords2_arr = np.float32(coords2) coords3_arr = np.float32(coords3) coords4_arr = np.float32(coords4) # ---- # None # ---- observed = normalization.invert_normalize_polygons(None, None) assert observed is None # ---- # array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = coords1_arr[np.newaxis, np.newaxis, ...].astype(dt) after = _norm_and_invert(before, images=images) assert ia.is_np_array(after) assert after.shape == (1, 1, 3, 2) assert after.dtype.name == dt.name assert np.allclose(after, coords1_arr[np.newaxis, np.newaxis, ...]) before = np.tile( coords1_arr[np.newaxis, np.newaxis, ...].astype(dt), (1, 5, 1, 1) ) after = _norm_and_invert(before, images=images) assert ia.is_np_array(after) assert after.shape == (1, 5, 3, 2) assert 
after.dtype.name == dt.name assert np.allclose(after[0], coords1_arr[np.newaxis, ...]) # ---- # single Polygon instance # ---- before = ia.Polygon(coords1) after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, ia.Polygon) assert after.exterior_almost_equals(coords1) # ---- # single PolygonsOnImage instance # ---- before = ia.PolygonsOnImage([ia.Polygon(coords1)], shape=(1, 1, 3)) after = _norm_and_invert(before, images=None) assert isinstance(after, ia.PolygonsOnImage) assert len(after.polygons) == 1 assert after.polygons[0].exterior_almost_equals(coords1) assert after.shape == (1, 1, 3) # ---- # empty iterable # ---- before = [] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert after == [] # ---- # iterable of array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = [coords1_arr[np.newaxis, ...].astype(dt)] after = _norm_and_invert(before, images=images) assert isinstance(after, list) assert len(after) == 1 assert ia.is_np_array(after[0]) assert after[0].shape == (1, 3, 2) assert after[0].dtype.name == dt.name assert np.allclose(after[0], coords1_arr[np.newaxis, ...]) before = [np.tile( coords1_arr[np.newaxis, ...].astype(dt), (5, 1, 1) )] after = _norm_and_invert(before, images=images) assert isinstance(after, list) assert len(after) == 1 assert ia.is_np_array(after[0]) assert after[0].shape == (5, 3, 2) assert after[0].dtype.name == dt.name assert np.allclose(after[0][0], coords1_arr) # ---- # iterable of (x,y) # ---- before = coords1 after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert after == coords1 # ---- # iterable of Keypoint # ---- before = coords1_kps after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) 
== len(coords1_kps) assert all([kp_after.x == kp_before.x and kp_after.y == kp_before.y for kp_after, kp_before in zip(after, coords1_kps)]) # ---- # iterable of Polygon # ---- before = [ia.Polygon(coords1), ia.Polygon(coords2)] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert after[0].exterior_almost_equals(coords1) assert after[1].exterior_almost_equals(coords2) # ---- # iterable of PolygonsOnImage # ---- before = [ ia.PolygonsOnImage([ia.Polygon(coords1)], shape=(1, 1, 3)), ia.PolygonsOnImage([ia.Polygon(coords2)], shape=(2, 1, 3)) ] after = _norm_and_invert(before, images=None) assert isinstance(after, list) assert len(after) == 2 assert isinstance(after[0], ia.PolygonsOnImage) assert isinstance(after[1], ia.PolygonsOnImage) assert after[0].polygons[0].exterior_almost_equals(coords1) assert after[1].polygons[0].exterior_almost_equals(coords2) assert after[0].shape == (1, 1, 3) assert after[1].shape == (2, 1, 3) # ---- # iterable of empty interables # ---- before = [[]] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert after == [[]] # ---- # iterable of iterable of array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: for images in [[np.zeros((1, 1, 3), dtype=np.uint8)], np.zeros((1, 1, 1, 3), dtype=np.uint8)]: before = [[coords1_arr.astype(dt)]] after = _norm_and_invert(before, images=images) assert isinstance(after, list) assert len(after) == 1 assert isinstance(after[0], list) assert len(after[0]) == 1 assert ia.is_np_array(after[0][0]) assert after[0][0].shape == (3, 2) assert after[0][0].dtype.name == dt.name assert np.allclose(after[0][0], coords1_arr) before = [[coords1_arr.astype(dt) for _ in sm.xrange(5)]] after = _norm_and_invert(before, images=images) assert isinstance(after, list) assert len(after) == 1 assert isinstance(after[0], list) assert len(after[0]) == 5 
assert ia.is_np_array(after[0][0]) assert after[0][0].shape == (3, 2) assert after[0][0].dtype.name == dt.name assert np.allclose(after[0][0], coords1_arr) # ---- # iterable of iterable of (x,y) # ---- before = [coords1, coords2] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert after[0] == coords1 assert after[1] == coords2 # ---- # iterable of iterable of Keypoint # ---- before = [coords1_kps, coords2_kps] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert len(after[0]) == len(coords1_kps) assert len(after[1]) == len(coords2_kps) assert all([kp_after.x == kp_before.x and kp_after.y == kp_before.y for kp_after, kp_before in zip(after[0], coords1_kps)]) assert all([kp_after.x == kp_before.x and kp_after.y == kp_before.y for kp_after, kp_before in zip(after[1], coords2_kps)]) # ---- # iterable of iterable of Polygon # ---- before = [ [ia.Polygon(coords1), ia.Polygon(coords2)], [ia.Polygon(coords3), ia.Polygon(coords4)] ] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert isinstance(after[0], list) assert isinstance(after[1], list) assert len(after[0]) == 2 assert len(after[1]) == 2 assert after[0][0].exterior_almost_equals(coords1) assert after[0][1].exterior_almost_equals(coords2) assert after[1][0].exterior_almost_equals(coords3) assert after[1][1].exterior_almost_equals(coords4) # ---- # iterable of iterable of empty iterable # ---- before = [[[]]] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert after == [[[]]] # ---- # iterable of iterable of iterable of (x,y) # ---- before = [[coords1, coords2], [coords3, coords4]] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), 
dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert len(after[0]) == 2 assert len(after[1]) == 2 assert after[0][0] == coords1 assert after[0][1] == coords2 assert after[1][0] == coords3 assert after[1][1] == coords4 # ---- # iterable of iterable of iterable of Keypoint # ---- before = [[coords1_kps, coords2_kps], [coords3_kps, coords4_kps]] after = _norm_and_invert(before, images=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)]) assert isinstance(after, list) assert len(after) == 2 assert len(after[0]) == 2 assert len(after[1]) == 2 assert all([kp_after.x == kp_before.x and kp_after.y == kp_before.y for kp_after, kp_before in zip(after[0][0], coords1_kps)]) assert all([kp_after.x == kp_before.x and kp_after.y == kp_before.y for kp_after, kp_before in zip(after[0][1], coords2_kps)]) assert all([kp_after.x == kp_before.x and kp_after.y == kp_before.y for kp_after, kp_before in zip(after[1][0], coords3_kps)]) assert all([kp_after.x == kp_before.x and kp_after.y == kp_before.y for kp_after, kp_before in zip(after[1][1], coords4_kps)]) # The underlying normalization functions are mostly identical for # LineStrings and Polygons, hence we run only a few tests for LineStrings # here. Most of the code was already tested for Polygons. 
    def test_invert_normalize_line_strings(self):
        """Round-trip test: invert_normalize_line_strings(normalize_line_strings(x))
        must reproduce the input for every accepted input format (single
        LineString, LineStringsOnImage, nested iterables, raw arrays)."""
        def _norm_and_invert(line_strings, images):
            # Normalize to the canonical representation, then invert back to
            # the caller's original format.
            return normalization.invert_normalize_line_strings(
                normalization.normalize_line_strings(
                    line_strings, shapes=images),
                line_strings
            )

        # Two triangles and two quads as (x, y) coordinate lists.
        coords1 = [(0, 0), (10, 0), (10, 10)]
        coords2 = [(5, 5), (15, 5), (15, 15)]
        coords3 = [(0, 0), (10, 0), (10, 10), (0, 10)]
        coords4 = [(5, 5), (15, 5), (15, 15), (5, 15)]
        coords1_arr = np.float32(coords1)

        # ----
        # None
        # ----
        observed = normalization.invert_normalize_line_strings(None, None)
        assert observed is None

        # ----
        # single LineString instance
        # ----
        before = ia.LineString(coords1)
        after = _norm_and_invert(
            before, images=[np.zeros((1, 1, 3), dtype=np.uint8)])
        assert isinstance(after, ia.LineString)
        assert np.allclose(after.coords, coords1)

        # ----
        # single LineStringsOnImage instance
        # ----
        before = ia.LineStringsOnImage(
            [ia.LineString(coords1)], shape=(1, 1, 3))
        after = _norm_and_invert(before, images=None)
        assert isinstance(after, ia.LineStringsOnImage)
        assert len(after.line_strings) == 1
        assert np.allclose(after.line_strings[0].coords, coords1)
        assert after.shape == (1, 1, 3)

        # ----
        # iterable of LineStringsOnImage
        # ----
        before = [
            ia.LineStringsOnImage([ia.LineString(coords1)], shape=(1, 1, 3)),
            ia.LineStringsOnImage([ia.LineString(coords2)], shape=(2, 1, 3))
        ]
        after = _norm_and_invert(before, images=None)
        assert isinstance(after, list)
        assert len(after) == 2
        assert isinstance(after[0], ia.LineStringsOnImage)
        assert isinstance(after[1], ia.LineStringsOnImage)
        assert np.allclose(after[0].line_strings[0].coords, coords1)
        assert np.allclose(after[1].line_strings[0].coords, coords2)
        assert after[0].shape == (1, 1, 3)
        assert after[1].shape == (2, 1, 3)

        # ----
        # iterable of iterable of array
        # ----
        # Both a list of image arrays and a single (N,H,W,C) batch array are
        # accepted as `images`; shape and dtype must survive the round trip.
        for dt in [np.dtype("float32"), np.dtype("int16"),
                   np.dtype("uint16")]:
            for images in [[np.zeros((1, 1, 3), dtype=np.uint8)],
                           np.zeros((1, 1, 1, 3), dtype=np.uint8)]:
                before = [[coords1_arr.astype(dt)]]
                after = _norm_and_invert(before, images=images)
                assert isinstance(after, list)
                assert len(after) == 1
                assert isinstance(after[0], list)
                assert len(after[0]) == 1
                assert ia.is_np_array(after[0][0])
                assert after[0][0].shape == (3, 2)
                assert after[0][0].dtype.name == dt.name
                assert np.allclose(after[0][0], coords1_arr)

                # Same, but with five line strings on the single image.
                before = [[coords1_arr.astype(dt) for _ in sm.xrange(5)]]
                after = _norm_and_invert(before, images=images)
                assert isinstance(after, list)
                assert len(after) == 1
                assert isinstance(after[0], list)
                assert len(after[0]) == 5
                assert ia.is_np_array(after[0][0])
                assert after[0][0].shape == (3, 2)
                assert after[0][0].dtype.name == dt.name
                assert np.allclose(after[0][0], coords1_arr)

        # ----
        # iterable of iterable of LineString
        # ----
        before = [
            [ia.LineString(coords1), ia.LineString(coords2)],
            [ia.LineString(coords3), ia.LineString(coords4)]
        ]
        after = _norm_and_invert(
            before,
            images=[np.zeros((1, 1, 3), dtype=np.uint8),
                    np.zeros((1, 1, 3), dtype=np.uint8)])
        assert isinstance(after, list)
        assert isinstance(after[0], list)
        assert isinstance(after[1], list)
        assert len(after[0]) == 2
        assert len(after[1]) == 2
        assert np.allclose(after[0][0].coords, coords1)
        assert np.allclose(after[0][1].coords, coords2)
        assert np.allclose(after[1][0].coords, coords3)
        assert np.allclose(after[1][1].coords, coords4)

        # ----
        # iterable of iterable of iterable of (x,y)
        # ----
        before = [[coords1, coords2], [coords3, coords4]]
        after = _norm_and_invert(
            before,
            images=[np.zeros((1, 1, 3), dtype=np.uint8),
                    np.zeros((1, 1, 3), dtype=np.uint8)])
        assert isinstance(after, list)
        assert len(after) == 2
        assert len(after[0]) == 2
        assert len(after[1]) == 2
        assert after[0][0] == coords1
        assert after[0][1] == coords2
        assert after[1][0] == coords3
        assert after[1][1] == coords4

    def test_normalize_images(self):
        """Test that normalize_images() maps None/arrays/lists to the
        canonical form: None stays None, arrays become (N,H,W,C) uint8,
        lists of arrays become lists of (H,W,C) arrays."""
        assert normalization.normalize_images(None) is None

        # (N,H,W,C) array is already canonical and passes through unchanged.
        arr = np.zeros((1, 4, 4, 3), dtype=np.uint8)
        observed = normalization.normalize_images(arr)
        assert ia.is_np_array(observed)
        assert observed.shape == (1, 4, 4, 3)
        assert observed.dtype.name == "uint8"

        # (N,H,W) gains a channel axis.
        arr = np.zeros((1, 4, 4), dtype=np.uint8)
        observed = normalization.normalize_images(arr)
        assert ia.is_np_array(observed)
        assert observed.shape == (1, 4, 4, 1)
        assert observed.dtype.name == "uint8"

        # (H,W) gains both batch and channel axes.
        arr = np.zeros((4, 4), dtype=np.uint8)
        observed = normalization.normalize_images(arr)
        assert ia.is_np_array(observed)
        assert observed.shape == (1, 4, 4, 1)
        assert observed.dtype.name == "uint8"

        # Empty list stays an empty list.
        observed = normalization.normalize_images([])
        assert isinstance(observed, list)
        assert len(observed) == 0

        # List input: per-image channel axis added, sizes may differ.
        arr1 = np.zeros((4, 4), dtype=np.uint8)
        arr2 = np.zeros((5, 5, 3), dtype=np.uint8)
        observed = normalization.normalize_images([arr1, arr2])
        assert isinstance(observed, list)
        assert len(observed) == 2
        assert ia.is_np_array(observed[0])
        assert ia.is_np_array(observed[1])
        assert observed[0].shape == (4, 4, 1)
        assert observed[1].shape == (5, 5, 3)
        assert observed[0].dtype.name == "uint8"
        assert observed[1].dtype.name == "uint8"

        # Non-array, non-list input is rejected.
        with self.assertRaises(ValueError):
            normalization.normalize_images(False)

    def test_normalize_heatmaps(self):
        """Test that normalize_heatmaps() converts each accepted input
        format to a list of HeatmapsOnImage and rejects mismatched or
        missing image shape information."""
        # ----
        # None
        # ----
        heatmaps_norm = normalization.normalize_heatmaps(None)
        assert heatmaps_norm is None

        # ----
        # array
        # ----
        # (N,H,W,C) float array + list of images.
        heatmaps_norm = normalization.normalize_heatmaps(
            np.zeros((1, 1, 1, 1), dtype=np.float32) + 0.1,
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(heatmaps_norm, list)
        assert isinstance(heatmaps_norm[0], ia.HeatmapsOnImage)
        assert np.allclose(heatmaps_norm[0].arr_0to1, 0 + 0.1)

        # (N,H,W,C) float array + (N,H,W,C) image batch array.
        heatmaps_norm = normalization.normalize_heatmaps(
            np.zeros((1, 1, 1, 1), dtype=np.float32) + 0.1,
            shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
        )
        assert isinstance(heatmaps_norm, list)
        assert isinstance(heatmaps_norm[0], ia.HeatmapsOnImage)
        assert np.allclose(heatmaps_norm[0].arr_0to1, 0 + 0.1)

        # --> heatmaps for too many images
        with self.assertRaises(ValueError):
            _heatmaps_norm = normalization.normalize_heatmaps(
                np.zeros((2, 1, 1, 1), dtype=np.float32) + 0.1,
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
# --> too few heatmaps with self.assertRaises(ValueError): _heatmaps_norm = normalization.normalize_heatmaps( np.zeros((1, 1, 1, 1), dtype=np.float32) + 0.1, np.zeros((2, 1, 1, 3), dtype=np.uint8) ) # --> wrong channel number with self.assertRaises(ValueError): _heatmaps_norm = normalization.normalize_heatmaps( np.zeros((1, 1, 1), dtype=np.float32) + 0.1, shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8) ) # --> images None with self.assertRaises(ValueError): _heatmaps_norm = normalization.normalize_heatmaps( np.zeros((1, 1, 1, 1), dtype=np.float32) + 0.1, shapes=None ) # ---- # single HeatmapsOnImage # ---- heatmaps_norm = normalization.normalize_heatmaps( ia.HeatmapsOnImage( np.zeros((1, 1, 1), dtype=np.float32) + 0.1, shape=(1, 1, 3)), shapes=None ) assert isinstance(heatmaps_norm, list) assert isinstance(heatmaps_norm[0], ia.HeatmapsOnImage) assert np.allclose(heatmaps_norm[0].arr_0to1, 0 + 0.1) # ---- # empty iterable # ---- heatmaps_norm = normalization.normalize_heatmaps( [], shapes=None ) assert heatmaps_norm is None # ---- # iterable of arrays # ---- heatmaps_norm = normalization.normalize_heatmaps( [np.zeros((1, 1, 1), dtype=np.float32) + 0.1], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(heatmaps_norm, list) assert isinstance(heatmaps_norm[0], ia.HeatmapsOnImage) assert np.allclose(heatmaps_norm[0].arr_0to1, 0 + 0.1) heatmaps_norm = normalization.normalize_heatmaps( [np.zeros((1, 1, 1), dtype=np.float32) + 0.1], shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8) ) assert isinstance(heatmaps_norm, list) assert isinstance(heatmaps_norm[0], ia.HeatmapsOnImage) assert np.allclose(heatmaps_norm[0].arr_0to1, 0 + 0.1) # --> heatmaps for too many images with self.assertRaises(ValueError): _heatmaps_norm = normalization.normalize_heatmaps( [ np.zeros((1, 1, 1), dtype=np.float32) + 0.1, np.zeros((1, 1, 1), dtype=np.float32) + 0.1 ], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) # --> too few heatmaps with self.assertRaises(ValueError): _heatmaps_norm = 
normalization.normalize_heatmaps( [np.zeros((1, 1, 1), dtype=np.float32) + 0.1], shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8) ) # --> images None with self.assertRaises(ValueError): _heatmaps_norm = normalization.normalize_heatmaps( [np.zeros((1, 1, 1), dtype=np.float32) + 0.1], shapes=None, ) # --> wrong number of dimensions with self.assertRaises(ValueError): _heatmaps_norm = normalization.normalize_heatmaps( [np.zeros((1, 1, 1, 1), dtype=np.float32) + 0.1], shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8) ) # ---- # iterable of HeatmapsOnImage # ---- heatmaps_norm = normalization.normalize_heatmaps( [ia.HeatmapsOnImage( np.zeros((1, 1, 1), dtype=np.float32) + 0.1, shape=(1, 1, 3))], shapes=None ) assert isinstance(heatmaps_norm, list) assert isinstance(heatmaps_norm[0], ia.HeatmapsOnImage) assert np.allclose(heatmaps_norm[0].arr_0to1, 0 + 0.1) def test_normalize_segmentation_maps(self): # ---- # None # ---- segmaps_norm = normalization.normalize_segmentation_maps(None) assert segmaps_norm is None # ---- # array # ---- for dt in [np.dtype("int32"), np.dtype("uint16"), np.dtype(bool)]: # NOTE: use np.full(shape, 1, dtype=dt) here and below instead of # np.zeros(shape, dtype=dt) + 1, because the latter one converts # dtype bool_ to int64. 
            # (N,H,W,C) array of segmentation map(s) + list of images.
            segmaps_norm = normalization.normalize_segmentation_maps(
                np.full((1, 1, 1, 1), 1, dtype=dt),
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
            assert isinstance(segmaps_norm, list)
            assert isinstance(segmaps_norm[0], ia.SegmentationMapsOnImage)
            assert np.allclose(segmaps_norm[0].arr[..., 0], 1)

            # Same, but with the images given as one (N,H,W,C) batch array.
            segmaps_norm = normalization.normalize_segmentation_maps(
                np.full((1, 1, 1, 1), 1, dtype=dt),
                shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
            )
            assert isinstance(segmaps_norm, list)
            assert isinstance(segmaps_norm[0], ia.SegmentationMapsOnImage)
            assert np.allclose(segmaps_norm[0].arr[..., 0], 1)

            # --> segmaps for too many images
            with self.assertRaises(ValueError):
                _segmaps_norm = normalization.normalize_segmentation_maps(
                    np.full((2, 1, 1), 1, dtype=dt),
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
                )

            # --> too few segmaps
            with self.assertRaises(ValueError):
                _segmaps_norm = normalization.normalize_segmentation_maps(
                    np.full((1, 1, 1), 1, dtype=dt),
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> images None
            with self.assertRaises(ValueError):
                _segmaps_norm = normalization.normalize_segmentation_maps(
                    np.full((1, 1, 1), 1, dtype=dt),
                    shapes=None
                )

        # ----
        # single SegmentationMapsOnImage
        # ----
        # Already-normalized input needs no `shapes` and is wrapped in a list.
        segmaps_norm = normalization.normalize_segmentation_maps(
            ia.SegmentationMapsOnImage(
                np.full((1, 1, 1), 1, dtype=np.int32),
                shape=(1, 1, 3)),
            shapes=None
        )
        assert isinstance(segmaps_norm, list)
        assert isinstance(segmaps_norm[0], ia.SegmentationMapsOnImage)
        assert np.allclose(segmaps_norm[0].arr[..., 0], 0 + 1)

        # ----
        # empty iterable
        # ----
        segmaps_norm = normalization.normalize_segmentation_maps(
            [], shapes=None
        )
        assert segmaps_norm is None

        # ----
        # iterable of arrays
        # ----
        for dt in [np.dtype("int32"), np.dtype("uint16"), np.dtype(bool)]:
            segmaps_norm = normalization.normalize_segmentation_maps(
                [np.full((1, 1, 1), 1, dtype=dt)],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
            assert isinstance(segmaps_norm, list)
            assert isinstance(segmaps_norm[0], ia.SegmentationMapsOnImage)
            assert np.allclose(segmaps_norm[0].arr[..., 0], 1)

            segmaps_norm = normalization.normalize_segmentation_maps(
                [np.full((1, 1, 1), 1, dtype=dt)],
                shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
            )
            assert isinstance(segmaps_norm, list)
            assert isinstance(segmaps_norm[0], ia.SegmentationMapsOnImage)
            assert np.allclose(segmaps_norm[0].arr[..., 0], 1)

            # --> segmaps for too many images
            with self.assertRaises(ValueError):
                _segmaps_norm = normalization.normalize_segmentation_maps(
                    [
                        np.full((1, 1, 1), 1, dtype=np.int32),
                        np.full((1, 1, 1), 1, dtype=np.int32)
                    ],
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
                )

            # --> too few segmaps
            with self.assertRaises(ValueError):
                _segmaps_norm = normalization.normalize_segmentation_maps(
                    [np.full((1, 1, 1), 1, dtype=np.int32)],
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> images None
            with self.assertRaises(ValueError):
                _segmaps_norm = normalization.normalize_segmentation_maps(
                    [np.full((1, 1, 1), 1, dtype=np.int32)],
                    shapes=None
                )

            # --> wrong number of dimensions
            with self.assertRaises(ValueError):
                _segmaps_norm = normalization.normalize_segmentation_maps(
                    [np.full((1, 1, 1, 1), 1, dtype=np.int32)],
                    shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
                )

        # ----
        # iterable of SegmentationMapsOnImage
        # ----
        segmaps_norm = normalization.normalize_segmentation_maps(
            [ia.SegmentationMapsOnImage(
                np.full((1, 1, 1), 1, dtype=np.int32),
                shape=(1, 1, 3))],
            shapes=None
        )
        assert isinstance(segmaps_norm, list)
        assert isinstance(segmaps_norm[0], ia.SegmentationMapsOnImage)
        assert np.allclose(segmaps_norm[0].arr[..., 0], 1)

    def test_normalize_keypoints(self):
        """Test that normalize_keypoints() converts each accepted input
        format to a list of KeypointsOnImage and rejects mismatched or
        missing image shape information."""
        def _assert_single_image_expected(inputs):
            # Inputs that describe keypoints for exactly one image must be
            # rejected when `shapes` is None or describes multiple images.
            # --> images None
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    inputs, None)

            # --> too many images
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    inputs,
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> too many images
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    inputs,
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                            np.zeros((1, 1, 3), dtype=np.uint8)]
                )

        # ----
        # None
        # ----
        keypoints_norm = normalization.normalize_keypoints(None)
        assert keypoints_norm is None

        # ----
        # array
        # ----
        for dt in [np.dtype("float32"), np.dtype("int16"),
                   np.dtype("uint16")]:
            # (N,K,2) array + list of images.
            keypoints_norm = normalization.normalize_keypoints(
                np.zeros((1, 1, 2), dtype=dt) + 1,
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
            assert isinstance(keypoints_norm, list)
            assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
            assert len(keypoints_norm[0].keypoints) == 1
            assert np.allclose(keypoints_norm[0].to_xy_array(), 1)

            # Five keypoints on one image, images given as a batch array.
            keypoints_norm = normalization.normalize_keypoints(
                np.zeros((1, 5, 2), dtype=dt) + 1,
                shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
            )
            assert isinstance(keypoints_norm, list)
            assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
            assert len(keypoints_norm[0].keypoints) == 5
            assert np.allclose(keypoints_norm[0].to_xy_array(), 1)

            # --> keypoints for too many images
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    np.zeros((2, 1, 2), dtype=dt) + 1,
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
                )

            # --> too few keypoints
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    np.zeros((1, 1, 2), dtype=dt) + 1,
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> wrong keypoints shape
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    np.zeros((1, 1, 100), dtype=dt) + 1,
                    shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
                )

            _assert_single_image_expected(np.zeros((1, 1, 2), dtype=dt) + 1)

        # ----
        # (x,y)
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            (1, 2),
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 1
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2
        _assert_single_image_expected((1, 2))

        # ----
        # single Keypoint instance
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            ia.Keypoint(x=1, y=2),
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 1
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2
        _assert_single_image_expected(ia.Keypoint(x=1, y=2))

        # ----
        # single KeypointsOnImage instance
        # ----
        # Already-normalized input needs no `shapes`.
        keypoints_norm = normalization.normalize_keypoints(
            ia.KeypointsOnImage([ia.Keypoint(x=1, y=2)], shape=(1, 1, 3)),
            shapes=None
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 1
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2

        # ----
        # empty iterable
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            [], shapes=None
        )
        assert keypoints_norm is None

        # ----
        # iterable of array
        # ----
        for dt in [np.dtype("float32"), np.dtype("int16"),
                   np.dtype("uint16")]:
            keypoints_norm = normalization.normalize_keypoints(
                [np.zeros((1, 2), dtype=dt) + 1],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
            assert isinstance(keypoints_norm, list)
            assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
            assert len(keypoints_norm[0].keypoints) == 1
            assert np.allclose(keypoints_norm[0].to_xy_array(), 1)

            keypoints_norm = normalization.normalize_keypoints(
                [np.zeros((5, 2), dtype=dt) + 1],
                shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
            )
            assert isinstance(keypoints_norm, list)
            assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
            assert len(keypoints_norm[0].keypoints) == 5
            assert np.allclose(keypoints_norm[0].to_xy_array(), 1)

            # --> keypoints for too many images
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    [
                        np.zeros((1, 2), dtype=dt) + 1,
                        np.zeros((1, 2), dtype=dt) + 1
                    ],
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
                )

            # --> too few keypoints
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    [np.zeros((1, 2), dtype=dt) + 1],
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> images None
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    [np.zeros((1, 2), dtype=dt) + 1],
                    shapes=None
                )

            # --> wrong shape
            with self.assertRaises(ValueError):
                _keypoints_norm = normalization.normalize_keypoints(
                    [np.zeros((1, 100), dtype=dt) + 1],
                    shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
                )

        # ----
        # iterable of (x,y)
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            [(1, 2), (3, 4)],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 2
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2
        assert keypoints_norm[0].keypoints[1].x == 3
        assert keypoints_norm[0].keypoints[1].y == 4

        # may only be used for single images
        with self.assertRaises(ValueError):
            _keypoints_norm = normalization.normalize_keypoints(
                [(1, 2)],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8)]
            )

        # ----
        # iterable of Keypoint
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            [ia.Keypoint(x=1, y=2), ia.Keypoint(x=3, y=4)],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 2
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2
        assert keypoints_norm[0].keypoints[1].x == 3
        assert keypoints_norm[0].keypoints[1].y == 4

        # may only be used for single images
        with self.assertRaises(ValueError):
            _keypoints_norm = normalization.normalize_keypoints(
                [ia.Keypoint(x=1, y=2)],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8)]
            )

        # ----
        # iterable of KeypointsOnImage
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            [
                ia.KeypointsOnImage([ia.Keypoint(x=1, y=2)],
                                    shape=(1, 1, 3)),
                ia.KeypointsOnImage([ia.Keypoint(x=3, y=4)],
                                    shape=(1, 1, 3)),
            ],
            shapes=None
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 1
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2
        assert isinstance(keypoints_norm[1], ia.KeypointsOnImage)
        assert len(keypoints_norm[1].keypoints) == 1
        assert keypoints_norm[1].keypoints[0].x == 3
        assert keypoints_norm[1].keypoints[0].y == 4

        # ----
        # iterable of empty iterables
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            [[]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert keypoints_norm is None

        # ----
        # iterable of iterable of (x,y)
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            [
                [(1, 2), (3, 4)],
                [(5, 6), (7, 8)]
            ],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                    np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 2
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2
        assert keypoints_norm[0].keypoints[1].x == 3
        assert keypoints_norm[0].keypoints[1].y == 4
        assert len(keypoints_norm[1].keypoints) == 2
        assert keypoints_norm[1].keypoints[0].x == 5
        assert keypoints_norm[1].keypoints[0].y == 6
        assert keypoints_norm[1].keypoints[1].x == 7
        assert keypoints_norm[1].keypoints[1].y == 8

        # --> images None
        with self.assertRaises(ValueError):
            _keypoints_norm = normalization.normalize_keypoints(
                [
                    [(1, 2), (3, 4)],
                    [(5, 6), (7, 8)]
                ],
                shapes=None
            )

        # --> different number of images
        with self.assertRaises(ValueError):
            _keypoints_norm = normalization.normalize_keypoints(
                [
                    [(1, 2), (3, 4)],
                    [(5, 6), (7, 8)]
                ],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8)]
            )

        # ----
        # iterable of iterable of Keypoint
        # ----
        keypoints_norm = normalization.normalize_keypoints(
            [
                [ia.Keypoint(x=1, y=2), ia.Keypoint(x=3, y=4)],
                [ia.Keypoint(x=5, y=6), ia.Keypoint(x=7, y=8)]
            ],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                    np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(keypoints_norm, list)
        assert isinstance(keypoints_norm[0], ia.KeypointsOnImage)
        assert len(keypoints_norm[0].keypoints) == 2
        assert keypoints_norm[0].keypoints[0].x == 1
        assert keypoints_norm[0].keypoints[0].y == 2
        assert keypoints_norm[0].keypoints[1].x == 3
        assert keypoints_norm[0].keypoints[1].y == 4
        assert len(keypoints_norm[1].keypoints) == 2
        assert keypoints_norm[1].keypoints[0].x == 5
        assert keypoints_norm[1].keypoints[0].y == 6
        assert keypoints_norm[1].keypoints[1].x == 7
        assert keypoints_norm[1].keypoints[1].y == 8

        # --> images None
        with self.assertRaises(ValueError):
            _keypoints_norm = normalization.normalize_keypoints(
                [
                    [ia.Keypoint(x=1, y=2), ia.Keypoint(x=3, y=4)],
                    [ia.Keypoint(x=5, y=6), ia.Keypoint(x=7, y=8)]
                ],
                shapes=None
            )

        # --> different number of images
        with self.assertRaises(ValueError):
            _keypoints_norm = normalization.normalize_keypoints(
                [
                    [ia.Keypoint(x=1, y=2), ia.Keypoint(x=3, y=4)],
                    [ia.Keypoint(x=5, y=6), ia.Keypoint(x=7, y=8)]
                ],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8)]
            )

    def test_normalize_bounding_boxes(self):
        """Test that normalize_bounding_boxes() converts each accepted input
        format to a list of BoundingBoxesOnImage and rejects mismatched or
        missing image shape information."""
        def _assert_single_image_expected(inputs):
            # Inputs describing bounding boxes for exactly one image must be
            # rejected when `shapes` is None or describes multiple images.
            # --> images None
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    inputs, shapes=None
                )

            # --> too many images
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    inputs,
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> too many images
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    inputs,
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                            np.zeros((1, 1, 3), dtype=np.uint8)]
                )

        # ----
        # None
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(None)
        assert bbs_norm is None

        # ----
        # array
        # ----
        for dt in [np.dtype("float32"), np.dtype("int16"),
                   np.dtype("uint16")]:
            # (N,B,4) array of xyxy boxes + list of images.
            bbs_norm = normalization.normalize_bounding_boxes(
                np.zeros((1, 1, 4), dtype=dt) + 1,
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
            assert isinstance(bbs_norm, list)
            assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
            assert len(bbs_norm[0].bounding_boxes) == 1
            assert np.allclose(bbs_norm[0].to_xyxy_array(), 1)

            # Five boxes on one image, images given as a batch array.
            bbs_norm = normalization.normalize_bounding_boxes(
                np.zeros((1, 5, 4), dtype=dt) + 1,
                shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
            )
            assert isinstance(bbs_norm, list)
            assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
            assert len(bbs_norm[0].bounding_boxes) == 5
            assert np.allclose(bbs_norm[0].to_xyxy_array(), 1)

            # --> bounding boxes for too many images
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    np.zeros((2, 1, 4), dtype=dt) + 1,
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
                )

            # --> too few bounding boxes
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    np.zeros((1, 1, 4), dtype=dt) + 1,
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> wrong bounding box coordinate count
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    np.zeros((1, 1, 100), dtype=dt) + 1,
                    shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
                )

            _assert_single_image_expected(np.zeros((1, 1, 4), dtype=dt) + 1)

        # ----
        # (x1,y1,x2,y2)
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            (1, 2, 3, 4),
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 1
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4
        _assert_single_image_expected((1, 2, 3, 4))

        # ----
        # single BoundingBox instance
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            ia.BoundingBox(x1=1, y1=2, x2=3, y2=4),
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 1
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4
        _assert_single_image_expected(
            ia.BoundingBox(x1=1, y1=2, x2=3, y2=4))

        # ----
        # single BoundingBoxesOnImage instance
        # ----
        # Already-normalized input needs no `shapes`.
        bbs_norm = normalization.normalize_bounding_boxes(
            ia.BoundingBoxesOnImage(
                [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)],
                shape=(1, 1, 3)),
            shapes=None
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 1
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4

        # ----
        # empty iterable
        # ----
        bbs_norm = normalization.normalize_bounding_boxes([], shapes=None)
        assert bbs_norm is None

        # ----
        # iterable of array
        # ----
        for dt in [np.dtype("float32"), np.dtype("int16"),
                   np.dtype("uint16")]:
            bbs_norm = normalization.normalize_bounding_boxes(
                [np.zeros((1, 4), dtype=dt) + 1],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
            assert isinstance(bbs_norm, list)
            assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
            assert len(bbs_norm[0].bounding_boxes) == 1
            assert np.allclose(bbs_norm[0].to_xyxy_array(), 1)

            bbs_norm = normalization.normalize_bounding_boxes(
                [np.zeros((5, 4), dtype=dt) + 1],
                shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
            )
            assert isinstance(bbs_norm, list)
            assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
            assert len(bbs_norm[0].bounding_boxes) == 5
            assert np.allclose(bbs_norm[0].to_xyxy_array(), 1)

            # --> bounding boxes for too many images
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    [
                        np.zeros((1, 4), dtype=dt) + 1,
                        np.zeros((1, 4), dtype=dt) + 1
                    ],
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
                )

            # --> too few bounding boxes
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    [np.zeros((1, 4), dtype=dt) + 1],
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> images None
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    [np.zeros((1, 4), dtype=dt) + 1],
                    shapes=None
                )

            # --> wrong shape
            with self.assertRaises(ValueError):
                _bbs_norm = normalization.normalize_bounding_boxes(
                    [np.zeros((1, 100), dtype=dt) + 1],
                    shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
                )

        # ----
        # iterable of (x1,y1,x2,y2)
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            [(1, 2, 3, 4), (5, 6, 7, 8)],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 2
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4
        assert bbs_norm[0].bounding_boxes[1].x1 == 5
        assert bbs_norm[0].bounding_boxes[1].y1 == 6
        assert bbs_norm[0].bounding_boxes[1].x2 == 7
        assert bbs_norm[0].bounding_boxes[1].y2 == 8

        # may only be used for single images
        with self.assertRaises(ValueError):
            _bbs_norm = normalization.normalize_bounding_boxes(
                [(1, 2, 3, 4)],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8)]
            )

        # ----
        # iterable of BoundingBox
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            [
                ia.BoundingBox(x1=1, y1=2, x2=3, y2=4),
                ia.BoundingBox(x1=5, y1=6, x2=7, y2=8)
            ],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 2
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4
        assert bbs_norm[0].bounding_boxes[1].x1 == 5
        assert bbs_norm[0].bounding_boxes[1].y1 == 6
        assert bbs_norm[0].bounding_boxes[1].x2 == 7
        assert bbs_norm[0].bounding_boxes[1].y2 == 8

        # may only be used for single images
        with self.assertRaises(ValueError):
            _bbs_norm = normalization.normalize_bounding_boxes(
                [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8)]
            )

        # ----
        # iterable of BoundingBoxesOnImage
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            [
                ia.BoundingBoxesOnImage(
                    [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)],
                    shape=(1, 1, 3)),
                ia.BoundingBoxesOnImage(
                    [ia.BoundingBox(x1=5, y1=6, x2=7, y2=8)],
                    shape=(1, 1, 3))
            ],
            shapes=None
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 1
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4
        assert isinstance(bbs_norm[1], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[1].bounding_boxes) == 1
        assert bbs_norm[1].bounding_boxes[0].x1 == 5
        assert bbs_norm[1].bounding_boxes[0].y1 == 6
        assert bbs_norm[1].bounding_boxes[0].x2 == 7
        assert bbs_norm[1].bounding_boxes[0].y2 == 8

        # ----
        # iterable of empty iterables
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            [[]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert bbs_norm is None

        # ----
        # iterable of iterable of (x1,y1,x2,y2)
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            [
                [(1, 2, 3, 4)],
                [(5, 6, 7, 8), (9, 10, 11, 12)]
            ],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                    np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 1
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4
        assert len(bbs_norm[1].bounding_boxes) == 2
        assert bbs_norm[1].bounding_boxes[0].x1 == 5
        assert bbs_norm[1].bounding_boxes[0].y1 == 6
        assert bbs_norm[1].bounding_boxes[0].x2 == 7
        assert bbs_norm[1].bounding_boxes[0].y2 == 8
        assert bbs_norm[1].bounding_boxes[1].x1 == 9
        assert bbs_norm[1].bounding_boxes[1].y1 == 10
        assert bbs_norm[1].bounding_boxes[1].x2 == 11
        assert bbs_norm[1].bounding_boxes[1].y2 == 12

        # --> images None
        with self.assertRaises(ValueError):
            _bbs_norm = normalization.normalize_bounding_boxes(
                [
                    [(1, 2, 3, 4), (3, 4, 5, 6)],
                    [(5, 6, 7, 8), (7, 8, 9, 10)]
                ],
                shapes=None
            )

        # --> different number of images
        # NOTE: `shapes` is passed positionally here.
        with self.assertRaises(ValueError):
            _bbs_norm = normalization.normalize_bounding_boxes(
                [
                    [(1, 2, 3, 4)],
                    [(5, 6, 7, 8)]
                ],
                [np.zeros((1, 1, 3), dtype=np.uint8),
                 np.zeros((1, 1, 3), dtype=np.uint8),
                 np.zeros((1, 1, 3), dtype=np.uint8)]
            )

        # ----
        # iterable of iterable of BoundingBox
        # ----
        bbs_norm = normalization.normalize_bounding_boxes(
            [
                [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4),
                 ia.BoundingBox(x1=5, y1=6, x2=7, y2=8)],
                [ia.BoundingBox(x1=9, y1=10, x2=11, y2=12),
                 ia.BoundingBox(x1=13, y1=14, x2=15, y2=16)]
            ],
            shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                    np.zeros((1, 1, 3), dtype=np.uint8)]
        )
        assert isinstance(bbs_norm, list)
        assert isinstance(bbs_norm[0], ia.BoundingBoxesOnImage)
        assert len(bbs_norm[0].bounding_boxes) == 2
        assert bbs_norm[0].bounding_boxes[0].x1 == 1
        assert bbs_norm[0].bounding_boxes[0].y1 == 2
        assert bbs_norm[0].bounding_boxes[0].x2 == 3
        assert bbs_norm[0].bounding_boxes[0].y2 == 4
        assert bbs_norm[0].bounding_boxes[1].x1 == 5
        assert bbs_norm[0].bounding_boxes[1].y1 == 6
        assert bbs_norm[0].bounding_boxes[1].x2 == 7
        assert bbs_norm[0].bounding_boxes[1].y2 == 8
        assert len(bbs_norm[1].bounding_boxes) == 2
        assert bbs_norm[1].bounding_boxes[0].x1 == 9
        assert bbs_norm[1].bounding_boxes[0].y1 == 10
        assert bbs_norm[1].bounding_boxes[0].x2 == 11
        assert bbs_norm[1].bounding_boxes[0].y2 == 12
        assert bbs_norm[1].bounding_boxes[1].x1 == 13
        assert bbs_norm[1].bounding_boxes[1].y1 == 14
        assert bbs_norm[1].bounding_boxes[1].x2 == 15
        assert bbs_norm[1].bounding_boxes[1].y2 == 16

        # --> images None
        with self.assertRaises(ValueError):
            _bbs_norm = normalization.normalize_bounding_boxes(
                [
                    [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4),
                     ia.BoundingBox(x1=5, y1=6, x2=7, y2=8)],
                    [ia.BoundingBox(x1=9, y1=10, x2=11, y2=12),
                     ia.BoundingBox(x1=13, y1=14, x2=15, y2=16)]
                ],
                shapes=None
            )

        # --> different number of images
        with self.assertRaises(ValueError):
            _bbs_norm = normalization.normalize_bounding_boxes(
                [
                    [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4),
                     ia.BoundingBox(x1=5, y1=6, x2=7, y2=8)],
                    [ia.BoundingBox(x1=9, y1=10, x2=11, y2=12),
                     ia.BoundingBox(x1=13, y1=14, x2=15, y2=16)]
                ],
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8),
                        np.zeros((1, 1, 3), dtype=np.uint8)]
            )

    def test_normalize_polygons(self):
        """Test that normalize_polygons() converts each accepted input
        format to a list of PolygonsOnImage and rejects mismatched or
        missing image shape information."""
        def _assert_single_image_expected(inputs):
            # Inputs describing polygons for exactly one image must be
            # rejected when `shapes` is None or describes multiple images.
            # --> images None
            with self.assertRaises(ValueError):
                _polygons_norm = normalization.normalize_polygons(
                    inputs, shapes=None)

            # --> too many images
            with self.assertRaises(ValueError):
                _polygons_norm = normalization.normalize_polygons(
                    inputs, shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8))

            # --> too many images
            with self.assertRaises(ValueError):
                _polygons_norm = normalization.normalize_polygons(
                    inputs,
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8),
                            np.zeros((1, 1, 3), dtype=np.uint8)]
                )

        # Two triangles and two quads, as coordinate lists, Keypoint lists
        # and float32 arrays.
        coords1 = [(0, 0), (10, 0), (10, 10)]
        coords2 = [(5, 5), (15, 5), (15, 15)]
        coords3 = [(0, 0), (10, 0), (10, 10), (0, 10)]
        coords4 = [(5, 5), (15, 5), (15, 15), (5, 15)]
        coords1_kps = [ia.Keypoint(x=x, y=y) for x, y in coords1]
        coords2_kps = [ia.Keypoint(x=x, y=y) for x, y in coords2]
        coords3_kps = [ia.Keypoint(x=x, y=y) for x, y in coords3]
        coords4_kps = [ia.Keypoint(x=x, y=y) for x, y in coords4]
        coords1_arr = np.float32(coords1)
        coords2_arr = np.float32(coords2)
        coords3_arr = np.float32(coords3)
        coords4_arr = np.float32(coords4)

        # ----
        # None
        # ----
        polygons_norm = normalization.normalize_polygons(None)
        assert polygons_norm is None

        # ----
        # array
        # ----
        for dt in [np.dtype("float32"), np.dtype("int16"),
                   np.dtype("uint16")]:
            # (N,P,K,2) array with one polygon + list of images.
            polygons_norm = normalization.normalize_polygons(
                coords1_arr[np.newaxis, np.newaxis, ...].astype(dt),
                shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
            )
            assert isinstance(polygons_norm, list)
            assert isinstance(polygons_norm[0], ia.PolygonsOnImage)
            assert len(polygons_norm[0].polygons) == 1
            assert np.allclose(polygons_norm[0].polygons[0].exterior,
                               coords1_arr)

            # Five identical polygons on one image, images as batch array.
            polygons_norm = normalization.normalize_polygons(
                np.tile(
                    coords1_arr[np.newaxis, np.newaxis, ...].astype(dt),
                    (1, 5, 1, 1)
                ),
                shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
            )
            assert isinstance(polygons_norm, list)
            assert isinstance(polygons_norm[0], ia.PolygonsOnImage)
            assert len(polygons_norm[0].polygons) == 5
            assert np.allclose(polygons_norm[0].polygons[0].exterior,
                               coords1_arr)

            # --> polygons for too many images
            with self.assertRaises(ValueError):
                _polygons_norm = normalization.normalize_polygons(
                    np.tile(
                        coords1_arr[np.newaxis, np.newaxis, ...].astype(dt),
                        (2, 1, 1, 1)
                    ),
                    shapes=[np.zeros((1, 1, 3), dtype=np.uint8)]
                )

            # --> too few polygons
            with self.assertRaises(ValueError):
                _polygons_norm = normalization.normalize_polygons(
                    np.tile(
                        coords1_arr[np.newaxis, np.newaxis, ...].astype(dt),
                        (1, 1, 1, 1)
                    ),
                    shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8)
                )

            # --> wrong polygons shape
            with self.assertRaises(ValueError):
                _polygons_norm = normalization.normalize_polygons(
                    np.tile(
                        coords1_arr[np.newaxis, np.newaxis, ...].astype(dt),
                        (1, 1, 1, 10)
                    ),
                    shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8)
                )
_assert_single_image_expected( coords1_arr[np.newaxis, np.newaxis, ...].astype(dt)) # ---- # single Polygon instance # ---- polygons_norm = normalization.normalize_polygons( ia.Polygon(coords1), shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 1 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) _assert_single_image_expected(ia.Polygon(coords1)) # ---- # single PolygonsOnImage instance # ---- polygons_norm = normalization.normalize_polygons( ia.PolygonsOnImage([ia.Polygon(coords1)], shape=(1, 1, 3)), shapes=None ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 1 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) # ---- # empty iterable # ---- polygons_norm = normalization.normalize_polygons( [], shapes=None ) assert polygons_norm is None # ---- # iterable of array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: polygons_norm = normalization.normalize_polygons( [coords1_arr[np.newaxis, ...].astype(dt)], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 1 assert np.allclose(polygons_norm[0].polygons[0].exterior, coords1_arr) polygons_norm = normalization.normalize_polygons( [np.tile( coords1_arr[np.newaxis, ...].astype(dt), (5, 1, 1) )], shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8) ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 5 assert np.allclose(polygons_norm[0].polygons[0].exterior, coords1_arr) # --> polygons for too many images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [coords1_arr[np.newaxis, ...].astype(dt), 
coords2_arr[np.newaxis, ...].astype(dt)], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) # --> too few polygons with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [coords1_arr[np.newaxis, ...].astype(dt)], shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8) ) # --> wrong polygons shape with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [np.tile( coords1_arr[np.newaxis, ...].astype(dt), (1, 1, 10) )], shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8) ) _assert_single_image_expected( [coords1_arr[np.newaxis, ...].astype(dt)] ) # ---- # iterable of (x,y) # ---- polygons_norm = normalization.normalize_polygons( coords1, shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 1 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) # may only be used for single images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( coords1, shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # ---- # iterable of Keypoint # ---- polygons_norm = normalization.normalize_polygons( coords1_kps, shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 1 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) # may only be used for single images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( coords1_kps, shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # ---- # iterable of Polygon # ---- polygons_norm = normalization.normalize_polygons( [ia.Polygon(coords1), ia.Polygon(coords2)], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], 
ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) assert polygons_norm[0].polygons[1].exterior_almost_equals(coords2) # may only be used for single images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [ia.Polygon(coords1)], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # ---- # iterable of PolygonsOnImage # ---- polygons_norm = normalization.normalize_polygons( [ ia.PolygonsOnImage([ia.Polygon(coords1)], shape=(1, 1, 3)), ia.PolygonsOnImage([ia.Polygon(coords2)], shape=(1, 1, 3)) ], shapes=None ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 1 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) assert isinstance(polygons_norm[1], ia.PolygonsOnImage) assert len(polygons_norm[1].polygons) == 1 assert polygons_norm[1].polygons[0].exterior_almost_equals(coords2) # ---- # iterable of empty iterables # ---- polygons_norm = normalization.normalize_polygons( [[]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert polygons_norm is None # ---- # iterable of iterable of array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: polygons_norm = normalization.normalize_polygons( [[coords1_arr.astype(dt)]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 1 assert np.allclose(polygons_norm[0].polygons[0].exterior, coords1_arr) polygons_norm = normalization.normalize_polygons( [[ np.copy(coords1_arr).astype(dt) for _ in sm.xrange(5) ]], shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8) ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 5 assert 
np.allclose(polygons_norm[0].polygons[0].exterior, coords1_arr) # --> polygons for too many images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [[coords1_arr.astype(dt)], [coords2_arr.astype(dt)]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) # --> too few polygons with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [[coords1_arr.astype(dt)]], shapes=np.zeros((2, 1, 1, 3), dtype=np.uint8) ) # --> wrong polygons shape with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [[np.tile( coords1_arr.astype(dt), (1, 1, 10) )]], shapes=np.zeros((1, 1, 1, 3), dtype=np.uint8) ) _assert_single_image_expected( [[coords1_arr.astype(dt)]] ) # ---- # iterable of iterable of (x,y) # ---- polygons_norm = normalization.normalize_polygons( [coords1, coords2], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) assert polygons_norm[0].polygons[1].exterior_almost_equals(coords2) # --> images None with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [coords1, coords2], shapes=None ) # --> different number of images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [coords1, coords2], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # ---- # iterable of iterable of Keypoint # ---- polygons_norm = normalization.normalize_polygons( [coords1_kps, coords2_kps], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) assert 
polygons_norm[0].polygons[1].exterior_almost_equals(coords2) # --> images None with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [coords1_kps, coords2_kps], shapes=None ) # --> different number of images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [coords1_kps, coords2_kps], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # ---- # iterable of iterable of Polygon # ---- polygons_norm = normalization.normalize_polygons( [ [ia.Polygon(coords1), ia.Polygon(coords2)], [ia.Polygon(coords3), ia.Polygon(coords4)] ], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert isinstance(polygons_norm[1], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) assert polygons_norm[0].polygons[1].exterior_almost_equals(coords2) assert len(polygons_norm[1].polygons) == 2 assert polygons_norm[1].polygons[0].exterior_almost_equals(coords3) assert polygons_norm[1].polygons[1].exterior_almost_equals(coords4) # --> images None with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [ [ia.Polygon(coords1), ia.Polygon(coords2)], [ia.Polygon(coords3), ia.Polygon(coords4)] ], shapes=None ) # --> different number of images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [ [ia.Polygon(coords1), ia.Polygon(coords2)], [ia.Polygon(coords3), ia.Polygon(coords4)] ], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # ---- # iterable of iterable of empty iterable # ---- polygons_norm = normalization.normalize_polygons( [[[]]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert polygons_norm is None # ---- 
# iterable of iterable of iterable of (x,y) # ---- polygons_norm = normalization.normalize_polygons( [[coords1, coords2], [coords3, coords4]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) assert polygons_norm[0].polygons[1].exterior_almost_equals(coords2) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[1].polygons[0].exterior_almost_equals(coords3) assert polygons_norm[1].polygons[1].exterior_almost_equals(coords4) # --> images None with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [[coords1, coords2]], shapes=None ) # --> different number of images with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [[coords1, coords2], [coords3]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # ---- # iterable of iterable of iterable of Keypoint # ---- polygons_norm = normalization.normalize_polygons( [[coords1_kps, coords2_kps], [coords3_kps, coords4_kps]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(polygons_norm, list) assert isinstance(polygons_norm[0], ia.PolygonsOnImage) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[0].polygons[0].exterior_almost_equals(coords1) assert polygons_norm[0].polygons[1].exterior_almost_equals(coords2) assert len(polygons_norm[0].polygons) == 2 assert polygons_norm[1].polygons[0].exterior_almost_equals(coords3) assert polygons_norm[1].polygons[1].exterior_almost_equals(coords4) # --> images None with self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [[coords1_kps, coords2_kps]], shapes=None ) # --> different number of images with 
self.assertRaises(ValueError): _polygons_norm = normalization.normalize_polygons( [[coords1_kps, coords2_kps], [coords3_kps]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) # essentially already tested via polygons, as they are based on the # same methods, hence a short test here def test_normalize_line_strings(self): coords1 = [(0, 0), (10, 0), (10, 10)] coords2 = [(5, 5), (15, 5), (15, 15)] coords3 = [(0, 0), (10, 0), (10, 10), (0, 10)] coords4 = [(5, 5), (15, 5), (15, 15), (5, 15)] coords1_arr = np.float32(coords1) # ---- # None # ---- lss_norm = normalization.normalize_line_strings(None) assert lss_norm is None # ---- # array # ---- for dt in [np.dtype("float32"), np.dtype("int16"), np.dtype("uint16")]: lss_norm = normalization.normalize_line_strings( coords1_arr[np.newaxis, np.newaxis, ...].astype(dt), shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(lss_norm, list) assert isinstance(lss_norm[0], ia.LineStringsOnImage) assert len(lss_norm[0].line_strings) == 1 assert np.allclose(lss_norm[0].line_strings[0].coords, coords1_arr) # ---- # single LineString instance # ---- lss_norm = normalization.normalize_line_strings( ia.LineString(coords1), shapes=[np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(lss_norm, list) assert isinstance(lss_norm[0], ia.LineStringsOnImage) assert len(lss_norm[0].line_strings) == 1 assert np.allclose(lss_norm[0].line_strings[0].coords, coords1) # ---- # single LineStringOnImage instance # ---- lss_norm = normalization.normalize_line_strings( ia.LineStringsOnImage([ia.LineString(coords1)], shape=(1, 1, 3)), shapes=None ) assert isinstance(lss_norm, list) assert isinstance(lss_norm[0], ia.LineStringsOnImage) assert len(lss_norm[0].line_strings) == 1 assert np.allclose(lss_norm[0].line_strings[0].coords, coords1) # ---- # empty iterable # ---- lss_norm = normalization.normalize_line_strings( [], shapes=None ) assert lss_norm is None # ---- # 
iterable of LineStringOnImage # ---- lss_norm = normalization.normalize_line_strings( [ ia.LineStringsOnImage( [ia.LineString(coords1)], shape=(1, 1, 3)), ia.LineStringsOnImage( [ia.LineString(coords2)], shape=(1, 1, 3)) ], shapes=None ) assert isinstance(lss_norm, list) assert isinstance(lss_norm[0], ia.LineStringsOnImage) assert len(lss_norm[0].line_strings) == 1 assert np.allclose(lss_norm[0].line_strings[0].coords, coords1) assert isinstance(lss_norm[1], ia.LineStringsOnImage) assert len(lss_norm[1].line_strings) == 1 assert np.allclose(lss_norm[1].line_strings[0].coords, coords2) # ---- # iterable of iterable of LineString # ---- lss_norm = normalization.normalize_line_strings( [ [ia.LineString(coords1), ia.LineString(coords2)], [ia.LineString(coords3), ia.LineString(coords4)] ], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(lss_norm, list) assert isinstance(lss_norm[0], ia.LineStringsOnImage) assert isinstance(lss_norm[1], ia.LineStringsOnImage) assert len(lss_norm[0].line_strings) == 2 assert np.allclose(lss_norm[0].line_strings[0].coords, coords1) assert np.allclose(lss_norm[0].line_strings[1].coords, coords2) assert len(lss_norm[1].line_strings) == 2 assert np.allclose(lss_norm[1].line_strings[0].coords, coords3) assert np.allclose(lss_norm[1].line_strings[1].coords, coords4) # ---- # iterable of iterable of iterable of (x,y) # ---- lss_norm = normalization.normalize_line_strings( [[coords1, coords2], [coords3, coords4]], shapes=[np.zeros((1, 1, 3), dtype=np.uint8), np.zeros((1, 1, 3), dtype=np.uint8)] ) assert isinstance(lss_norm, list) assert isinstance(lss_norm[0], ia.LineStringsOnImage) assert len(lss_norm[0].line_strings) == 2 assert np.allclose(lss_norm[0].line_strings[0].coords, coords1) assert np.allclose(lss_norm[0].line_strings[1].coords, coords2) assert len(lss_norm[0].line_strings) == 2 assert np.allclose(lss_norm[1].line_strings[0].coords, coords3) assert 
np.allclose(lss_norm[1].line_strings[1].coords, coords4) def test__find_first_nonempty(self): # None observed = normalization.find_first_nonempty(None) assert observed[0] is None assert observed[1] is True assert len(observed[2]) == 0 # None with parents observed = normalization.find_first_nonempty(None, parents=["foo"]) assert observed[0] is None assert observed[1] is True assert len(observed[2]) == 1 assert observed[2][0] == "foo" # array observed = normalization.find_first_nonempty(np.zeros((4, 4, 3))) assert ia.is_np_array(observed[0]) assert observed[0].shape == (4, 4, 3) assert observed[1] is True assert len(observed[2]) == 0 # int observed = normalization.find_first_nonempty(0) assert observed[0] == 0 assert observed[1] is True assert len(observed[2]) == 0 # str observed = normalization.find_first_nonempty("foo") assert observed[0] == "foo" assert observed[1] is True assert len(observed[2]) == 0 # empty list observed = normalization.find_first_nonempty([]) assert observed[0] is None assert observed[1] is False assert len(observed[2]) == 0 # empty list of empty lists observed = normalization.find_first_nonempty([[], [], []]) assert observed[0] is None assert observed[1] is False assert len(observed[2]) == 1 # empty list of empty lists of empty lists observed = normalization.find_first_nonempty([[], [[]], []]) assert observed[0] is None assert observed[1] is False assert len(observed[2]) == 2 # list of None observed = normalization.find_first_nonempty([None, None]) assert observed[0] is None assert observed[1] is True assert len(observed[2]) == 1 # list of array observed = normalization.find_first_nonempty([ np.zeros((4, 4, 3)), np.zeros((5, 5, 3))]) assert ia.is_np_array(observed[0]) assert observed[0].shape == (4, 4, 3) assert observed[1] is True assert len(observed[2]) == 1 # list of list of array observed = normalization.find_first_nonempty( [[np.zeros((4, 4, 3))], [np.zeros((5, 5, 3))]] ) assert ia.is_np_array(observed[0]) assert observed[0].shape == (4, 
    def test__nonempty_info_to_type_str(self):
        """_nonempty_info_to_type_str() maps (entity, success, parents) to
        the normalization type string used for input-form dispatch."""
        # None found directly (no parents) -> "None"
        ntype = normalization._nonempty_info_to_type_str(
            None, True, [])
        assert ntype == "None"

        # nothing found (success=False) -> empty-iterable variants,
        # nesting depth derived from the parents list
        ntype = normalization._nonempty_info_to_type_str(
            None, False, [])
        assert ntype == "iterable[empty]"

        ntype = normalization._nonempty_info_to_type_str(
            None, False, [[]])
        assert ntype == "iterable-iterable[empty]"

        ntype = normalization._nonempty_info_to_type_str(
            None, False, [[], []])
        assert ntype == "iterable-iterable-iterable[empty]"

        # a tuple parent is treated the same as a list parent here
        ntype = normalization._nonempty_info_to_type_str(
            None, False, [tuple(), []])
        assert ntype == "iterable-iterable-iterable[empty]"

        # number found inside a tuple parent -> tuple[number,size=N]
        ntype = normalization._nonempty_info_to_type_str(
            1, True, [tuple([1, 2])])
        assert ntype == "tuple[number,size=2]"

        ntype = normalization._nonempty_info_to_type_str(
            1, True, [[], tuple([1, 2])])
        assert ntype == "iterable-tuple[number,size=2]"

        ntype = normalization._nonempty_info_to_type_str(
            1, True, [tuple([1, 2, 3, 4])])
        assert ntype == "tuple[number,size=4]"

        ntype = normalization._nonempty_info_to_type_str(
            1, True, [[], tuple([1, 2, 3, 4])])
        assert ntype == "iterable-tuple[number,size=4]"

        # a 3-tuple is neither a (x,y) point nor a (x1,y1,x2,y2) box and
        # must be rejected; the inner assert is unreachable if the call
        # raises as expected
        with self.assertRaises(AssertionError):
            ntype = normalization._nonempty_info_to_type_str(
                1, True, [tuple([1, 2, 3])])
            assert ntype == "tuple[number,size=4]"

        # arrays -> array[<dtype kind>]
        ntype = normalization._nonempty_info_to_type_str(
            np.zeros((4, 4, 3), dtype=np.uint8), True, [])
        assert ntype == "array[uint]"

        ntype = normalization._nonempty_info_to_type_str(
            np.zeros((4, 4, 3), dtype=np.float32), True, [])
        assert ntype == "array[float]"

        ntype = normalization._nonempty_info_to_type_str(
            np.zeros((4, 4, 3), dtype=np.int32), True, [])
        assert ntype == "array[int]"

        ntype = normalization._nonempty_info_to_type_str(
            np.zeros((4, 4, 3), dtype=bool), True, [])
        assert ntype == "array[bool]"

        # unsupported dtype kinds fall through to the raw kind char ("c")
        ntype = normalization._nonempty_info_to_type_str(
            np.zeros((4, 4, 3), dtype=np.dtype("complex")), True, [])
        assert ntype == "array[c]"

        # parents add one "iterable-" prefix per nesting level
        ntype = normalization._nonempty_info_to_type_str(
            np.zeros((4, 4, 3), dtype=np.uint8), True, [[]])
        assert ntype == "iterable-array[uint]"

        ntype = normalization._nonempty_info_to_type_str(
            np.zeros((4, 4, 3), dtype=np.uint8), True, [[], []])
        assert ntype == "iterable-iterable-array[uint]"

        # every augmentable class maps to its own class name, again with
        # "iterable-" prefixes according to nesting depth
        cls_names = ["Keypoint", "KeypointsOnImage",
                     "BoundingBox", "BoundingBoxesOnImage",
                     "Polygon", "PolygonsOnImage",
                     "HeatmapsOnImage", "SegmentationMapsOnImage"]
        clss = [
            ia.Keypoint(x=1, y=1),
            ia.KeypointsOnImage([], shape=(1, 1, 3)),
            ia.BoundingBox(x1=1, y1=2, x2=3, y2=4),
            ia.BoundingBoxesOnImage([], shape=(1, 1, 3)),
            ia.Polygon([(1, 1), (1, 2), (2, 2)]),
            ia.PolygonsOnImage([], shape=(1,)),
            ia.HeatmapsOnImage(np.zeros((1, 1, 1), dtype=np.float32),
                               shape=(1, 1, 3)),
            ia.SegmentationMapsOnImage(np.zeros((1, 1, 1), dtype=np.int32),
                                       shape=(1, 1, 3))
        ]
        for cls_name, cls in zip(cls_names, clss):
            ntype = normalization._nonempty_info_to_type_str(
                cls, True, [])
            assert ntype == cls_name

            ntype = normalization._nonempty_info_to_type_str(
                cls, True, [[]])
            assert ntype == "iterable-%s" % (cls_name,)

            ntype = normalization._nonempty_info_to_type_str(
                cls, True, [[], tuple()])
            assert ntype == "iterable-iterable-%s" % (cls_name,)
    def test_estimate_heatmaps_norm_type(self):
        """estimate_heatmaps_norm_type() classifies valid heatmap inputs
        and asserts on everything else."""
        ntype = normalization.estimate_heatmaps_norm_type(None)
        assert ntype == "None"

        # float array is the only accepted array dtype kind
        ntype = normalization.estimate_heatmaps_norm_type(
            np.zeros((1, 1, 1, 1), dtype=np.float32))
        assert ntype == "array[float]"

        ntype = normalization.estimate_heatmaps_norm_type(
            ia.HeatmapsOnImage(
                np.zeros((1, 1, 1), dtype=np.float32),
                shape=(1, 1, 1)
            )
        )
        assert ntype == "HeatmapsOnImage"

        ntype = normalization.estimate_heatmaps_norm_type([])
        assert ntype == "iterable[empty]"

        ntype = normalization.estimate_heatmaps_norm_type(
            [np.zeros((1, 1, 1), dtype=np.float32)])
        assert ntype == "iterable-array[float]"

        ntype = normalization.estimate_heatmaps_norm_type([
            ia.HeatmapsOnImage(np.zeros((1, 1, 1), dtype=np.float32),
                               shape=(1, 1, 1))
        ])
        assert ntype == "iterable-HeatmapsOnImage"

        # --
        # error cases
        # --
        # plain number
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_heatmaps_norm_type(1)

        # plain string
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_heatmaps_norm_type("foo")

        # non-float array dtype
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_heatmaps_norm_type(
                np.zeros((1, 1, 1), dtype=np.int32))

        # iterable of numbers
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_heatmaps_norm_type([1])

        # wrong class
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_heatmaps_norm_type(
                ia.KeypointsOnImage([], shape=(1, 1, 1)))

        # nested empty iterable (one level too deep)
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_heatmaps_norm_type([[]])

        # list of list of Heatmaps, only list of Heatmaps is max
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_heatmaps_norm_type([
                [ia.HeatmapsOnImage(np.zeros((1, 1, 1), dtype=np.float32),
                                    shape=(1, 1, 1))]
            ])
    def test_estimate_segmaps_norm_type(self):
        """estimate_segmaps_norm_type() classifies valid segmentation-map
        inputs and asserts on everything else."""
        ntype = normalization.estimate_segmaps_norm_type(None)
        assert ntype == "None"

        # int, uint and bool array dtype kinds are all accepted
        for name, dt in zip(["int", "uint", "bool"],
                            [np.int32, np.uint16, bool]):
            ntype = normalization.estimate_segmaps_norm_type(
                np.zeros((1, 1, 1, 1), dtype=dt))
            assert ntype == "array[%s]" % (name,)

        ntype = normalization.estimate_segmaps_norm_type(
            ia.SegmentationMapsOnImage(
                np.zeros((1, 1, 1), dtype=np.int32),
                shape=(1, 1, 1)
            )
        )
        assert ntype == "SegmentationMapsOnImage"

        ntype = normalization.estimate_segmaps_norm_type([])
        assert ntype == "iterable[empty]"

        ntype = normalization.estimate_segmaps_norm_type(
            [np.zeros((1, 1, 1), dtype=np.int32)])
        assert ntype == "iterable-array[int]"

        ntype = normalization.estimate_segmaps_norm_type([
            ia.SegmentationMapsOnImage(np.zeros((1, 1, 1), dtype=np.int32),
                                       shape=(1, 1, 1))
        ])
        assert ntype == "iterable-SegmentationMapsOnImage"

        # --
        # error cases
        # --
        # plain number
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_segmaps_norm_type(1)

        # plain string
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_segmaps_norm_type("foo")

        # iterable of numbers
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_segmaps_norm_type([1])

        # wrong class
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_segmaps_norm_type(
                ia.KeypointsOnImage([], shape=(1, 1, 1)))

        # nested empty iterable (one level too deep)
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_segmaps_norm_type([[]])

        # list of list of SegMap, only list of SegMap is max
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_segmaps_norm_type([
                [ia.SegmentationMapsOnImage(
                    np.zeros((1, 1, 1, 1), dtype=np.int32),
                    shape=(1, 1, 1))]
            ])
    def test_estimate_keypoints_norm_type(self):
        """estimate_keypoints_norm_type() classifies valid keypoint inputs
        (arrays, (x,y) tuples, Keypoint(s) and nested iterables thereof)
        and asserts on everything else."""
        ntype = normalization.estimate_keypoints_norm_type(None)
        assert ntype == "None"

        # float, int and uint array dtype kinds are all accepted
        for name, dt in zip(["float", "int", "uint"],
                            [np.float32, np.int32, np.uint16]):
            ntype = normalization.estimate_keypoints_norm_type(
                np.zeros((1, 5, 2), dtype=dt))
            assert ntype == "array[%s]" % (name,)

        # single (x,y) tuple
        ntype = normalization.estimate_keypoints_norm_type((1, 2))
        assert ntype == "tuple[number,size=2]"

        ntype = normalization.estimate_keypoints_norm_type(
            ia.Keypoint(x=1, y=2))
        assert ntype == "Keypoint"

        ntype = normalization.estimate_keypoints_norm_type(
            ia.KeypointsOnImage([ia.Keypoint(x=1, y=2)], shape=(1, 1, 3)))
        assert ntype == "KeypointsOnImage"

        ntype = normalization.estimate_keypoints_norm_type([])
        assert ntype == "iterable[empty]"

        for name, dt in zip(["float", "int", "uint"],
                            [np.float32, np.int32, np.uint16]):
            ntype = normalization.estimate_keypoints_norm_type(
                [np.zeros((5, 2), dtype=dt)])
            assert ntype == "iterable-array[%s]" % (name,)

        ntype = normalization.estimate_keypoints_norm_type([(1, 2)])
        assert ntype == "iterable-tuple[number,size=2]"

        ntype = normalization.estimate_keypoints_norm_type(
            [ia.Keypoint(x=1, y=2)])
        assert ntype == "iterable-Keypoint"

        ntype = normalization.estimate_keypoints_norm_type([
            ia.KeypointsOnImage([ia.Keypoint(x=1, y=2)],
                                shape=(1, 1, 3))])
        assert ntype == "iterable-KeypointsOnImage"

        ntype = normalization.estimate_keypoints_norm_type([[]])
        assert ntype == "iterable-iterable[empty]"

        ntype = normalization.estimate_keypoints_norm_type([[(1, 2)]])
        assert ntype == "iterable-iterable-tuple[number,size=2]"

        ntype = normalization.estimate_keypoints_norm_type(
            [[ia.Keypoint(x=1, y=2)]])
        assert ntype == "iterable-iterable-Keypoint"

        # --
        # error cases
        # --
        # plain number
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_keypoints_norm_type(1)

        # plain string
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_keypoints_norm_type("foo")

        # iterable of numbers
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_keypoints_norm_type([1])

        # wrong class
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_keypoints_norm_type(
                ia.HeatmapsOnImage(np.zeros((1, 1, 1), dtype=np.float32),
                                   shape=(1, 1, 1)))

        # triple-nested empty iterable (one level too deep)
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_keypoints_norm_type([[[]]])

        # list of list of list of keypoints,
        # only list of list of keypoints is max
        with self.assertRaises(AssertionError):
            _ntype = normalization.estimate_keypoints_norm_type(
                [[[ia.Keypoint(x=1, y=2)]]])
"iterable-tuple[number,size=2]" ntype = normalization.estimate_keypoints_norm_type( [ia.Keypoint(x=1, y=2)]) assert ntype == "iterable-Keypoint" ntype = normalization.estimate_keypoints_norm_type([ ia.KeypointsOnImage([ia.Keypoint(x=1, y=2)], shape=(1, 1, 3))]) assert ntype == "iterable-KeypointsOnImage" ntype = normalization.estimate_keypoints_norm_type([[]]) assert ntype == "iterable-iterable[empty]" ntype = normalization.estimate_keypoints_norm_type([[(1, 2)]]) assert ntype == "iterable-iterable-tuple[number,size=2]" ntype = normalization.estimate_keypoints_norm_type( [[ia.Keypoint(x=1, y=2)]]) assert ntype == "iterable-iterable-Keypoint" # -- # error cases # -- with self.assertRaises(AssertionError): _ntype = normalization.estimate_keypoints_norm_type(1) with self.assertRaises(AssertionError): _ntype = normalization.estimate_keypoints_norm_type("foo") with self.assertRaises(AssertionError): _ntype = normalization.estimate_keypoints_norm_type([1]) # wrong class with self.assertRaises(AssertionError): _ntype = normalization.estimate_keypoints_norm_type( ia.HeatmapsOnImage(np.zeros((1, 1, 1), dtype=np.float32), shape=(1, 1, 1))) with self.assertRaises(AssertionError): _ntype = normalization.estimate_keypoints_norm_type([[[]]]) # list of list of list of keypoints, # only list of list of keypoints is max with self.assertRaises(AssertionError): _ntype = normalization.estimate_keypoints_norm_type( [[[ia.Keypoint(x=1, y=2)]]]) def test_estimate_bounding_boxes_norm_type(self): ntype = normalization.estimate_bounding_boxes_norm_type(None) assert ntype == "None" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_bounding_boxes_norm_type( np.zeros((1, 5, 4), dtype=dt)) assert ntype == "array[%s]" % (name,) ntype = normalization.estimate_bounding_boxes_norm_type((1, 2, 3, 4)) assert ntype == "tuple[number,size=4]" ntype = normalization.estimate_bounding_boxes_norm_type( ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)) 
assert ntype == "BoundingBox" ntype = normalization.estimate_bounding_boxes_norm_type( ia.BoundingBoxesOnImage( [ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)], shape=(1, 1, 3))) assert ntype == "BoundingBoxesOnImage" ntype = normalization.estimate_bounding_boxes_norm_type([]) assert ntype == "iterable[empty]" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_bounding_boxes_norm_type( [np.zeros((5, 4), dtype=dt)]) assert ntype == "iterable-array[%s]" % (name,) ntype = normalization.estimate_bounding_boxes_norm_type([(1, 2, 3, 4)]) assert ntype == "iterable-tuple[number,size=4]" ntype = normalization.estimate_bounding_boxes_norm_type([ ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)]) assert ntype == "iterable-BoundingBox" ntype = normalization.estimate_bounding_boxes_norm_type([ ia.BoundingBoxesOnImage([ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)], shape=(1, 1, 3))]) assert ntype == "iterable-BoundingBoxesOnImage" ntype = normalization.estimate_bounding_boxes_norm_type([[]]) assert ntype == "iterable-iterable[empty]" ntype = normalization.estimate_bounding_boxes_norm_type( [[(1, 2, 3, 4)]]) assert ntype == "iterable-iterable-tuple[number,size=4]" ntype = normalization.estimate_bounding_boxes_norm_type( [[ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)]]) assert ntype == "iterable-iterable-BoundingBox" # -- # error cases # -- with self.assertRaises(AssertionError): _ntype = normalization.estimate_bounding_boxes_norm_type(1) with self.assertRaises(AssertionError): _ntype = normalization.estimate_bounding_boxes_norm_type("foo") with self.assertRaises(AssertionError): _ntype = normalization.estimate_bounding_boxes_norm_type([1]) # wrong class with self.assertRaises(AssertionError): _ntype = normalization.estimate_bounding_boxes_norm_type( ia.HeatmapsOnImage( np.zeros((1, 1, 1), dtype=np.float32), shape=(1, 1, 1)) ) with self.assertRaises(AssertionError): _ntype = normalization.estimate_bounding_boxes_norm_type([[[]]]) # list of list of 
list of bounding boxes, # only list of list of bounding boxes is max with self.assertRaises(AssertionError): _ntype = normalization.estimate_bounding_boxes_norm_type([[[ ia.BoundingBox(x1=1, y1=2, x2=3, y2=4)]]]) def test_estimate_polygons_norm_type(self): points = [(0, 0), (10, 0), (10, 10)] ntype = normalization.estimate_polygons_norm_type(None) assert ntype == "None" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_polygons_norm_type( np.zeros((1, 2, 5, 2), dtype=dt) ) assert ntype == "array[%s]" % (name,) ntype = normalization.estimate_polygons_norm_type( ia.Polygon(points) ) assert ntype == "Polygon" ntype = normalization.estimate_polygons_norm_type( ia.PolygonsOnImage( [ia.Polygon(points)], shape=(1, 1, 3)) ) assert ntype == "PolygonsOnImage" ntype = normalization.estimate_polygons_norm_type([]) assert ntype == "iterable[empty]" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_polygons_norm_type( [np.zeros((5, 4), dtype=dt)] ) assert ntype == "iterable-array[%s]" % (name,) ntype = normalization.estimate_polygons_norm_type(points) assert ntype == "iterable-tuple[number,size=2]" ntype = normalization.estimate_polygons_norm_type( [ia.Keypoint(x=x, y=y) for x, y in points] ) assert ntype == "iterable-Keypoint" ntype = normalization.estimate_polygons_norm_type([ia.Polygon(points)]) assert ntype == "iterable-Polygon" ntype = normalization.estimate_polygons_norm_type( [ia.PolygonsOnImage([ia.Polygon(points)], shape=(1, 1, 3))] ) assert ntype == "iterable-PolygonsOnImage" ntype = normalization.estimate_polygons_norm_type([[]]) assert ntype == "iterable-iterable[empty]" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_polygons_norm_type( [[np.zeros((5, 4), dtype=dt)]] ) assert ntype == "iterable-iterable-array[%s]" % (name,) ntype = 
normalization.estimate_polygons_norm_type([points]) assert ntype == "iterable-iterable-tuple[number,size=2]" ntype = normalization.estimate_polygons_norm_type([[ ia.Keypoint(x=x, y=y) for x, y in points ]]) assert ntype == "iterable-iterable-Keypoint" ntype = normalization.estimate_polygons_norm_type( [[ia.Polygon(points)]] ) assert ntype == "iterable-iterable-Polygon" ntype = normalization.estimate_polygons_norm_type([[[]]]) assert ntype == "iterable-iterable-iterable[empty]" ntype = normalization.estimate_polygons_norm_type([[points]]) assert ntype == "iterable-iterable-iterable-tuple[number,size=2]" ntype = normalization.estimate_polygons_norm_type( [[[ia.Keypoint(x=x, y=y) for x, y in points]]] ) assert ntype == "iterable-iterable-iterable-Keypoint" # -- # error cases # -- with self.assertRaises(AssertionError): _ntype = normalization.estimate_polygons_norm_type(1) with self.assertRaises(AssertionError): _ntype = normalization.estimate_polygons_norm_type("foo") with self.assertRaises(AssertionError): _ntype = normalization.estimate_polygons_norm_type([1]) # wrong class with self.assertRaises(AssertionError): _ntype = normalization.estimate_polygons_norm_type( ia.HeatmapsOnImage( np.zeros((1, 1, 1), dtype=np.float32), shape=(1, 1, 1)) ) with self.assertRaises(AssertionError): _ntype = normalization.estimate_polygons_norm_type([[[[]]]]) # list of list of list of polygons, # only list of list of polygons is max with self.assertRaises(AssertionError): _ntype = normalization.estimate_polygons_norm_type([[[ ia.Polygon(points)]]] ) def test_estimate_line_strings_norm_type(self): points = [(0, 0), (10, 0), (10, 10)] ntype = normalization.estimate_line_strings_norm_type(None) assert ntype == "None" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_line_strings_norm_type( np.zeros((1, 2, 5, 2), dtype=dt) ) assert ntype == "array[%s]" % (name,) ntype = normalization.estimate_line_strings_norm_type( 
ia.LineString(points) ) assert ntype == "LineString" ntype = normalization.estimate_line_strings_norm_type( ia.LineStringsOnImage( [ia.LineString(points)], shape=(1, 1, 3)) ) assert ntype == "LineStringsOnImage" ntype = normalization.estimate_line_strings_norm_type([]) assert ntype == "iterable[empty]" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_line_strings_norm_type( [np.zeros((5, 4), dtype=dt)] ) assert ntype == "iterable-array[%s]" % (name,) ntype = normalization.estimate_line_strings_norm_type(points) assert ntype == "iterable-tuple[number,size=2]" ntype = normalization.estimate_line_strings_norm_type( [ia.Keypoint(x=x, y=y) for x, y in points] ) assert ntype == "iterable-Keypoint" ntype = normalization.estimate_line_strings_norm_type( [ia.LineString(points)]) assert ntype == "iterable-LineString" ntype = normalization.estimate_line_strings_norm_type( [ia.LineStringsOnImage([ia.LineString(points)], shape=(1, 1, 3))] ) assert ntype == "iterable-LineStringsOnImage" ntype = normalization.estimate_line_strings_norm_type([[]]) assert ntype == "iterable-iterable[empty]" for name, dt in zip(["float", "int", "uint"], [np.float32, np.int32, np.uint16]): ntype = normalization.estimate_line_strings_norm_type( [[np.zeros((5, 4), dtype=dt)]] ) assert ntype == "iterable-iterable-array[%s]" % (name,) ntype = normalization.estimate_line_strings_norm_type([points]) assert ntype == "iterable-iterable-tuple[number,size=2]" ntype = normalization.estimate_line_strings_norm_type([[ ia.Keypoint(x=x, y=y) for x, y in points ]]) assert ntype == "iterable-iterable-Keypoint" ntype = normalization.estimate_line_strings_norm_type( [[ia.LineString(points)]] ) assert ntype == "iterable-iterable-LineString" ntype = normalization.estimate_line_strings_norm_type([[[]]]) assert ntype == "iterable-iterable-iterable[empty]" ntype = normalization.estimate_line_strings_norm_type([[points]]) assert ntype == 
"iterable-iterable-iterable-tuple[number,size=2]" ntype = normalization.estimate_line_strings_norm_type( [[[ia.Keypoint(x=x, y=y) for x, y in points]]] ) assert ntype == "iterable-iterable-iterable-Keypoint" # -- # error cases # -- with self.assertRaises(AssertionError): _ntype = normalization.estimate_line_strings_norm_type(1) with self.assertRaises(AssertionError): _ntype = normalization.estimate_line_strings_norm_type("foo") with self.assertRaises(AssertionError): _ntype = normalization.estimate_line_strings_norm_type([1]) # wrong class with self.assertRaises(AssertionError): _ntype = normalization.estimate_line_strings_norm_type( ia.HeatmapsOnImage( np.zeros((1, 1, 1), dtype=np.float32), shape=(1, 1, 1)) ) with self.assertRaises(AssertionError): _ntype = normalization.estimate_line_strings_norm_type([[[[]]]]) # list of list of list of LineStrings, # only list of list of LineStrings is max with self.assertRaises(AssertionError): _ntype = normalization.estimate_line_strings_norm_type([[[ ia.LineString(points)]]] )
39.142527
81
0.537665
15,504
135,668
4.563983
0.016383
0.014076
0.029847
0.030144
0.942058
0.907151
0.882052
0.861956
0.848997
0.840517
0
0.04925
0.33055
135,668
3,465
82
39.153824
0.729848
0.057538
0
0.676917
0
0
0.020254
0.009526
0
0
0
0.000289
0.404553
1
0.01278
false
0
0.004792
0.002396
0.020367
0.000399
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
121d88441245e523ca7feb60b8a54bde3b04fee6
105
py
Python
ShellCreator/Utils/Operators.py
mewais/ShellCreator
ed98ca87a979dee2e39f77ba5663cd99c438b913
[ "MIT" ]
2
2020-08-03T17:18:52.000Z
2021-12-21T08:04:20.000Z
ShellCreator/Utils/Operators.py
mewais/ShellCreator
ed98ca87a979dee2e39f77ba5663cd99c438b913
[ "MIT" ]
null
null
null
ShellCreator/Utils/Operators.py
mewais/ShellCreator
ed98ca87a979dee2e39f77ba5663cd99c438b913
[ "MIT" ]
null
null
null
#!/usr/bin/python3 def operator_and(a, b): return a and b def operator_or(a, b): return a or b
13.125
23
0.638095
21
105
3.095238
0.47619
0.338462
0.246154
0.276923
0
0
0
0
0
0
0
0.0125
0.238095
105
7
24
15
0.8
0.161905
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
8
122e0d9c71799ac1bb9fa7fab6a16d415e851bdd
217
py
Python
msbd/scraping/__init__.py
mnslarcher/metodi-statistici-big-data
4587b4e4104557e50d09d028259d6c42c44d2814
[ "MIT" ]
1
2019-02-17T09:28:04.000Z
2019-02-17T09:28:04.000Z
msbd/scraping/__init__.py
mnslarcher/metodi-statistici-big-data
4587b4e4104557e50d09d028259d6c42c44d2814
[ "MIT" ]
null
null
null
msbd/scraping/__init__.py
mnslarcher/metodi-statistici-big-data
4587b4e4104557e50d09d028259d6c42c44d2814
[ "MIT" ]
null
null
null
from .scraping import ottieni_contenuto_url from .scraping import ottieni_contenuto_urls_sequenziale from .scraping import ottieni_contenuto_urls_multiprocessing from .scraping import ottieni_contenuto_urls_threading
43.4
60
0.907834
27
217
6.888889
0.37037
0.258065
0.387097
0.537634
0.795699
0.612903
0
0
0
0
0
0
0.073733
217
4
61
54.25
0.925373
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
123f3013a22ca49adf85e7f74e2792fe02df6076
15,044
py
Python
ballot/migrations/0001_initial.py
HiroshiFuu/cs-balloting
565eb3ee88769d88b27705828c10c7b5be964ef5
[ "MIT" ]
null
null
null
ballot/migrations/0001_initial.py
HiroshiFuu/cs-balloting
565eb3ee88769d88b27705828c10c7b5be964ef5
[ "MIT" ]
null
null
null
ballot/migrations/0001_initial.py
HiroshiFuu/cs-balloting
565eb3ee88769d88b27705828c10c7b5be964ef5
[ "MIT" ]
null
null
null
# Generated by Django 2.2.16 on 2020-11-13 19:32 from django.conf import settings from django.db import migrations, models import django.db.models.deletion import jsonfield.fields class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('authentication', '0001_initial'), ] operations = [ migrations.CreateModel( name='LivePoll', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('title', models.CharField(max_length=255)), ('is_chosen', models.BooleanField(default=False, verbose_name='Is Chosen')), ('company', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='authentication.Company')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepoll_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepoll_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ], options={ 'verbose_name': 'Live Poll', 'verbose_name_plural': 'Live Polls', 'managed': True, }, ), migrations.CreateModel( name='LivePollBatch', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('batch_no', models.PositiveIntegerField(verbose_name='Batch No.')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, 
related_name='ballot_livepollbatch_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollbatch_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ('poll', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='batches', to='ballot.LivePoll')), ], options={ 'verbose_name': 'Live Poll Batch', 'verbose_name_plural': 'Live Poll Batches', 'managed': True, }, ), migrations.CreateModel( name='LivePollItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('order', models.PositiveSmallIntegerField(default=0, verbose_name='Sequence Order')), ('text', models.CharField(max_length=255)), ('is_open', models.BooleanField(default=False, verbose_name='Is Open')), ('opened_at', models.DateTimeField(blank=True, null=True, verbose_name='Vote Opened At')), ('opening_duration_minustes', models.PositiveSmallIntegerField(default=5, verbose_name='Vote Opening Duration Minustes')), ('poll_type', models.PositiveSmallIntegerField(choices=[(1, 'By Share'), (2, 'By Lot')], default=1, verbose_name='Poll Type')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollitem_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollitem_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ('poll', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='items', to='ballot.LivePoll')), ], options={ 
'verbose_name': 'Live Poll Item', 'verbose_name_plural': 'Live Poll Items', 'ordering': ['order'], 'managed': True, 'unique_together': {('text', 'poll')}, }, ), migrations.CreateModel( name='Survey', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('title', models.CharField(max_length=255)), ('end_date', models.DateField(null=True, verbose_name='End Date')), ('company', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='authentication.Company')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_survey_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_survey_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ], options={ 'verbose_name': 'Survey', 'verbose_name_plural': 'Surveys', 'managed': True, }, ), migrations.CreateModel( name='SurveyOption', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('text', models.CharField(max_length=255)), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_surveyoption_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_surveyoption_modified_by', to=settings.AUTH_USER_MODEL, 
verbose_name='Modified By')), ('survey', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='options', to='ballot.Survey')), ], options={ 'verbose_name': 'Survey Option', 'verbose_name_plural': 'Survey Options', 'managed': True, 'unique_together': {('text', 'survey')}, }, ), migrations.CreateModel( name='SurveyVote', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_surveyvote_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_surveyvote_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ('survey_option', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='votes', to='ballot.SurveyOption')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)), ], options={ 'verbose_name': 'Survey Vote', 'verbose_name_plural': 'Survey Votes', 'managed': True, }, ), migrations.CreateModel( name='SurveyResult', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('result', jsonfield.fields.JSONField(blank=True, null=True)), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_surveyresult_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created 
By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_surveyresult_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ('survey', models.OneToOneField(on_delete=django.db.models.deletion.PROTECT, to='ballot.Survey')), ], options={ 'verbose_name': 'Survey Result', 'verbose_name_plural': 'Survey Results', 'managed': True, }, ), migrations.CreateModel( name='LivePollResult', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('result', jsonfield.fields.JSONField(blank=True, null=True)), ('voting_date', models.DateField(blank=True, null=True, verbose_name='Voting Date')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollresult_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('live_poll', models.OneToOneField(on_delete=django.db.models.deletion.PROTECT, to='ballot.LivePollItem')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollresult_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ], options={ 'verbose_name': 'Live Poll Result', 'verbose_name_plural': 'Live Poll Results', 'managed': True, }, ), migrations.CreateModel( name='LivePollItemVote', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('ip_address', models.CharField(blank=True, max_length=15, null=True, verbose_name='IP 
Address')), ('user_agent', models.CharField(blank=True, max_length=255, null=True, verbose_name='User Agent')), ('vote_option', models.PositiveSmallIntegerField(verbose_name='Vote Option')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollitemvote_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollitemvote_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified By')), ('poll_batch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='item_batches', to='ballot.LivePollBatch')), ('poll_item', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='item_votes', to='ballot.LivePollItem')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)), ], options={ 'verbose_name': 'Live Poll Item Vote', 'verbose_name_plural': 'Live Poll Item Votes', 'managed': True, }, ), migrations.CreateModel( name='LivePollProxy', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('modified_at', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Modified At')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollproxy_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('main_user', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='main_user', to=settings.AUTH_USER_MODEL)), ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ballot_livepollproxy_modified_by', to=settings.AUTH_USER_MODEL, 
verbose_name='Modified By')), ('poll_batch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='proxy_batches', to='ballot.LivePollBatch')), ('proxy_users', models.ManyToManyField(related_name='proxy_users', to=settings.AUTH_USER_MODEL)), ], options={ 'verbose_name': 'Live Poll Proxy', 'verbose_name_plural': 'Live Poll Proxys', 'managed': True, 'unique_together': {('poll_batch', 'main_user')}, }, ), ]
72.676329
228
0.642316
1,668
15,044
5.564149
0.078537
0.097188
0.052796
0.082965
0.834608
0.798513
0.759293
0.7427
0.733649
0.733649
0
0.003584
0.220952
15,044
206
229
73.029126
0.788311
0.003058
0
0.502513
1
0
0.204655
0.044745
0
0
0
0
0
1
0
false
0
0.020101
0
0.040201
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
126c58c21745222df2fe2545ac555ce9447262d9
41,044
py
Python
ButtonRun.py
Sydfree/132Project
cbe7a3df39cafff3c61133a26ba30a93c76ecffb
[ "MIT" ]
1
2020-03-13T19:11:21.000Z
2020-03-13T19:11:21.000Z
ButtonRun.py
Sydfree/132Project
cbe7a3df39cafff3c61133a26ba30a93c76ecffb
[ "MIT" ]
2
2020-05-17T19:48:29.000Z
2020-05-17T20:22:20.000Z
ButtonRun.py
Sydfree/132Project
cbe7a3df39cafff3c61133a26ba30a93c76ecffb
[ "MIT" ]
null
null
null
################################################################### # Name: Sydney Holland, Aidan Cheatham, Anassas Anderson # Date: May 1, 2020 # Description: Program for Demonstration #################################################################### import webbrowser from tkinter import * import tkinter as tkinter import random class MyGUI: def __init__(self): self.main_window = tkinter.Tk() self.fullScreenState = False self.main_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.main_window.winfo_screenwidth(), self.main_window.winfo_screenheight() self.main_window.geometry("%dx%d" % (self.w, self.h)) self.button1 = tkinter.Button(self.main_window,text='Click Here To Start Your Quarantine Adventure!',command=self.Category, height = 15) self.button1.pack() self.main_window.configure(bg = 'blue') tkinter.mainloop() def quitFullScreen(self, event): self.fullScreenState = False self.window.attributes("-fullscreen", self.fullScreenState) def Category(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'red') self.avg_mess = tkinter.Label(self.mini_window,text='What Are You Interested In Doing Today?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button2 = tkinter.Button(self.mini_window,text='Netflix?',command=self.NetflixOptions, height = 5, width = 15) self.button2.pack() self.button3 = tkinter.Button(self.mini_window,text='Music?',command=self.MusicOptions, height = 5, width = 15) self.button3.pack() self.button4 = tkinter.Button(self.mini_window,text='Gaming?',command=self.GamingOptions, height = 5, width = 15) 
self.button4.pack() self.button5 = tkinter.Button(self.mini_window,text='Cooking?',command=self.CookingOptions, height = 5, width = 15) self.button5.pack() self.button6 = tkinter.Button(self.mini_window,text='Productivity?',command=self.ProductivityOptions, height = 5, width = 15) self.button6.pack() self.button7 = tkinter.Button(self.mini_window,text='Workout?',command=self.WorkoutOptions, height = 5, width = 15) self.button7.pack() def NetflixOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = '#E50914') self.avg_mess = tkinter.Label(self.mini_window,text='Show or Movie?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button8 = tkinter.Button(self.mini_window,text='Show',command=self.ShowOptions, height = 5, width = 15) self.button8.pack() self.button9 = tkinter.Button(self.mini_window,text='Movie',command=self.MovieOptions, height = 5, width = 15) self.button9.pack() def MovieOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'Black') self.avg_mess = tkinter.Label(self.mini_window,text='What Genre Are We Talking?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button10 = 
tkinter.Button(self.mini_window,text='Action',command=self.ActionMovieOptions, height = 5, width = 15) self.button10.pack() self.button11 = tkinter.Button(self.mini_window,text='Romance',command=self.RomanceMovieOptions, height = 5, width = 15) self.button11.pack() self.button12 = tkinter.Button(self.mini_window,text='Horror',command=self.HorrorMovieOptions, height = 5, width = 15) self.button12.pack() self.button13 = tkinter.Button(self.mini_window,text='Comedy',command=self.ComedyMovieOptions, height = 5, width = 15) self.button13.pack() self.button14 = tkinter.Button(self.mini_window,text='Documentary',command=self.DocumentaryMovieOptions, height = 5, width = 15) self.button14.pack() def ActionMovieOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'Black') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_action = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/1365?bc=34399'), height = 5, width = 15) self.button_action.pack() def RomanceMovieOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'Black') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_romance = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/8883?bc=34399'), height = 5, width = 15) self.button_romance.pack() def HorrorMovieOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'Black') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_horror = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/8711?bc=34399'), height = 5, width = 15) self.button_horror.pack() def ComedyMovieOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'Black') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_comedy = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/6548?bc=34399'), height = 5, width = 15) self.button_comedy.pack() def DocumentaryMovieOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'Black') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_documentary = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/2243108?bc=34399'), height = 5, width = 15) self.button_documentary.pack() def ShowOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'Gold') self.avg_mess = tkinter.Label(self.mini_window,text='What Genre Are We Talking?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button15 = 
tkinter.Button(self.mini_window,text='Reality',command=self.RealityShowOptions, height = 5, width = 15) self.button15.pack() self.button16 = tkinter.Button(self.mini_window,text='Action',command=self.ActionShowOptions, height = 5, width = 15) self.button16.pack() self.button17 = tkinter.Button(self.mini_window,text='Romance',command=self.RomanceShowOptions, height = 5, width = 15) self.button17.pack() self.button18 = tkinter.Button(self.mini_window,text='Horror',command=self.HorrorShowOptions, height = 5, width = 15) self.button18.pack() self.button19 = tkinter.Button(self.mini_window,text='Comedy',command=self.ComedyShowOptions, height = 5, width = 15) self.button19.pack() self.button20 = tkinter.Button(self.mini_window,text='Documentary',command=self.DocumentaryShowOptions, height = 5, width = 15) self.button20.pack() def RealityShowOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'gold') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_reality_show = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/9833?bc=83'), height = 5, width = 15) self.button_reality_show.pack() def ActionShowOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'gold') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_action_show = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/10673?bc=83'), height = 5, width = 15) self.button_action_show.pack() def RomanceShowOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'gold') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_romance_show = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/26156?bc=83'), height = 5, width = 15) self.button_romance_show.pack() def HorrorShowOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'gold') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_horror_show = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/83059?bc=83'), height = 5, width = 15) self.button_horror_show.pack() def ComedyShowOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'gold') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_comedy_show = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/10375?bc=83'), height = 5, width = 15) self.button_comedy_show.pack() def DocumentaryShowOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'gold') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Watch...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_documentary_show = tkinter.Button(self.mini_window, text='You Should Watch...', command= lambda: webbrowser.open('https://www.netflix.com/browse/genre/10105?bc=83'), height = 5, width = 15) self.button_documentary_show.pack() def MusicOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'green') self.avg_mess = tkinter.Label(self.mini_window,text='What Genre Are We Talking?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button21 = 
tkinter.Button(self.mini_window,text='Country',command=self.CountryOptions, height = 5, width = 15) self.button21.pack() self.button22 = tkinter.Button(self.mini_window,text='Rock',command=self.RockOptions, height = 5, width = 15) self.button22.pack() self.button23 = tkinter.Button(self.mini_window,text='Rap',command=self.RapOptions, height = 5, width = 15) self.button23.pack() self.button24 = tkinter.Button(self.mini_window,text='R&B',command=self.RandBOptions, height = 5, width = 15) self.button24.pack() self.button25 = tkinter.Button(self.mini_window,text='Lo-Fi',command=self.LoFiOptions, height = 5, width = 15) self.button25.pack() def CountryOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'green') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_country = tkinter.Button(self.mini_window, text='You Should Listen To...', command= lambda: webbrowser.open('https://www.youtube.com/results?search_query=Luke+Combs'), height = 5, width = 25) self.button_country.pack() def RockOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'green') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_rock = tkinter.Button(self.mini_window, text='You Should Listen To...', command= lambda: webbrowser.open('https://www.youtube.com/results?search_query=Paramore'), height = 5, width = 25) self.button_rock.pack() def RapOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'green') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_rap = tkinter.Button(self.mini_window, text='You Should Listen To...', command= lambda: webbrowser.open('https://www.youtube.com/results?search_query=Tyler+the+Creator'), height = 5, width = 25) self.button_rap.pack() def RandBOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'green') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_randb = tkinter.Button(self.mini_window, text='You Should Listen To...', command= lambda: webbrowser.open('https://www.youtube.com/results?search_query=Michael+Jackson'), height = 5, width = 25) self.button_randb.pack() def LoFiOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'green') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_lofi = tkinter.Button(self.mini_window, text='You Should Listen To...', command= lambda: webbrowser.open('https://www.youtube.com/results?search_query=slipfunc'), height = 5, width = 25) self.button_lofi.pack() def GamingOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'purple') self.avg_mess = tkinter.Label(self.mini_window,text='What Kind of Games Are We Talking?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button26 = 
tkinter.Button(self.mini_window,text='Console Games?',command=self.ConsoleOptions, height = 5, width = 15) self.button26.pack() self.button27 = tkinter.Button(self.mini_window,text='PC Games?',command=self.PCOptions, height = 5, width = 15) self.button27.pack() def ConsoleOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'purple') self.avg_mess = tkinter.Label(self.mini_window,text='What Kind of Games Are We Talking?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_ConsoleOptions = tkinter.Button(self.mini_window, text='The Best Game Is!...', command= lambda: webbrowser.open('https://www.ranker.com/app/search.htm?q=Best%20Console%20Video%20Games'), height = 5, width = 15) self.button_ConsoleOptions.pack() def PCOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'purple') self.avg_mess = tkinter.Label(self.mini_window,text='What Kind of Games Are We Talking?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_PCBestGames = tkinter.Button(self.mini_window, text='The Best Game Is!...', command= lambda: webbrowser.open('https://www.pcgamer.com/search/?searchTerm=best+pc+games'), height = 5, width = 15) 
self.button_PCBestGames.pack() def CookingOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'yellow') self.avg_mess = tkinter.Label(self.mini_window,text='What Are You Interested In Cooking Today?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button28 = tkinter.Button(self.mini_window,text='Breakfast?',command=self.BreakfastOptions, height = 5, width = 15) self.button28.pack() self.button29 = tkinter.Button(self.mini_window,text='Lunch?',command=self.LunchOptions, height = 5, width = 15) self.button29.pack() self.button30 = tkinter.Button(self.mini_window,text='Dinner?',command=self.DinnerOptions, height = 5, width = 15) self.button30.pack() self.button31 = tkinter.Button(self.mini_window,text='Snack?',command=self.SnackOptions, height = 5, width = 15) self.button31.pack() def BreakfastOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'yellow') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_breakfast = tkinter.Button(self.mini_window, text='You Should Cook...', command= lambda: webbrowser.open('https://www.scoopwhoop.com/food/16-healthy-3-ingredient-breakfast-recipes-to-try-out-while-in-quarantine/'), height = 5, width = 15) self.button_breakfast.pack() def LunchOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'yellow') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_lunch = tkinter.Button(self.mini_window, text='You Should Cook...', command= lambda: webbrowser.open('https://www.tasteofhome.com/collection/creative-quarantine-meals/'), height = 5, width = 15) self.button_lunch.pack() def DinnerOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'yellow') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_dinner = tkinter.Button(self.mini_window, text='You Should Cook...', command= lambda: webbrowser.open('https://www.eatthis.com/quarantine-recipes/'), height = 5, width = 15) self.button_dinner.pack() def SnackOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'yellow') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_snack = tkinter.Button(self.mini_window, text='You Should Cook...', command= lambda: webbrowser.open('https://health.ucdavis.edu/good-food/recipes/healthy-snacks-covid-19.html'), height = 5, width = 15) self.button_snack.pack() def ProductivityOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'silver') self.avg_mess = tkinter.Label(self.mini_window,text='What Are You Interested In Doing Today?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button32 = 
tkinter.Button(self.mini_window,text='Homework?',command=self.Homework, height = 5, width = 15) self.button32.pack() self.button33 = tkinter.Button(self.mini_window,text='Chores?',command=self.Chores, height = 5, width = 15) self.button33.pack() self.button34 = tkinter.Button(self.mini_window,text='Puzzle?',command=self.Puzzle, height = 5, width = 15) self.button34.pack() def Homework(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'silver') self.avg_mess = tkinter.Label(self.mini_window,text='Yeah, go do that....') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_homework = tkinter.Button(self.mini_window, text='You Should Work On...', command= lambda: webbrowser.open('https://www.youtube.com/watch?v=dQw4w9WgXcQ'), height = 5, width = 25) self.button_homework.pack() def Chores(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'silver') self.avg_mess = tkinter.Label(self.mini_window,text='You Do Not Want Everyone Yelling At You. 
Go, But Come Back When You Finish!') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_chores = tkinter.Button(self.mini_window, text='You Should Clean...', command= lambda: webbrowser.open('https://www.youtube.com/watch?v=dQw4w9WgXcQ'), height = 5, width = 15) self.button_chores.pack() def Puzzle(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'silver') self.avg_mess = tkinter.Label(self.mini_window,text='Go Do Some Brain Exercises!!') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_puzzle = tkinter.Button(self.mini_window, text='You Should Try...', command= lambda: webbrowser.open('https://www.youtube.com/watch?v=dQw4w9WgXcQ'), height = 5, width = 15) self.button_puzzle.pack() def WorkoutOptions(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'orange') self.avg_mess = tkinter.Label(self.mini_window,text='What Are You Interested In Working Today?') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button35 = 
tkinter.Button(self.mini_window,text='Arms?',command=self.ArmsWorkouts, height = 5, width = 15) self.button35.pack() self.button36 = tkinter.Button(self.mini_window,text='Legs?',command=self.LegWorkouts, height = 5, width = 15) self.button36.pack() self.button37 = tkinter.Button(self.mini_window,text='Core?',command=self.CoreWorkouts, height = 5, width = 15) self.button37.pack() def ArmsWorkouts(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'orange') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_arms = tkinter.Button(self.mini_window, text='You Should Do This Workout...', command= lambda: webbrowser.open('https://www.workout-generator.com/arm-workout.html'), height = 5, width = 25) self.button_arms.pack() def LegWorkouts(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'orange') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! 
You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_legs = tkinter.Button(self.mini_window, text='You Should Do This Workout...', command= lambda: webbrowser.open('https://www.workout-generator.com/leg-workout.html'), height = 5, width = 25) self.button_legs.pack() def CoreWorkouts(self): self.mini_window = tkinter.Toplevel() self.fullScreenState = False self.mini_window.attributes('-fullscreen', self.fullScreenState) self.w, self.h = self.mini_window.winfo_screenwidth(), self.mini_window.winfo_screenheight() self.mini_window.geometry("%dx%d" % (self.w, self.h)) self.mini_window.configure(bg = 'orange') self.avg_mess = tkinter.Label(self.mini_window,text='Good Choice! You Should Check Out...') self.avg_result_var = tkinter.StringVar() self.avg_result_display = tkinter.Label(self.mini_window,textvariable=self.avg_result_var) self.avg_mess.pack(fill="both") self.avg_result_display.pack() self.button_core = tkinter.Button(self.mini_window, text='You Should Do This Workout...', command= lambda: webbrowser.open('https://www.workout-generator.com/ab-workout.html'), height = 5, width = 25) self.button_core.pack() gui = MyGUI()
51.95443
157
0.680294
5,228
41,044
5.17062
0.058914
0.10654
0.186446
0.067254
0.859463
0.83342
0.793393
0.79073
0.770457
0.749593
0
0.013173
0.186215
41,044
789
158
52.020279
0.796144
0.002729
0
0.649435
0
0.004847
0.113132
0
0
0
0
0
0
1
0.063005
false
0
0.006462
0
0.071082
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c3cffc1e85231ef8e3067158417c840a7fb55ce9
9,602
py
Python
mayan/apps/document_states/tests/test_workflow_template_state_action_views.py
nattangwiwat/Mayan-EDMS-recitation
fcf16afb56eae812fb99144d65ae1ae6749de0b7
[ "Apache-2.0" ]
336
2019-05-09T07:05:19.000Z
2022-03-25T09:50:22.000Z
mayan/apps/document_states/tests/test_workflow_template_state_action_views.py
nattangwiwat/Mayan-EDMS-recitation
fcf16afb56eae812fb99144d65ae1ae6749de0b7
[ "Apache-2.0" ]
86
2021-09-01T23:53:02.000Z
2021-09-20T02:25:10.000Z
mayan/apps/document_states/tests/test_workflow_template_state_action_views.py
nattangwiwat/Mayan-EDMS-recitation
fcf16afb56eae812fb99144d65ae1ae6749de0b7
[ "Apache-2.0" ]
257
2019-05-14T10:26:37.000Z
2022-03-30T03:37:36.000Z
from mayan.apps.testing.tests.base import GenericViewTestCase from ..events import event_workflow_template_edited from ..permissions import permission_workflow_template_edit from .literals import TEST_WORKFLOW_TEMPLATE_STATE_ACTION_DOTTED_PATH from .mixins.workflow_template_mixins import WorkflowTemplateTestMixin from .mixins.workflow_template_state_mixins import ( WorkflowTemplateStateActionTestMixin, WorkflowTemplateStateActionViewTestMixin ) class WorkflowStateActionViewTestCase( WorkflowTemplateStateActionTestMixin, WorkflowTemplateStateActionViewTestMixin, WorkflowTemplateTestMixin, GenericViewTestCase ): def setUp(self): super().setUp() self._create_test_workflow_template() self._create_test_workflow_template_state() def test_workflow_state_action_create_get_view_no_permission(self): action_count = self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_workflow_template_state_action_create_get_view( class_path=TEST_WORKFLOW_TEMPLATE_STATE_ACTION_DOTTED_PATH ) self.assertEqual(response.status_code, 404) self.test_workflow_template_state.refresh_from_db() self.assertEqual( self.test_workflow_template_state.actions.count(), action_count ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_create_get_view_with_access(self): self.grant_access( obj=self.test_workflow_template, permission=permission_workflow_template_edit ) action_count = self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_workflow_template_state_action_create_get_view( class_path=TEST_WORKFLOW_TEMPLATE_STATE_ACTION_DOTTED_PATH ) self.assertEqual(response.status_code, 200) self.test_workflow_template_state.refresh_from_db() self.assertEqual( self.test_workflow_template_state.actions.count(), action_count ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_create_post_view_no_permission(self): action_count = 
self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_workflow_template_state_action_create_post_view( class_path=TEST_WORKFLOW_TEMPLATE_STATE_ACTION_DOTTED_PATH ) self.assertEqual(response.status_code, 404) self.test_workflow_template_state.refresh_from_db() self.assertEqual( self.test_workflow_template_state.actions.count(), action_count ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_create_post_view_with_access(self): self.grant_access( obj=self.test_workflow_template, permission=permission_workflow_template_edit ) action_count = self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_workflow_template_state_action_create_post_view( class_path=TEST_WORKFLOW_TEMPLATE_STATE_ACTION_DOTTED_PATH ) self.assertEqual(response.status_code, 302) self.test_workflow_template_state.refresh_from_db() self.assertEqual( self.test_workflow_template_state.actions.count(), action_count + 1 ) events = self._get_test_events() self.assertEqual(events.count(), 1) self.assertEqual( events[0].action_object, self.test_workflow_template_state_action ) self.assertEqual(events[0].actor, self._test_case_user) self.assertEqual(events[0].target, self.test_workflow_template) self.assertEqual(events[0].verb, event_workflow_template_edited.id) def test_workflow_state_action_delete_view_no_permission(self): self._create_test_workflow_template_state_action() action_count = self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_worflow_template_state_action_delete_view() self.assertEqual(response.status_code, 404) self.assertEqual( self.test_workflow_template_state.actions.count(), action_count ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_delete_view_with_access(self): self._create_test_workflow_template_state_action() self.grant_access( 
obj=self.test_workflow_template, permission=permission_workflow_template_edit ) action_count = self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_worflow_template_state_action_delete_view() self.assertEqual(response.status_code, 302) self.assertEqual( self.test_workflow_template_state.actions.count(), action_count - 1 ) events = self._get_test_events() self.assertEqual(events.count(), 1) self.assertEqual(events[0].action_object, None) self.assertEqual(events[0].actor, self._test_case_user) self.assertEqual(events[0].target, self.test_workflow_template) self.assertEqual(events[0].verb, event_workflow_template_edited.id) def test_workflow_state_action_edit_view_no_permission(self): self._create_test_workflow_template_state_action() action_label = self.test_workflow_template_state_action.label self._clear_events() response = self._request_test_worflow_template_state_action_edit_view() self.assertEqual(response.status_code, 404) self.test_workflow_template_state_action.refresh_from_db() self.assertEqual( self.test_workflow_template_state_action.label, action_label ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_edit_view_with_access(self): self._create_test_workflow_template_state_action() self.grant_access( obj=self.test_workflow_template, permission=permission_workflow_template_edit ) action_label = self.test_workflow_template_state_action.label self._clear_events() response = self._request_test_worflow_template_state_action_edit_view() self.assertEqual(response.status_code, 302) self.test_workflow_template_state_action.refresh_from_db() self.assertNotEqual( self.test_workflow_template_state_action.label, action_label ) events = self._get_test_events() self.assertEqual(events.count(), 1) self.assertEqual( events[0].action_object, self.test_workflow_template_state_action ) self.assertEqual(events[0].actor, self._test_case_user) self.assertEqual(events[0].target, 
self.test_workflow_template) self.assertEqual(events[0].verb, event_workflow_template_edited.id) def test_workflow_state_action_list_view_no_permission(self): self._create_test_workflow_template_state_action() self._clear_events() response = self._request_test_worflow_template_state_action_list_view() self.assertNotContains( response=response, text=self.TestWorkflowAction.label, status_code=404 ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_list_view_with_access(self): self._create_test_workflow_template_state_action() self.grant_access( obj=self.test_workflow_template, permission=permission_workflow_template_edit ) self._clear_events() response = self._request_test_worflow_template_state_action_list_view() self.assertContains( response=response, text=self.TestWorkflowAction.label, status_code=200 ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_selection_view_no_permission(self): action_count = self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_workflow_template_state_action_selection_view() self.assertEqual(response.status_code, 404) self.test_workflow_template_state.refresh_from_db() self.assertEqual( self.test_workflow_template_state.actions.count(), action_count ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_state_action_selection_view_with_access(self): self.grant_access( obj=self.test_workflow_template, permission=permission_workflow_template_edit ) action_count = self.test_workflow_template_state.actions.count() self._clear_events() response = self._request_test_workflow_template_state_action_selection_view() self.assertEqual(response.status_code, 302) self.test_workflow_template_state.refresh_from_db() self.assertEqual( self.test_workflow_template_state.actions.count(), action_count ) events = self._get_test_events() self.assertEqual(events.count(), 0)
36.371212
86
0.721829
1,095
9,602
5.830137
0.06758
0.177945
0.181704
0.18797
0.908521
0.903822
0.89834
0.887531
0.862782
0.862782
0
0.008136
0.206415
9,602
263
87
36.509506
0.829659
0
0
0.691919
0
0
0
0
0
0
0
0
0.232323
1
0.065657
false
0
0.030303
0
0.10101
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
c3fdd26bc85d15e7b216e67ef2137f0eb2d4e05a
44,905
py
Python
fireant/tests/slicer/widgets/test_reacttable.py
vladaspasic/fireant
2dbae6a97a927ef62fdcd5f37fcb51a7d6d55334
[ "Apache-2.0" ]
null
null
null
fireant/tests/slicer/widgets/test_reacttable.py
vladaspasic/fireant
2dbae6a97a927ef62fdcd5f37fcb51a7d6d55334
[ "Apache-2.0" ]
null
null
null
fireant/tests/slicer/widgets/test_reacttable.py
vladaspasic/fireant
2dbae6a97a927ef62fdcd5f37fcb51a7d6d55334
[ "Apache-2.0" ]
null
null
null
import copy from unittest import TestCase from fireant.slicer.totals import MAX_STRING from fireant.slicer.widgets.reacttable import ReactTable from fireant.slicer.widgets.reacttable import ReferenceItem from fireant.tests.slicer.mocks import ( CumSum, ElectionOverElection, cat_dim_df, cat_uni_dim_df, cont_dim_df, cont_dim_operation_df, cont_uni_dim_all_totals_df, cont_uni_dim_df, cont_uni_dim_ref_df, cont_uni_dim_totals_df, multi_metric_df, single_metric_df, slicer, uni_dim_df, ) from fireant.utils import format_dimension_key as fd class ReactTableTransformerTests(TestCase): maxDiff = None def test_single_metric(self): result = ReactTable(slicer.metrics.votes) \ .transform(single_metric_df, slicer, [], []) self.assertEqual({ 'columns': [{'Header': 'Votes', 'accessor': '$m$votes'}], 'data': [{'$m$votes': {'display': '111,674,336', 'raw': 111674336}}] }, result) def test_multiple_metrics(self): result = ReactTable(slicer.metrics.votes, slicer.metrics.wins) \ .transform(multi_metric_df, slicer, [], []) self.assertEqual({ 'columns': [{'Header': 'Votes', 'accessor': '$m$votes'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$m$votes': {'display': '111,674,336', 'raw': 111674336}, '$m$wins': {'display': '12', 'raw': 12} }] }, result) def test_multiple_metrics_reversed(self): result = ReactTable(slicer.metrics.wins, slicer.metrics.votes) \ .transform(multi_metric_df, slicer, [], []) self.assertEqual({ 'columns': [{'Header': 'Wins', 'accessor': '$m$wins'}, {'Header': 'Votes', 'accessor': '$m$votes'}], 'data': [{ '$m$votes': {'display': '111,674,336', 'raw': 111674336}, '$m$wins': {'display': '12', 'raw': 12} }] }, result) def test_time_series_dim(self): result = ReactTable(slicer.metrics.wins) \ .transform(cont_dim_df, slicer, [slicer.dimensions.timestamp], []) self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$timestamp': {'raw': '1996-01-01'}, '$m$wins': {'display': 
'2', 'raw': 2} }, { '$d$timestamp': {'raw': '2000-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2004-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2008-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2012-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2016-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }] }, result) def test_time_series_dim_with_operation(self): result = ReactTable(CumSum(slicer.metrics.votes)) \ .transform(cont_dim_operation_df, slicer, [slicer.dimensions.timestamp], []) self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'CumSum(Votes)', 'accessor': '$m$cumsum(votes)'}], 'data': [{ '$d$timestamp': {'raw': '1996-01-01'}, '$m$cumsum(votes)': {'display': '15,220,449', 'raw': 15220449} }, { '$d$timestamp': {'raw': '2000-01-01'}, '$m$cumsum(votes)': {'display': '31,882,466', 'raw': 31882466} }, { '$d$timestamp': {'raw': '2004-01-01'}, '$m$cumsum(votes)': {'display': '51,497,398', 'raw': 51497398} }, { '$d$timestamp': {'raw': '2008-01-01'}, '$m$cumsum(votes)': {'display': '72,791,613', 'raw': 72791613} }, { '$d$timestamp': {'raw': '2012-01-01'}, '$m$cumsum(votes)': {'display': '93,363,823', 'raw': 93363823} }, { '$d$timestamp': {'raw': '2016-01-01'}, '$m$cumsum(votes)': {'display': '111,674,336', 'raw': 111674336} }] }, result) def test_cat_dim(self): result = ReactTable(slicer.metrics.wins) \ .transform(cat_dim_df, slicer, [slicer.dimensions.political_party], []) self.assertEqual({ 'columns': [{'Header': 'Party', 'accessor': '$d$political_party'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$political_party': {'display': 'Democrat', 'raw': 'd'}, '$m$wins': {'display': '6', 'raw': 6} }, { '$d$political_party': {'display': 'Independent', 'raw': 'i'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$political_party': {'display': 'Republican', 'raw': 'r'}, '$m$wins': {'display': 
'6', 'raw': 6} }] }, result) def test_uni_dim(self): result = ReactTable(slicer.metrics.wins) \ .transform(uni_dim_df, slicer, [slicer.dimensions.candidate], []) self.assertEqual({ 'columns': [{'Header': 'Candidate', 'accessor': '$d$candidate'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$candidate': {'display': 'Bill Clinton', 'raw': '1'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$candidate': {'display': 'Bob Dole', 'raw': '2'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$candidate': {'display': 'Ross Perot', 'raw': '3'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$candidate': {'display': 'George Bush', 'raw': '4'}, '$m$wins': {'display': '4', 'raw': 4} }, { '$d$candidate': {'display': 'Al Gore', 'raw': '5'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$candidate': {'display': 'John Kerry', 'raw': '6'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$candidate': {'display': 'Barrack Obama', 'raw': '7'}, '$m$wins': {'display': '4', 'raw': 4} }, { '$d$candidate': {'display': 'John McCain', 'raw': '8'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$candidate': {'display': 'Mitt Romney', 'raw': '9'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$candidate': {'display': 'Donald Trump', 'raw': '10'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$candidate': {'display': 'Hillary Clinton', 'raw': '11'}, '$m$wins': {'display': '0', 'raw': 0} }] }, result) def test_uni_dim_no_display_definition(self): import copy candidate = copy.copy(slicer.dimensions.candidate) uni_dim_df_copy = uni_dim_df.copy() del uni_dim_df_copy[fd(slicer.dimensions.candidate.display.key)] del candidate.display result = ReactTable(slicer.metrics.wins) \ .transform(uni_dim_df_copy, slicer, [candidate], []) self.assertEqual({ 'columns': [{'Header': 'Candidate', 'accessor': '$d$candidate'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{'$d$candidate': {'raw': '1'}, '$m$wins': {'display': '2', 'raw': 2}}, {'$d$candidate': {'raw': '2'}, '$m$wins': {'display': '0', 
'raw': 0}}, {'$d$candidate': {'raw': '3'}, '$m$wins': {'display': '0', 'raw': 0}}, {'$d$candidate': {'raw': '4'}, '$m$wins': {'display': '4', 'raw': 4}}, {'$d$candidate': {'raw': '5'}, '$m$wins': {'display': '0', 'raw': 0}}, {'$d$candidate': {'raw': '6'}, '$m$wins': {'display': '0', 'raw': 0}}, {'$d$candidate': {'raw': '7'}, '$m$wins': {'display': '4', 'raw': 4}}, {'$d$candidate': {'raw': '8'}, '$m$wins': {'display': '0', 'raw': 0}}, {'$d$candidate': {'raw': '9'}, '$m$wins': {'display': '0', 'raw': 0}}, { '$d$candidate': {'raw': '10'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$candidate': {'raw': '11'}, '$m$wins': {'display': '0', 'raw': 0} }] }, result) def test_multi_dims_time_series_and_uni(self): result = ReactTable(slicer.metrics.wins) \ .transform(cont_uni_dim_df, slicer, [slicer.dimensions.timestamp, slicer.dimensions.state], []) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'State', 'accessor': '$d$state'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$state': {'display': 'Texas', 'raw': '1'}, '$d$timestamp': {'raw': '1996-01-01'}, '$m$wins': {'display': '1', 'raw': 1} }, { '$d$state': {'display': 'California', 'raw': '2'}, '$d$timestamp': {'raw': '1996-01-01'}, '$m$wins': {'display': '1', 'raw': 1} }] }, result) def test_multi_dims_with_one_level_totals(self): result = ReactTable(slicer.metrics.wins) \ .transform(cont_uni_dim_totals_df, slicer, [slicer.dimensions.timestamp, slicer.dimensions.state.rollup()], []) self.assertIn('data', result) result['data'] = result['data'][-3:] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'State', 'accessor': '$d$state'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$state': {'display': 'Texas', 'raw': '1'}, '$d$timestamp': 
{'raw': '2016-01-01'}, '$m$wins': {'display': '1', 'raw': 1} }, { '$d$state': {'display': 'California', 'raw': '2'}, '$d$timestamp': {'raw': '2016-01-01'}, '$m$wins': {'display': '1', 'raw': 1} }, { '$d$state': {'raw': 'Totals'}, '$d$timestamp': {'raw': '2016-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }] }, result) def test_multi_dims_with_all_levels_totals(self): result = ReactTable(slicer.metrics.wins) \ .transform(cont_uni_dim_all_totals_df, slicer, [slicer.dimensions.timestamp.rollup(), slicer.dimensions.state.rollup()], []) self.assertIn('data', result) result['data'] = result['data'][:3] + result['data'][-1:] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'State', 'accessor': '$d$state'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$state': {'display': 'Texas', 'raw': '1'}, '$d$timestamp': {'raw': '1996-01-01'}, '$m$wins': {'display': '1', 'raw': 1} }, { '$d$state': {'display': 'California', 'raw': '2'}, '$d$timestamp': {'raw': '1996-01-01'}, '$m$wins': {'display': '1', 'raw': 1} }, { '$d$state': {'raw': 'Totals'}, '$d$timestamp': {'raw': '1996-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$state': {'raw': 'Totals'}, '$d$timestamp': {'raw': 'Totals'}, '$m$wins': {'display': '12', 'raw': 12} }] }, result) def test_time_series_ref(self): result = ReactTable(slicer.metrics.votes) \ .transform(cont_uni_dim_ref_df, slicer, [ slicer.dimensions.timestamp, slicer.dimensions.state ], [ ElectionOverElection(slicer.dimensions.timestamp) ]) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'State', 'accessor': '$d$state'}, {'Header': 'Votes', 'accessor': '$m$votes'}, {'Header': 'Votes (EoE)', 'accessor': '$m$votes_eoe'}], 'data': [{ '$d$state': {'display': 'Texas', 'raw': '1'}, 
'$d$timestamp': {'raw': '2000-01-01'}, '$m$votes': {'display': '6,233,385', 'raw': 6233385}, '$m$votes_eoe': {'display': '5,574,387', 'raw': 5574387} }, { '$d$state': {'display': 'California', 'raw': '2'}, '$d$timestamp': {'raw': '2000-01-01'}, '$m$votes': {'display': '10,428,632', 'raw': 10428632}, '$m$votes_eoe': {'display': '9,646,062', 'raw': 9646062} }] }, result) def test_time_series_ref_multiple_metrics(self): result = ReactTable(slicer.metrics.votes, slicer.metrics.wins) \ .transform(cont_uni_dim_ref_df, slicer, [ slicer.dimensions.timestamp, slicer.dimensions.state ], [ ElectionOverElection(slicer.dimensions.timestamp) ]) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'State', 'accessor': '$d$state'}, {'Header': 'Votes', 'accessor': '$m$votes'}, {'Header': 'Votes (EoE)', 'accessor': '$m$votes_eoe'}, {'Header': 'Wins', 'accessor': '$m$wins'}, {'Header': 'Wins (EoE)', 'accessor': '$m$wins_eoe'}], 'data': [{ '$d$state': {'display': 'Texas', 'raw': '1'}, '$d$timestamp': {'raw': '2000-01-01'}, '$m$votes': {'display': '6,233,385', 'raw': 6233385}, '$m$votes_eoe': {'display': '5,574,387', 'raw': 5574387}, '$m$wins': {'display': '1', 'raw': 1}, '$m$wins_eoe': {'display': '1', 'raw': 1} }, { '$d$state': {'display': 'California', 'raw': '2'}, '$d$timestamp': {'raw': '2000-01-01'}, '$m$votes': {'display': '10,428,632', 'raw': 10428632}, '$m$votes_eoe': {'display': '9,646,062', 'raw': 9646062}, '$m$wins': {'display': '1', 'raw': 1}, '$m$wins_eoe': {'display': '1', 'raw': 1} }] }, result) def test_transpose(self): result = ReactTable(slicer.metrics.wins, transpose=True) \ .transform(cat_dim_df, slicer, [slicer.dimensions.political_party], []) self.assertEqual({ 'columns': [{'Header': '', 'accessor': '$d$metrics'}, {'Header': 'Democrat', 'accessor': 'd'}, {'Header': 'Independent', 'accessor': 'i'}, 
{'Header': 'Republican', 'accessor': 'r'}], 'data': [{ '$d$metrics': {'raw': 'Wins'}, 'd': {'display': '6', 'raw': 6}, 'i': {'display': '0', 'raw': 0}, 'r': {'display': '6', 'raw': 6} }] }, result) def test_pivot_second_dimension_with_one_metric(self): result = ReactTable(slicer.metrics.wins, pivot=[slicer.dimensions.state]) \ .transform(cont_uni_dim_df, slicer, [slicer.dimensions.timestamp, slicer.dimensions.state], []) self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'Texas', 'accessor': '1'}, {'Header': 'California', 'accessor': '2'}], 'data': [{ '$d$timestamp': {'raw': '1996-01-01'}, '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }, { '$d$timestamp': {'raw': '2000-01-01'}, '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }, { '$d$timestamp': {'raw': '2004-01-01'}, '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }, { '$d$timestamp': {'raw': '2008-01-01'}, '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }, { '$d$timestamp': {'raw': '2012-01-01'}, '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }, { '$d$timestamp': {'raw': '2016-01-01'}, '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }] }, result) def test_pivot_second_dimension_with_multiple_metrics(self): result = ReactTable(slicer.metrics.wins, slicer.metrics.votes, pivot=[slicer.dimensions.state]) \ .transform(cont_uni_dim_df, slicer, [slicer.dimensions.timestamp, slicer.dimensions.state], []) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, { 'Header': 'Votes', 'columns': [{'Header': 'Texas', 'accessor': '$m$votes.1'}, {'Header': 'California', 'accessor': '$m$votes.2'}] }, { 'Header': 'Wins', 'columns': [{'Header': 'Texas', 'accessor': '$m$wins.1'}, {'Header': 'California', 'accessor': '$m$wins.2'}] }], 'data': 
[{ '$d$timestamp': {'raw': '1996-01-01'}, '$m$votes': { '1': {'display': '5,574,387', 'raw': 5574387}, '2': {'display': '9,646,062', 'raw': 9646062} }, '$m$wins': { '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} } }, { '$d$timestamp': {'raw': '2000-01-01'}, '$m$votes': { '1': {'display': '6,233,385', 'raw': 6233385}, '2': {'display': '10,428,632', 'raw': 10428632} }, '$m$wins': { '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} } }] }, result) def test_pivot_second_dimension_with_multiple_metrics_and_references(self): result = ReactTable(slicer.metrics.votes, slicer.metrics.wins, pivot=[slicer.dimensions.state]) \ .transform(cont_uni_dim_ref_df, slicer, [ slicer.dimensions.timestamp, slicer.dimensions.state ], [ ElectionOverElection(slicer.dimensions.timestamp) ]) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, { 'Header': 'Votes', 'columns': [{'Header': 'Texas', 'accessor': '$m$votes.1'}, {'Header': 'California', 'accessor': '$m$votes.2'}] }, { 'Header': 'Votes (EoE)', 'columns': [{'Header': 'Texas', 'accessor': '$m$votes_eoe.1'}, { 'Header': 'California', 'accessor': '$m$votes_eoe.2' }] }, { 'Header': 'Wins', 'columns': [{'Header': 'Texas', 'accessor': '$m$wins.1'}, {'Header': 'California', 'accessor': '$m$wins.2'}] }, { 'Header': 'Wins (EoE)', 'columns': [{'Header': 'Texas', 'accessor': '$m$wins_eoe.1'}, { 'Header': 'California', 'accessor': '$m$wins_eoe.2' }] }], 'data': [{ '$d$timestamp': {'raw': '2000-01-01'}, '$m$votes': { '1': {'display': '6,233,385', 'raw': 6233385}, '2': {'display': '10,428,632', 'raw': 10428632} }, '$m$votes_eoe': { '1': {'display': '5,574,387', 'raw': 5574387}, '2': {'display': '9,646,062', 'raw': 9646062} }, '$m$wins': { '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }, '$m$wins_eoe': { '1': {'display': '1', 'raw': 1}, '2': 
{'display': '1', 'raw': 1} } }, { '$d$timestamp': {'raw': '2004-01-01'}, '$m$votes': { '1': {'display': '7,359,621', 'raw': 7359621}, '2': {'display': '12,255,311', 'raw': 12255311} }, '$m$votes_eoe': { '1': {'display': '6,233,385', 'raw': 6233385}, '2': {'display': '10,428,632', 'raw': 10428632} }, '$m$wins': { '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} }, '$m$wins_eoe': { '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1} } }] }, result) def test_pivot_single_dimension_as_rows_single_metric_metrics_automatically_pivoted(self): result = ReactTable(slicer.metrics.wins, pivot=[slicer.dimensions.candidate]) \ .transform(uni_dim_df, slicer, [slicer.dimensions.candidate], []) self.assertEqual({ 'columns': [{'Header': '', 'accessor': '$d$metrics'}, {'Header': 'Bill Clinton', 'accessor': '1'}, {'Header': 'Bob Dole', 'accessor': '2'}, {'Header': 'Ross Perot', 'accessor': '3'}, {'Header': 'George Bush', 'accessor': '4'}, {'Header': 'Al Gore', 'accessor': '5'}, {'Header': 'John Kerry', 'accessor': '6'}, {'Header': 'Barrack Obama', 'accessor': '7'}, {'Header': 'John McCain', 'accessor': '8'}, {'Header': 'Mitt Romney', 'accessor': '9'}, {'Header': 'Donald Trump', 'accessor': '10'}, {'Header': 'Hillary Clinton', 'accessor': '11'}], 'data': [{ '$d$metrics': {'raw': 'Wins'}, '1': {'display': '2', 'raw': 2}, '10': {'display': '2', 'raw': 2}, '11': {'display': '0', 'raw': 0}, '2': {'display': '0', 'raw': 0}, '3': {'display': '0', 'raw': 0}, '4': {'display': '4', 'raw': 4}, '5': {'display': '0', 'raw': 0}, '6': {'display': '0', 'raw': 0}, '7': {'display': '4', 'raw': 4}, '8': {'display': '0', 'raw': 0}, '9': {'display': '0', 'raw': 0} }] }, result) def test_pivot_single_dimension_as_rows_single_metric_and_transpose_set_to_true(self): result = ReactTable(slicer.metrics.wins, pivot=[slicer.dimensions.candidate], transpose=True) \ .transform(uni_dim_df, slicer, [slicer.dimensions.candidate], []) self.assertEqual({ 'columns': [{'Header': '', 
'accessor': '$d$metrics'}, {'Header': 'Bill Clinton', 'accessor': '1'}, {'Header': 'Bob Dole', 'accessor': '2'}, {'Header': 'Ross Perot', 'accessor': '3'}, {'Header': 'George Bush', 'accessor': '4'}, {'Header': 'Al Gore', 'accessor': '5'}, {'Header': 'John Kerry', 'accessor': '6'}, {'Header': 'Barrack Obama', 'accessor': '7'}, {'Header': 'John McCain', 'accessor': '8'}, {'Header': 'Mitt Romney', 'accessor': '9'}, {'Header': 'Donald Trump', 'accessor': '10'}, {'Header': 'Hillary Clinton', 'accessor': '11'}], 'data': [{ '$d$metrics': {'raw': 'Wins'}, '1': {'display': '2', 'raw': 2}, '10': {'display': '2', 'raw': 2}, '11': {'display': '0', 'raw': 0}, '2': {'display': '0', 'raw': 0}, '3': {'display': '0', 'raw': 0}, '4': {'display': '4', 'raw': 4}, '5': {'display': '0', 'raw': 0}, '6': {'display': '0', 'raw': 0}, '7': {'display': '4', 'raw': 4}, '8': {'display': '0', 'raw': 0}, '9': {'display': '0', 'raw': 0} }] }, result) def test_pivot_single_dimension_as_rows_multiple_metrics(self): result = ReactTable(slicer.metrics.wins, slicer.metrics.votes, pivot=[slicer.dimensions.candidate]) \ .transform(uni_dim_df, slicer, [slicer.dimensions.candidate], []) self.assertEqual({ 'columns': [{'Header': '', 'accessor': '$d$metrics'}, {'Header': 'Bill Clinton', 'accessor': '1'}, {'Header': 'Bob Dole', 'accessor': '2'}, {'Header': 'Ross Perot', 'accessor': '3'}, {'Header': 'George Bush', 'accessor': '4'}, {'Header': 'Al Gore', 'accessor': '5'}, {'Header': 'John Kerry', 'accessor': '6'}, {'Header': 'Barrack Obama', 'accessor': '7'}, {'Header': 'John McCain', 'accessor': '8'}, {'Header': 'Mitt Romney', 'accessor': '9'}, {'Header': 'Donald Trump', 'accessor': '10'}, {'Header': 'Hillary Clinton', 'accessor': '11'}], 'data': [{ '$d$metrics': {'raw': 'Wins'}, '1': {'display': '2', 'raw': 2}, '10': {'display': '2', 'raw': 2}, '11': {'display': '0', 'raw': 0}, '2': {'display': '0', 'raw': 0}, '3': {'display': '0', 'raw': 0}, '4': {'display': '4', 'raw': 4}, '5': {'display': '0', 'raw': 0}, 
'6': {'display': '0', 'raw': 0}, '7': {'display': '4', 'raw': 4}, '8': {'display': '0', 'raw': 0}, '9': {'display': '0', 'raw': 0} }, { '$d$metrics': {'raw': 'Votes'}, '1': {'display': '7,579,518', 'raw': 7579518}, '10': {'display': '13,438,835', 'raw': 13438835}, '11': {'display': '4,871,678', 'raw': 4871678}, '2': {'display': '6,564,547', 'raw': 6564547}, '3': {'display': '1,076,384', 'raw': 1076384}, '4': {'display': '18,403,811', 'raw': 18403811}, '5': {'display': '8,294,949', 'raw': 8294949}, '6': {'display': '9,578,189', 'raw': 9578189}, '7': {'display': '24,227,234', 'raw': 24227234}, '8': {'display': '9,491,109', 'raw': 9491109}, '9': {'display': '8,148,082', 'raw': 8148082} }] }, result) def test_pivot_single_metric_time_series_dim(self): result = ReactTable(slicer.metrics.wins) \ .transform(cont_dim_df, slicer, [slicer.dimensions.timestamp], []) self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': '$d$timestamp'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$timestamp': {'raw': '1996-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2000-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2004-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2008-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2012-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$timestamp': {'raw': '2016-01-01'}, '$m$wins': {'display': '2', 'raw': 2} }] }, result) def test_pivot_multi_dims_with_all_levels_totals(self): state = slicer.dimensions.state.rollup() result = ReactTable(slicer.metrics.wins, slicer.metrics.votes, pivot=[state]) \ .transform(cont_uni_dim_all_totals_df, slicer, [slicer.dimensions.timestamp.rollup(), state], []) self.assertIn('data', result) result['data'] = result['data'][:2] + result['data'][-1:] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Timestamp', 'accessor': 
'$d$timestamp'}, { 'Header': 'Votes', 'columns': [{'Header': 'Texas', 'accessor': '$m$votes.1'}, {'Header': 'California', 'accessor': '$m$votes.2'}, { 'Header': 'Totals', 'accessor': '$m$votes.{}'.format(MAX_STRING), 'className': 'fireant-totals' }] }, { 'Header': 'Wins', 'columns': [{'Header': 'Texas', 'accessor': '$m$wins.1'}, {'Header': 'California', 'accessor': '$m$wins.2'}, { 'Header': 'Totals', 'accessor': '$m$wins.{}'.format(MAX_STRING), 'className': 'fireant-totals' }] }], 'data': [{ '$d$timestamp': {'raw': '1996-01-01'}, '$m$votes': { '1': {'display': '5,574,387', 'raw': 5574387}, '2': {'display': '9,646,062', 'raw': 9646062}, MAX_STRING: {'display': '15,220,449', 'raw': 15220449} }, '$m$wins': { '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1}, MAX_STRING: {'display': '2', 'raw': 2} } }, { '$d$timestamp': {'raw': '2000-01-01'}, '$m$votes': { '1': {'display': '6,233,385', 'raw': 6233385}, '2': {'display': '10,428,632', 'raw': 10428632}, MAX_STRING: {'display': '16,662,017', 'raw': 16662017} }, '$m$wins': { '1': {'display': '1', 'raw': 1}, '2': {'display': '1', 'raw': 1}, MAX_STRING: {'display': '2', 'raw': 2} } }, { '$d$timestamp': {'raw': 'Totals'}, '$m$votes': { '1': {'display': 'null', 'raw': None}, '2': {'display': 'null', 'raw': None}, MAX_STRING: { 'display': '111,674,336', 'raw': 111674336 } }, '$m$wins': { '1': {'display': 'null', 'raw': None}, '2': {'display': 'null', 'raw': None}, MAX_STRING: {'display': '12', 'raw': 12} } }] }, result) def test_pivot_first_dimension_and_transpose_with_all_levels_totals(self): state = slicer.dimensions.state.rollup() result = ReactTable(slicer.metrics.wins, slicer.metrics.votes, pivot=[state], transpose=True) \ .transform(cont_uni_dim_all_totals_df, slicer, [slicer.dimensions.timestamp.rollup(), state], []) self.assertIn('data', result) result['data'] = result['data'][:6:3] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': '', 'accessor': 
'$d$metrics'}, {'Header': 'State', 'accessor': '$d$state'}, {'Header': '1996-01-01', 'accessor': '1996-01-01'}, {'Header': '2000-01-01', 'accessor': '2000-01-01'}, {'Header': '2004-01-01', 'accessor': '2004-01-01'}, {'Header': '2008-01-01', 'accessor': '2008-01-01'}, {'Header': '2012-01-01', 'accessor': '2012-01-01'}, {'Header': '2016-01-01', 'accessor': '2016-01-01'}, { 'Header': 'Totals', 'accessor': 'Totals', 'className': 'fireant-totals' }], 'data': [{ '$d$metrics': {'raw': 'Wins'}, '$d$state': {'display': 'Texas', 'raw': '1'}, '1996-01-01': {'display': '1', 'raw': 1}, '2000-01-01': {'display': '1', 'raw': 1}, '2004-01-01': {'display': '1', 'raw': 1}, '2008-01-01': {'display': '1', 'raw': 1}, '2012-01-01': {'display': '1', 'raw': 1}, '2016-01-01': {'display': '1', 'raw': 1}, 'Totals': {'display': 'null', 'raw': None} }, { '$d$metrics': {'raw': 'Votes'}, '$d$state': {'display': 'Texas', 'raw': '1'}, '1996-01-01': {'display': '5,574,387', 'raw': 5574387}, '2000-01-01': {'display': '6,233,385', 'raw': 6233385}, '2004-01-01': {'display': '7,359,621', 'raw': 7359621}, '2008-01-01': {'display': '8,007,961', 'raw': 8007961}, '2012-01-01': {'display': '7,877,967', 'raw': 7877967}, '2016-01-01': {'display': '5,072,915', 'raw': 5072915}, 'Totals': {'display': 'null', 'raw': None} }] }, result) def test_pivot_second_dimension_and_transpose_with_all_levels_totals(self): state = slicer.dimensions.state.rollup() result = ReactTable(slicer.metrics.wins, slicer.metrics.votes, pivot=[state], transpose=True) \ .transform(cont_uni_dim_all_totals_df, slicer, [slicer.dimensions.timestamp.rollup(), state], []) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': '', 'accessor': '$d$metrics'}, {'Header': 'State', 'accessor': '$d$state'}, {'Header': '1996-01-01', 'accessor': '1996-01-01'}, {'Header': '2000-01-01', 'accessor': '2000-01-01'}, {'Header': '2004-01-01', 'accessor': 
'2004-01-01'}, {'Header': '2008-01-01', 'accessor': '2008-01-01'}, {'Header': '2012-01-01', 'accessor': '2012-01-01'}, {'Header': '2016-01-01', 'accessor': '2016-01-01'}, { 'Header': 'Totals', 'accessor': 'Totals', 'className': 'fireant-totals' }], 'data': [{ '$d$metrics': {'raw': 'Wins'}, '$d$state': {'display': 'Texas', 'raw': '1'}, '1996-01-01': {'display': '1', 'raw': 1}, '2000-01-01': {'display': '1', 'raw': 1}, '2004-01-01': {'display': '1', 'raw': 1}, '2008-01-01': {'display': '1', 'raw': 1}, '2012-01-01': {'display': '1', 'raw': 1}, '2016-01-01': {'display': '1', 'raw': 1}, 'Totals': {'display': 'null', 'raw': None} }, { '$d$metrics': {'raw': 'Wins'}, '$d$state': {'display': 'California', 'raw': '2'}, '1996-01-01': {'display': '1', 'raw': 1}, '2000-01-01': {'display': '1', 'raw': 1}, '2004-01-01': {'display': '1', 'raw': 1}, '2008-01-01': {'display': '1', 'raw': 1}, '2012-01-01': {'display': '1', 'raw': 1}, '2016-01-01': {'display': '1', 'raw': 1}, 'Totals': {'display': 'null', 'raw': None} }] }, result) class ReactTableHyperlinkTransformerTests(TestCase): maxDiff = None @classmethod def setUpClass(cls): cls.slicer = copy.deepcopy(slicer) def test_dim_with_hyperlink_hyperlink_is_always_included(self): slicer = self.slicer slicer.dimensions.political_party.hyperlink_template = 'http://example.com/{political_party}' result = ReactTable(slicer.metrics.wins) \ .transform(cat_dim_df, slicer, [slicer.dimensions.political_party], []) self.assertEqual({ 'columns': [{'Header': 'Party', 'accessor': '$d$political_party'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$political_party': {'display': 'Democrat', 'hyperlink': 'http://example.com/d', 'raw': 'd'}, '$m$wins': {'display': '6', 'raw': 6} }, { '$d$political_party': {'display': 'Independent', 'hyperlink': 'http://example.com/i', 'raw': 'i'}, '$m$wins': {'display': '0', 'raw': 0} }, { '$d$political_party': {'display': 'Republican', 'hyperlink': 'http://example.com/r', 'raw': 'r'}, '$m$wins': 
{'display': '6', 'raw': 6} }] }, result) def test_dim_with_hyperlink_depending_on_another_dim_not_included_if_other_dim_is_not_selected(self): slicer = self.slicer slicer.dimensions.political_party.hyperlink_template = 'http://example.com/{candidate}' result = ReactTable(slicer.metrics.wins) \ .transform(cat_dim_df, slicer, [slicer.dimensions.political_party], []) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Party', 'accessor': '$d$political_party'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$political_party': {'display': 'Democrat', 'raw': 'd'}, '$m$wins': {'display': '6', 'raw': 6} }, { '$d$political_party': {'display': 'Independent', 'raw': 'i'}, '$m$wins': {'display': '0', 'raw': 0} }] }, result) def test_dim_with_hyperlink_depending_on_another_dim_included_if_other_dim_is_selected(self): slicer = self.slicer slicer.dimensions.political_party.hyperlink_template = 'http://example.com/candidates/{candidate}/' result = ReactTable(slicer.metrics.wins) \ .transform(cat_uni_dim_df, slicer, [slicer.dimensions.political_party, slicer.dimensions.candidate], []) self.assertIn('data', result) result['data'] = result['data'][:2] # shorten the results to make the test easier to read self.assertEqual({ 'columns': [{'Header': 'Party', 'accessor': '$d$political_party'}, {'Header': 'Candidate', 'accessor': '$d$candidate'}, {'Header': 'Wins', 'accessor': '$m$wins'}], 'data': [{ '$d$candidate': {'display': 'Bill Clinton', 'raw': '1'}, '$d$political_party': { 'display': 'Democrat', 'hyperlink': 'http://example.com/candidates/1/', 'raw': 'd' }, '$m$wins': {'display': '2', 'raw': 2} }, { '$d$candidate': {'display': 'Al Gore', 'raw': '5'}, '$d$political_party': { 'display': 'Democrat', 'hyperlink': 'http://example.com/candidates/5/', 'raw': 'd' }, '$m$wins': {'display': '0', 'raw': 0} }] }, result) class ReactTableReferenceItemFormatTests(TestCase): 
    @classmethod
    def setUpClass(cls):
        # The ReferenceItem attributes compared by assert_object_dict below.
        cls.ref_item_attrs = ['key', 'label', 'prefix', 'suffix', 'precision']

    def assert_object_dict(self, obj, exp, attributes=[]):
        """Assert that each named attribute of ``obj`` equals ``exp[attribute]``.

        Each attribute comparison runs in its own ``subTest`` so one mismatch
        does not hide the others.

        NOTE(review): the mutable default ``[]`` is never mutated here, so it
        is harmless, but ``()``/``None`` would be the conventional default.
        """
        for attribute in attributes:
            with self.subTest('{} should be equal'.format(attribute)):
                self.assertEqual(getattr(obj, attribute), exp[attribute])

    def test_base_ref_item(self):
        # A plain EoE reference keeps the metric's own prefix/suffix.
        exp_ref_item = {
            'key': 'wins_with_suffix_and_prefix_eoe',
            'label': 'Wins (EoE)',
            'prefix': '$',
            'suffix': '€',
            'precision': None,
        }

        ref = ElectionOverElection(slicer.dimensions.timestamp)
        ref_item = ReferenceItem(slicer.metrics.wins_with_suffix_and_prefix, ref)

        self.assert_object_dict(ref_item, exp_ref_item, self.ref_item_attrs)

    def test_ref_item_with_delta_percentage_formats_prefix_suffix(self):
        # A delta-percent reference drops the prefix and uses '%' as suffix.
        exp_ref_item = {
            'key': 'wins_with_suffix_and_prefix_eoe_delta_percent',
            'label': 'Wins (EoE Δ%)',
            'prefix': None,
            'suffix': '%',
            'precision': None,
        }

        ref = ElectionOverElection(slicer.dimensions.timestamp, delta=True, delta_percent=True)
        ref_item = ReferenceItem(slicer.metrics.wins_with_suffix_and_prefix, ref)

        self.assert_object_dict(ref_item, exp_ref_item, self.ref_item_attrs)
46.678794
120
0.418417
4,118
44,905
4.466731
0.068723
0.025008
0.037186
0.036534
0.878439
0.861585
0.816734
0.785854
0.774437
0.750027
0
0.072532
0.377352
44,905
961
121
46.727367
0.5853
0.013874
0
0.741419
0
0
0.256861
0.001717
0
0
0
0
0.049199
1
0.036613
false
0
0.009153
0
0.051487
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7f0d3062462fbe7eaebedf930e4ff6fa10e3992d
14,952
py
Python
pytpp/api/websdk/endpoints/secret_store.py
Venafi/pytpp
42af655b2403b8c9447c86962abd4aaa0201f646
[ "MIT" ]
4
2022-02-04T23:58:55.000Z
2022-02-15T18:53:08.000Z
pytpp/api/websdk/endpoints/secret_store.py
Venafi/pytpp
42af655b2403b8c9447c86962abd4aaa0201f646
[ "MIT" ]
null
null
null
pytpp/api/websdk/endpoints/secret_store.py
Venafi/pytpp
42af655b2403b8c9447c86962abd4aaa0201f646
[ "MIT" ]
null
null
null
from typing import List
from pytpp.api.api_base import API, APIResponse, api_response_property
from pytpp.properties.response_objects.secret_store import SecretStore


class _SecretStore:
    """Grouping of the WebSDK ``/SecretStore`` endpoints.

    Instantiating this class builds one endpoint object per route; each
    endpoint's ``get``/``post`` method sends the HTTP request and returns a
    local ``_Response`` wrapper that exposes the response fields of interest
    as properties.
    """

    def __init__(self, api_obj):
        # api_obj: the authenticated WebSDK API session shared by every endpoint.
        self.Add = self._Add(api_obj=api_obj)
        self.Associate = self._Associate(api_obj=api_obj)
        self.Dissociate = self._Dissociate(api_obj=api_obj)
        self.EncryptionKeysInUse = self._EncryptionKeysInUse(api_obj=api_obj)
        self.Lookup = self._Lookup(api_obj=api_obj)
        self.LookupAllAssociationsbyVaultid = self._LookupAllAssociationsbyVaultid(api_obj=api_obj)
        self.LookupByAssociation = self._LookupByAssociation(api_obj=api_obj)
        self.LookupAssociationbyVaultID = self._LookupAssociationbyVaultID(api_obj=api_obj)
        self.LookupByOwner = self._LookupByOwner(api_obj=api_obj)
        self.LookupByVaultType = self._LookupByVaultType(api_obj=api_obj)
        self.Mutate = self._Mutate(api_obj=api_obj)
        self.OrphanLookup = self._OrphanLookup(api_obj=api_obj)
        self.OwnerAdd = self._OwnerAdd(api_obj=api_obj)
        self.OwnerDelete = self._OwnerDelete(api_obj=api_obj)
        self.OwnerLookup = self._OwnerLookup(api_obj=api_obj)
        self.Retrieve = self._Retrieve(api_obj=api_obj)

    class _Add(API):
        """``POST /SecretStore/Add`` — store a new secret."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/Add')

        def post(self, base_64_data: str, keyname: str, namespace: str, owner: str, vault_type: int):
            """Store ``base_64_data`` in the secret store; the response carries
            the operation result and the new vault's ID."""
            body = {
                'Base64Data': base_64_data,
                'Keyname': keyname,
                'Namespace': namespace,
                'Owner': owner,
                'VaultType': vault_type
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    # Operation status wrapped in the shared Result object.
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def vault_id(self) -> int:
                    return self._from_json(key='VaultID')

            return _Response(response=self._post(data=body))

    class _Associate(API):
        """``POST /SecretStore/Associate`` — attach a named value to a vault entry."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/Associate')

        def post(self, name: str, vault_id: int, date_value: int = None,
                 int_value: int = None, string_value: str = None):
            """Associate a name (with optional date/int/string value) to ``vault_id``."""
            body = {
                'Name': name,
                'VaultID': vault_id,
                'DateValue': date_value,
                'IntValue': int_value,
                'StringValue': string_value
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

            return _Response(response=self._post(data=body))

    class _Dissociate(API):
        """``POST /SecretStore/Dissociate`` — remove an association from a vault entry."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/Dissociate')

        def post(self, vault_id: int, int_value: int = None, name: str = None,
                 string_value: str = None, date_value: int = None):
            """Remove the association matching the given name/values from ``vault_id``."""
            body = {
                'VaultID': vault_id,
                'IntValue': int_value,
                'Name': name,
                'StringValue': string_value,
                'DateValue': date_value
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

            return _Response(response=self._post(data=body))

    class _EncryptionKeysInUse(API):
        """``GET /SecretStore/EncryptionKeysInUse`` — list encryption keys in use."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/EncryptionKeysInUse')

        def get(self):
            """Return the list of encryption key names plus the operation result."""
            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def encryption_keys(self) -> List[str]:
                    return self._from_json(key='EncryptionKeys')

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

            return _Response(response=self._get())

    class _Lookup(API):
        """``GET /SecretStore/Lookup`` — list all vault IDs."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/Lookup')

        def get(self):
            """Return every vault ID visible to the caller plus the operation result."""
            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def vault_ids(self) -> List[int]:
                    return self._from_json(key='VaultIDs')

            return _Response(response=self._get())

    class _LookupAllAssociationsbyVaultid(API):
        """``POST /SecretStore/LookupAllAssociationsbyVaultid`` — list a vault's associations."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/LookupAllAssociationsbyVaultid')

        def post(self, vault_id: int):
            """Return all typed name/value associations for ``vault_id``."""
            body = {
                'VaultID': vault_id
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def typed_name_values(self):
                    # Wraps each raw association dict in the TypedNameValues model.
                    return [SecretStore.TypedNameValues(tnv) for tnv in self._from_json('TypedNameValues')]

            return _Response(response=self._post(data=body))

    class _LookupByAssociation(API):
        """``POST /SecretStore/LookupByAssociation`` — find vaults by association value."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/LookupByAssociation')

        def post(self, name: str, int_value: int = None, string_value: str = None, date_value: int = None):
            """Return the vault IDs whose association matches the given name/value."""
            body = {
                'Name': name,
                'IntValue': int_value,
                'StringValue': string_value,
                'DateValue': date_value
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def vault_ids(self) -> List[int]:
                    return self._from_json(key='VaultIDs')

            return _Response(response=self._post(data=body))

    class _LookupAssociationbyVaultID(API):
        """``POST /SecretStore/LookupAssociationbyVaultID`` — read an association value."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/LookupAssociationbyVaultID')

        def post(self, vault_id: int, name: str = None):
            """Return the association value stored on ``vault_id`` (optionally by name)."""
            body = {
                'VaultID': vault_id,
                'Name': name
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def value(self) -> str:
                    return self._from_json(key='Value')

            return _Response(response=self._post(data=body))

    class _LookupByOwner(API):
        """``POST /SecretStore/LookupByOwner`` — find vaults by owner."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/LookupByOwner')

        def post(self, namespace: str, owner: str, vault_type: str = None):
            """Return the vault IDs owned by ``owner`` within ``namespace``.

            NOTE(review): ``vault_type`` is annotated ``str`` here but ``int``
            in the other endpoints — confirm against the WebSDK spec.
            """
            body = {
                'Namespace': namespace,
                'Owner': owner,
                'VaultType': vault_type
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def vault_ids(self) -> List[int]:
                    return self._from_json(key='VaultIDs')

            return _Response(response=self._post(data=body))

    class _LookupByVaultType(API):
        """``POST /SecretStore/LookupByVaultType`` — find vaults by type."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/LookupByVaultType')

        def post(self, vault_type: int):
            """Return the vault IDs having the given vault type."""
            body = {
                'VaultType': vault_type
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def vault_ids(self) -> List[int]:
                    return self._from_json(key='VaultIDs')

            return _Response(response=self._post(data=body))

    class _Mutate(API):
        """``POST /SecretStore/Mutate`` — change a vault entry's type."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/Mutate')

        def post(self, vault_id: int, vault_type: int):
            """Change ``vault_id`` to the given ``vault_type``."""
            body = {
                'VaultID': vault_id,
                'VaultType': vault_type
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

            return _Response(response=self._post(data=body))

    class _OrphanLookup(API):
        """``POST /SecretStore/OrphanLookup`` — find ownerless vault entries."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/OrphanLookup')

        def post(self, vault_type: int):
            """Return the orphaned vault IDs of the given ``vault_type``."""
            body = {
                'VaultType': vault_type
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def vault_ids(self) -> List[int]:
                    return self._from_json(key='VaultIDs')

            return _Response(response=self._post(data=body))

    class _OwnerAdd(API):
        """``POST /SecretStore/OwnerAdd`` — add an owner to a vault entry."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/OwnerAdd')

        def post(self, namespace: str, owner: str, vault_id: int):
            """Add ``owner`` (within ``namespace``) to ``vault_id``."""
            body = {
                'Namespace': namespace,
                'Owner': owner,
                # NOTE(review): key is 'VaultId' here but 'VaultID' in the other
                # endpoints — confirm the exact casing against the WebSDK spec.
                'VaultId': vault_id
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

            return _Response(response=self._post(data=body))

    class _OwnerDelete(API):
        """``POST /SecretStore/OwnerDelete`` — remove an owner from vault entries."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/OwnerDelete')

        def post(self, namespace: str, owner: str, vault_id: int = None):
            """Remove ``owner`` (within ``namespace``), optionally scoped to one vault."""
            body = {
                'Namespace': namespace,
                'Owner': owner,
                # NOTE(review): key is 'VaultId' here but 'VaultID' in the other
                # endpoints — confirm the exact casing against the WebSDK spec.
                'VaultId': vault_id
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

            return _Response(response=self._post(data=body))

    class _OwnerLookup(API):
        """``POST /SecretStore/OwnerLookup`` — list the owners of a vault entry."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/OwnerLookup')

        def post(self, namespace: str, vault_id: int):
            """Return the owners of ``vault_id`` within ``namespace``."""
            body = {
                'Namespace': namespace,
                'VaultID': vault_id
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def owners(self) -> List[str]:
                    return self._from_json(key='Owners')

            return _Response(response=self._post(data=body))

    class _Retrieve(API):
        """``POST /SecretStore/Retrieve`` — read a secret back out of the vault."""

        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/SecretStore/Retrieve')

        def post(self, vault_id: int):
            """Return the base64 data, result, and vault type for ``vault_id``."""
            body = {
                'VaultID': vault_id
            }

            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def base_64_data(self) -> str:
                    return self._from_json(key='Base64Data')

                @property
                @api_response_property()
                def result(self):
                    return SecretStore.Result(self._from_json(key='Result'))

                @property
                @api_response_property()
                def vault_type(self) -> str:
                    return self._from_json(key='VaultType')

            return _Response(response=self._post(data=body))
35.6
129
0.544476
1,423
14,952
5.298665
0.054814
0.064456
0.048143
0.050928
0.803183
0.754111
0.727984
0.707162
0.665252
0.665252
0
0.001037
0.35527
14,952
419
130
35.684964
0.78112
0
0
0.70948
0
0
0.060728
0.024211
0
0
0
0
0
1
0.235474
false
0.009174
0.009174
0.085627
0.480122
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
f617ae879e8db5471c540384d5549fe2204e7e4b
4,010
py
Python
test/pyaz/kusto/cluster/__init__.py
bigdatamoore/py-az-cli
54383a4ee7cc77556f6183e74e992eec95b28e01
[ "MIT" ]
null
null
null
test/pyaz/kusto/cluster/__init__.py
bigdatamoore/py-az-cli
54383a4ee7cc77556f6183e74e992eec95b28e01
[ "MIT" ]
9
2021-09-24T16:37:24.000Z
2021-12-24T00:39:19.000Z
test/pyaz/kusto/cluster/__init__.py
bigdatamoore/py-az-cli
54383a4ee7cc77556f6183e74e992eec95b28e01
[ "MIT" ]
null
null
null
import json, subprocess
from ... pyaz_utils import get_cli_name, get_params


def _run(command):
    """Execute an ``az`` CLI command string and return its parsed JSON output.

    The command is echoed first, preserving the original functions' debug
    trace.  When the command writes anything to stdout, that text is parsed
    as JSON and returned; otherwise an ``Exception`` carrying the captured
    stderr is raised.

    This replaces eight identical copies of the same subprocess block; the
    unreachable ``print(stdout)`` / ``print(stderr)`` statements that
    followed the ``return`` / ``raise`` in each copy have been dropped.

    NOTE(review): the command runs through the shell as a single string
    (``shell=True``); arguments are assumed to be pre-quoted by
    ``get_params``.
    """
    print(command)
    output = subprocess.run(command, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)


def create(resource_group, name, sku, location=None, capacity=None, no_wait=None):
    """Create a Kusto cluster (``az kusto cluster create``)."""
    params = get_params(locals())
    return _run("az kusto cluster create " + params)


def stop(resource_group, name, no_wait=None):
    """Stop a Kusto cluster (``az kusto cluster stop``)."""
    params = get_params(locals())
    return _run("az kusto cluster stop " + params)


def start(resource_group, name, no_wait=None):
    """Start a Kusto cluster (``az kusto cluster start``)."""
    params = get_params(locals())
    return _run("az kusto cluster start " + params)


def list(resource_group):
    """List Kusto clusters in a resource group (``az kusto cluster list``).

    NOTE(review): shadows the ``list`` builtin, but the name is this
    module's public interface and is kept unchanged.
    """
    params = get_params(locals())
    return _run("az kusto cluster list " + params)


def show(resource_group, name):
    """Show a Kusto cluster (``az kusto cluster show``)."""
    params = get_params(locals())
    return _run("az kusto cluster show " + params)


def delete(resource_group, name, yes=None):
    """Delete a Kusto cluster (``az kusto cluster delete``)."""
    params = get_params(locals())
    return _run("az kusto cluster delete " + params)


def update(resource_group, name, sku=None, capacity=None, set=None, add=None, remove=None, force_string=None):
    """Update a Kusto cluster (``az kusto cluster update``)."""
    params = get_params(locals())
    return _run("az kusto cluster update " + params)


def wait(resource_group, name, timeout=None, interval=None, deleted=None, created=None, updated=None, exists=None, custom=None):
    """Wait for a Kusto cluster condition (``az kusto cluster wait``)."""
    params = get_params(locals())
    return _run("az kusto cluster wait " + params)
34.568966
128
0.658853
494
4,010
5.299595
0.123482
0.085562
0.061115
0.064171
0.849885
0.849885
0.849885
0.849885
0.8178
0.765088
0
0.005133
0.222693
4,010
115
129
34.869565
0.834777
0
0
0.830189
0
0
0.065586
0
0
0
0
0
0
1
0.075472
false
0
0.018868
0
0.169811
0.226415
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
14427091ebc1bcd29c36b80eb17e0e10f2793e29
63
py
Python
F1/__init__.py
vrthra-forks/F1
b70e03e9a8993e56d597e9c5124bca4dc83d021a
[ "MIT" ]
70
2019-11-20T15:32:17.000Z
2021-11-10T08:20:48.000Z
F1/__init__.py
vrthra-forks/F1
b70e03e9a8993e56d597e9c5124bca4dc83d021a
[ "MIT" ]
5
2019-11-20T20:08:59.000Z
2022-02-15T11:42:11.000Z
F1/__init__.py
abhilashgupta/F1
7c0c8c0b174d602f523debece6e857ae6e413c32
[ "MIT" ]
5
2019-11-20T19:53:23.000Z
2021-08-06T08:33:51.000Z
from .fuzzer import CTrans from .fuzzer import CFWriteCTFuzzer
21
35
0.84127
8
63
6.625
0.625
0.377358
0.603774
0
0
0
0
0
0
0
0
0
0.126984
63
2
36
31.5
0.963636
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
1aed7d280e558451ee5831f3ff2867278c21605b
44,553
py
Python
src/devcenter/azext_devcenter/generated/_params.py
tbyfield/azure-cli-extensions
e7e5f37fdcea3afb5c4aecb61fa72eac72c2128e
[ "MIT" ]
null
null
null
src/devcenter/azext_devcenter/generated/_params.py
tbyfield/azure-cli-extensions
e7e5f37fdcea3afb5c4aecb61fa72eac72c2128e
[ "MIT" ]
null
null
null
src/devcenter/azext_devcenter/generated/_params.py
tbyfield/azure-cli-extensions
e7e5f37fdcea3afb5c4aecb61fa72eac72c2128e
[ "MIT" ]
1
2022-02-14T21:43:29.000Z
2022-02-14T21:43:29.000Z
# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- # pylint: disable=too-many-lines # pylint: disable=too-many-statements from azure.cli.core.commands.parameters import ( tags_type, get_enum_type, resource_group_name_type, get_location_type ) from azure.cli.core.commands.validators import ( get_default_location_from_resource_group, validate_file_or_dict ) from azext_devcenter.action import ( AddGitHub, AddImageReference, AddSku ) def load_arguments(self, _): with self.argument_context('devcenter dev-center list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') with self.argument_context('devcenter dev-center show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', options_list=['--name', '-n', '--dev-center-name'], type=str, help='The name of ' 'the devcenter.', id_part='name') with self.argument_context('devcenter dev-center create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', options_list=['--name', '-n', '--dev-center-name'], type=str, help='The name of ' 'the devcenter.') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['None', 'SystemAssigned', 'UserAssigned', 'SystemAssigned, UserAssigned']), help='Type of managed service identity (where both SystemAssigned and UserAssigned types are ' 'allowed).', arg_group='Identity') c.argument('user_assigned_identities', type=validate_file_or_dict, help='The set of user assigned identities ' 'associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids ' 'in the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microso' 'ft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty ' 'objects ({}) in requests. 
Expected value: json-string/json-file/@json-file.', arg_group='Identity') with self.argument_context('devcenter dev-center update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', options_list=['--name', '-n', '--dev-center-name'], type=str, help='The name of ' 'the devcenter.', id_part='name') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['None', 'SystemAssigned', 'UserAssigned', 'SystemAssigned, UserAssigned']), help='Type of managed service identity (where both SystemAssigned and UserAssigned types are ' 'allowed).', arg_group='Identity') c.argument('user_assigned_identities', type=validate_file_or_dict, help='The set of user assigned identities ' 'associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids ' 'in the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microso' 'ft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty ' 'objects ({}) in requests. 
Expected value: json-string/json-file/@json-file.', arg_group='Identity') with self.argument_context('devcenter dev-center delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', options_list=['--name', '-n', '--dev-center-name'], type=str, help='The name of ' 'the devcenter.', id_part='name') with self.argument_context('devcenter dev-center wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', options_list=['--name', '-n', '--dev-center-name'], type=str, help='The name of ' 'the devcenter.', id_part='name') with self.argument_context('devcenter project list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter project show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', options_list=['--name', '-n', '--project-name'], type=str, help='The name of the ' 'project.', id_part='name') with self.argument_context('devcenter project create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', options_list=['--name', '-n', '--project-name'], type=str, help='The name of the ' 'project.') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('dev_center_id', type=str, help='Resource Id of an associated DevCenter') c.argument('description', type=str, help='Description of the project.') with self.argument_context('devcenter project update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', options_list=['--name', '-n', '--project-name'], type=str, help='The name of the ' 'project.', id_part='name') c.argument('tags', tags_type) c.argument('location', 
arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('dev_center_id', type=str, help='Resource Id of an associated DevCenter') c.argument('description', type=str, help='Description of the project.') with self.argument_context('devcenter project delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', options_list=['--name', '-n', '--project-name'], type=str, help='The name of the ' 'project.', id_part='name') with self.argument_context('devcenter project wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', options_list=['--name', '-n', '--project-name'], type=str, help='The name of the ' 'project.', id_part='name') with self.argument_context('devcenter attached-network list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') c.argument('dev_center_name', type=str, help='The name of the devcenter.') with self.argument_context('devcenter attached-network show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('attached_network_connection_name', type=str, help='The name of the attached NetworkConnection.', id_part='child_name_1') c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') with self.argument_context('devcenter attached-network create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('attached_network_connection_name', type=str, help='The name of the attached NetworkConnection.') c.argument('network_connection_id', type=str, help='The resource ID of the NetworkConnection you want to ' 'attach.') with self.argument_context('devcenter attached-network update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('attached_network_connection_name', type=str, help='The name of the attached NetworkConnection.', id_part='child_name_1') c.argument('network_connection_id', type=str, help='The resource ID of the NetworkConnection you want to ' 'attach.') with self.argument_context('devcenter attached-network delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('attached_network_connection_name', type=str, help='The name of the attached NetworkConnection.', id_part='child_name_1') with self.argument_context('devcenter attached-network wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', 
id_part='name') c.argument('attached_network_connection_name', type=str, help='The name of the attached NetworkConnection.', id_part='child_name_1') c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') with self.argument_context('devcenter gallery list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter gallery show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('gallery_name', options_list=['--name', '-n', '--gallery-name'], type=str, help='The name of the ' 'gallery.', id_part='child_name_1') with self.argument_context('devcenter gallery create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('gallery_name', options_list=['--name', '-n', '--gallery-name'], type=str, help='The name of the ' 'gallery.') c.argument('gallery_resource_id', type=str, help='The resource ID of the backing Azure Compute Gallery.') with self.argument_context('devcenter gallery update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('gallery_name', options_list=['--name', '-n', '--gallery-name'], type=str, help='The name of the ' 'gallery.', id_part='child_name_1') c.argument('gallery_resource_id', type=str, help='The resource ID of the backing Azure Compute Gallery.') c.ignore('body') with self.argument_context('devcenter gallery delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of 
the devcenter.', id_part='name') c.argument('gallery_name', options_list=['--name', '-n', '--gallery-name'], type=str, help='The name of the ' 'gallery.', id_part='child_name_1') with self.argument_context('devcenter gallery wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('gallery_name', options_list=['--name', '-n', '--gallery-name'], type=str, help='The name of the ' 'gallery.', id_part='child_name_1') with self.argument_context('devcenter image list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('gallery_name', type=str, help='The name of the gallery.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter image show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('gallery_name', type=str, help='The name of the gallery.', id_part='child_name_1') c.argument('image_name', options_list=['--name', '-n', '--image-name'], type=str, help='The name of the image.', id_part='child_name_2') with self.argument_context('devcenter image-version list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('gallery_name', type=str, help='The name of the gallery.') c.argument('image_name', type=str, help='The name of the image.') with self.argument_context('devcenter image-version show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('gallery_name', type=str, help='The name of the gallery.', 
id_part='child_name_1') c.argument('image_name', type=str, help='The name of the image.', id_part='child_name_2') c.argument('version_name', type=str, help='The version of the image.', id_part='child_name_3') with self.argument_context('devcenter catalog list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter catalog show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('catalog_name', options_list=['--name', '-n', '--catalog-name'], type=str, help='The name of the ' 'Catalog.', id_part='child_name_1') with self.argument_context('devcenter catalog create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('catalog_name', options_list=['--name', '-n', '--catalog-name'], type=str, help='The name of the ' 'Catalog.') c.argument('git_hub', action=AddGitHub, nargs='+', help='Properties for a GitHub catalog type.') c.argument('ado_git', action=AddGitHub, nargs='+', help='Properties for an Azure DevOps catalog type.') with self.argument_context('devcenter catalog update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('catalog_name', options_list=['--name', '-n', '--catalog-name'], type=str, help='The name of the ' 'Catalog.', id_part='child_name_1') c.argument('tags', tags_type) c.argument('git_hub', action=AddGitHub, nargs='+', help='Properties for a GitHub catalog type.') c.argument('ado_git', action=AddGitHub, nargs='+', help='Properties for an Azure DevOps catalog type.') with 
self.argument_context('devcenter catalog delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('catalog_name', options_list=['--name', '-n', '--catalog-name'], type=str, help='The name of the ' 'Catalog.', id_part='child_name_1') with self.argument_context('devcenter catalog sync') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('catalog_name', options_list=['--name', '-n', '--catalog-name'], type=str, help='The name of the ' 'Catalog.', id_part='child_name_1') with self.argument_context('devcenter catalog wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('catalog_name', options_list=['--name', '-n', '--catalog-name'], type=str, help='The name of the ' 'Catalog.', id_part='child_name_1') with self.argument_context('devcenter environment-type list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') with self.argument_context('devcenter environment-type show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('environment_type_name', options_list=['--name', '-n', '--environment-type-name'], type=str, help='The name of the environment type.', id_part='child_name_1') with self.argument_context('devcenter environment-type create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('environment_type_name', options_list=['--name', '-n', '--environment-type-name'], type=str, help='The name of the environment type.') c.argument('tags', tags_type) with self.argument_context('devcenter environment-type update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('environment_type_name', options_list=['--name', '-n', '--environment-type-name'], type=str, help='The name of the environment type.', id_part='child_name_1') c.argument('tags', tags_type) with self.argument_context('devcenter environment-type delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('environment_type_name', options_list=['--name', '-n', '--environment-type-name'], type=str, help='The name of the environment type.', id_part='child_name_1') with self.argument_context('devcenter project-environment-type list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') with self.argument_context('devcenter project-environment-type show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('environment_type_name', type=str, help='The name of the environment type.', id_part='child_name_1') with self.argument_context('devcenter project-environment-type create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.') c.argument('environment_type_name', type=str, help='The name of the environment type.') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('deployment_target_id', type=str, help='Id of a subscription that the environment type will be ' 'mapped to. The environment\'s resources will be deployed into this subscription.') c.argument('status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Defines whether this Environment ' 'Type can be used in this Project.') c.argument('creator_role_assignment', type=str, help='The role definition assigned to the environment creator ' 'on backing resources.') c.argument('user_role_assignments', type=validate_file_or_dict, help='Role Assignments created on environment ' 'backing resources. This is a mapping from a user object ID to an object of role definition IDs. ' 'Expected value: json-string/json-file/@json-file.') c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['None', 'SystemAssigned', 'UserAssigned', 'SystemAssigned, UserAssigned']), help='Type of managed service identity (where both SystemAssigned and UserAssigned types are ' 'allowed).', arg_group='Identity') c.argument('user_assigned_identities', type=validate_file_or_dict, help='The set of user assigned identities ' 'associated with the resource. 
The userAssignedIdentities dictionary keys will be ARM resource ids ' 'in the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microso' 'ft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty ' 'objects ({}) in requests. Expected value: json-string/json-file/@json-file.', arg_group='Identity') with self.argument_context('devcenter project-environment-type update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('environment_type_name', type=str, help='The name of the environment type.', id_part='child_name_1') c.argument('tags', tags_type) c.argument('deployment_target_id', type=str, help='Id of a subscription that the environment type will be ' 'mapped to. The environment\'s resources will be deployed into this subscription.') c.argument('status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Defines whether this Environment ' 'Type can be used in this Project.') c.argument('creator_role_assignment', type=str, help='The role definition assigned to the environment creator ' 'on backing resources.') c.argument('user_role_assignments', type=validate_file_or_dict, help='Role Assignments created on environment ' 'backing resources. This is a mapping from a user object ID to an object of role definition IDs. ' 'Expected value: json-string/json-file/@json-file.') c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['None', 'SystemAssigned', 'UserAssigned', 'SystemAssigned, UserAssigned']), help='Type of managed service identity (where both SystemAssigned and UserAssigned types are ' 'allowed).', arg_group='Identity') c.argument('user_assigned_identities', type=validate_file_or_dict, help='The set of user assigned identities ' 'associated with the resource. 
The userAssignedIdentities dictionary keys will be ARM resource ids ' 'in the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microso' 'ft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty ' 'objects ({}) in requests. Expected value: json-string/json-file/@json-file.', arg_group='Identity') with self.argument_context('devcenter project-environment-type delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('environment_type_name', type=str, help='The name of the environment type.', id_part='child_name_1') with self.argument_context('devcenter dev-box-definition list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') c.argument('project_name', type=str, help='The name of the project.') with self.argument_context('devcenter dev-box-definition show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('dev_box_definition_name', options_list=['--name', '-n', '--dev-box-definition-name'], type=str, help='The name of the Dev Box definition.', id_part='child_name_1') c.argument('project_name', type=str, help='The name of the project.', id_part='name') with self.argument_context('devcenter dev-box-definition create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.') c.argument('dev_box_definition_name', options_list=['--name', '-n', '--dev-box-definition-name'], type=str, help='The name of the Dev Box definition.') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('image_reference', action=AddImageReference, nargs='+', help='Image reference information.') c.argument('sku', action=AddSku, nargs='+', help='The SKU for Dev Boxes created using this definition.') c.argument('os_storage_type', type=str, help='The storage type used for the Operating System disk of Dev Boxes ' 'created using this definition.') with self.argument_context('devcenter dev-box-definition update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('dev_box_definition_name', options_list=['--name', '-n', '--dev-box-definition-name'], type=str, help='The name of the Dev Box definition.', id_part='child_name_1') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, 
validator=get_default_location_from_resource_group) c.argument('image_reference', action=AddImageReference, nargs='+', help='Image reference information.') c.argument('sku', action=AddSku, nargs='+', help='The SKU for Dev Boxes created using this definition.') c.argument('os_storage_type', type=str, help='The storage type used for the Operating System disk of Dev Boxes ' 'created using this definition.') with self.argument_context('devcenter dev-box-definition delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('dev_box_definition_name', options_list=['--name', '-n', '--dev-box-definition-name'], type=str, help='The name of the Dev Box definition.', id_part='child_name_1') with self.argument_context('devcenter dev-box-definition wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('dev_center_name', type=str, help='The name of the devcenter.', id_part='name') c.argument('dev_box_definition_name', options_list=['--name', '-n', '--dev-box-definition-name'], type=str, help='The name of the Dev Box definition.', id_part='child_name_1') c.argument('project_name', type=str, help='The name of the project.', id_part='name') with self.argument_context('devcenter operation-statuses show') as c: c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') c.argument('operation_id', type=str, help='The ID of an ongoing async operation', id_part='child_name_1') with self.argument_context('devcenter usage list') as c: c.argument('location', arg_type=get_location_type(self.cli_ctx)) with self.argument_context('devcenter sku list') as c: c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') with self.argument_context('devcenter pool list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter pool show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', options_list=['--name', '-n', '--pool-name'], type=str, help='Name of the pool.', id_part='child_name_1') with self.argument_context('devcenter pool create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.') c.argument('pool_name', options_list=['--name', '-n', '--pool-name'], type=str, help='Name of the pool.') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('dev_box_definition_name', type=str, help='Name of a Dev Box definition in parent Project of this ' 'Pool') c.argument('network_connection_name', type=str, help='Name of a Network Connection in parent Project of this ' 'Pool') c.argument('local_administrator', arg_type=get_enum_type(['Disabled', 'Enabled']), help='Indicates whether ' 'owners of Dev Boxes in this pool are added as local administrators on the Dev Box.') with self.argument_context('devcenter pool update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', options_list=['--name', '-n', '--pool-name'], type=str, help='Name of the pool.', id_part='child_name_1') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), 
required=False, validator=get_default_location_from_resource_group) c.argument('dev_box_definition_name', type=str, help='Name of a Dev Box definition in parent Project of this ' 'Pool') c.argument('network_connection_name', type=str, help='Name of a Network Connection in parent Project of this ' 'Pool') c.argument('local_administrator', arg_type=get_enum_type(['Disabled', 'Enabled']), help='Indicates whether ' 'owners of Dev Boxes in this pool are added as local administrators on the Dev Box.') with self.argument_context('devcenter pool delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', options_list=['--name', '-n', '--pool-name'], type=str, help='Name of the pool.', id_part='child_name_1') with self.argument_context('devcenter pool wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', options_list=['--name', '-n', '--pool-name'], type=str, help='Name of the pool.', id_part='child_name_1') with self.argument_context('devcenter schedule list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.') c.argument('pool_name', type=str, help='Name of the pool.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') with self.argument_context('devcenter schedule show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', type=str, help='Name of the pool.', id_part='child_name_1') c.argument('schedule_name', options_list=['--name', '-n', '--schedule-name'], type=str, help='The name of the ' 'schedule that uniquely identifies it.', id_part='child_name_2') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter schedule create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.') c.argument('pool_name', type=str, help='Name of the pool.') c.argument('schedule_name', options_list=['--name', '-n', '--schedule-name'], type=str, help='The name of the ' 'schedule that uniquely identifies it.') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') c.argument('time', type=str, help='The target time to trigger the action. 
The format is HH:MM.') c.argument('time_zone', type=str, help='The IANA timezone id at which the schedule should execute.') c.argument('state', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or not this ' 'scheduled task is enabled.') with self.argument_context('devcenter schedule update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', type=str, help='Name of the pool.', id_part='child_name_1') c.argument('schedule_name', options_list=['--name', '-n', '--schedule-name'], type=str, help='The name of the ' 'schedule that uniquely identifies it.', id_part='child_name_2') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('time', type=str, help='The target time to trigger the action. The format is HH:MM.') c.argument('time_zone', type=str, help='The IANA timezone id at which the schedule should execute.') c.argument('state', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or not this ' 'scheduled task is enabled.') with self.argument_context('devcenter schedule delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', type=str, help='Name of the pool.', id_part='child_name_1') c.argument('schedule_name', options_list=['--name', '-n', '--schedule-name'], type=str, help='The name of the ' 'schedule that uniquely identifies it.', id_part='child_name_2') c.argument('top', type=int, help='The maximum number of resources to return from the operation. 
Example: ' '\'$top=10\'.') with self.argument_context('devcenter schedule wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('project_name', type=str, help='The name of the project.', id_part='name') c.argument('pool_name', type=str, help='Name of the pool.', id_part='child_name_1') c.argument('schedule_name', options_list=['--name', '-n', '--schedule-name'], type=str, help='The name of the ' 'schedule that uniquely identifies it.', id_part='child_name_2') c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter network-connection list') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') with self.argument_context('devcenter network-connection show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.', id_part='name') with self.argument_context('devcenter network-connection create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('subnet_id', type=str, help='The subnet to attach Virtual Machines to') c.argument('domain_name', type=str, help='Active Directory domain name') c.argument('organization_unit', type=str, help='Active Directory domain Organization Unit (OU)') c.argument('domain_username', type=str, help='The username of an Active 
Directory account (user or service ' 'account) that has permissions to create computer objects in Active Directory. Required format: ' 'admin@contoso.com.') c.argument('domain_password', type=str, help='The password for the account used to join domain') c.argument('networking_resource_group_name', type=str, help='The name for resource group where NICs will be ' 'placed.') c.argument('domain_join_type', arg_type=get_enum_type(['HybridAzureADJoin', 'AzureADJoin']), help='AAD Join ' 'type.') with self.argument_context('devcenter network-connection update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.', id_part='name') c.argument('tags', tags_type) c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('subnet_id', type=str, help='The subnet to attach Virtual Machines to') c.argument('domain_name', type=str, help='Active Directory domain name') c.argument('organization_unit', type=str, help='Active Directory domain Organization Unit (OU)') c.argument('domain_username', type=str, help='The username of an Active Directory account (user or service ' 'account) that has permissions to create computer objects in Active Directory. 
Required format: ' 'admin@contoso.com.') c.argument('domain_password', type=str, help='The password for the account used to join domain') with self.argument_context('devcenter network-connection delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.', id_part='name') with self.argument_context('devcenter network-connection list-health-detail') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('top', type=int, help='The maximum number of resources to return from the operation. Example: ' '\'$top=10\'.') c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.') with self.argument_context('devcenter network-connection run-health-check') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.', id_part='name') with self.argument_context('devcenter network-connection show-health-detail') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.', id_part='name') with self.argument_context('devcenter network-connection wait') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('network_connection_name', options_list=['--name', '-n', '--network-connection-name'], type=str, help='Name of the Network Connection that can be applied to a Pool.', id_part='name')
70.051887
120
0.662065
5,930
44,553
4.779933
0.046374
0.097795
0.062868
0.071441
0.974951
0.969166
0.960734
0.947292
0.944858
0.944505
0
0.002231
0.205104
44,553
635
121
70.162205
0.798136
0.011357
0
0.791209
0
0
0.42959
0.046009
0
0
0
0
0
1
0.001832
false
0.003663
0.005495
0
0.007326
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7