hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ad6f52a3ace11108767d9847a1fe078de7dfc2c5 | 1,303 | py | Python | blowup.py | cxy1997/Thunder | 20ee9d37d878f851ea29b7f4ba51cc9e5149df45 | ["MIT"] | 2 | 2016-12-08T05:50:40.000Z | 2018-03-24T03:13:30.000Z | blowup.py | cxy1997/Thunder | 20ee9d37d878f851ea29b7f4ba51cc9e5149df45 | ["MIT"] | null | null | null | blowup.py | cxy1997/Thunder | 20ee9d37d878f851ea29b7f4ba51cc9e5149df45 | ["MIT"] | null | null | null | from Tkinter import PhotoImage
from linked_list import Linked_List, dlt
class Blowups:
def __init__(self, master):
self.master = master
self.img = PhotoImage(file = 'images\\bomb1.gif')
self.data = Linked_List(-100, -100, self)
def new(self, x, y):
self.data.add(x, y)
def upd(self):
p = self.data
while p._next:
p = p.next
p.t += 1
if p.t == 6:
p = p.last
dlt(p.next)
else:
p.master.master.canvas.lift(p.pic)
def clear(self):
p = self.data
while p._next:
dlt(p.next)
class smallBlowups:
def __init__(self, master):
self.master = master
self.img = PhotoImage(file = 'images\\bomb2.gif')
self.data = Linked_List(-100, -100, self)
def new(self, x, y):
self.data.add(x, y)
def upd(self):
p = self.data
while p._next:
p = p.next
p.t += 1
if p.t == 6:
p = p.last
dlt(p.next)
else:
p.master.master.canvas.lift(p.pic)
def clear(self):
p = self.data
while p._next:
dlt(p.next) | 25.057692 | 58 | 0.461243 | 167 | 1,303 | 3.502994 | 0.239521 | 0.08547 | 0.061538 | 0.088889 | 0.830769 | 0.830769 | 0.830769 | 0.830769 | 0.830769 | 0.830769 | 0 | 0.024 | 0.424405 | 1,303 | 52 | 59 | 25.057692 | 0.756 | 0 | 0 | 0.863636 | 0 | 0 | 0.027135 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.045455 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a8f7223d56f1b6aa6d74b1b7d61ba7c49abcfe66 | 175,799 | py | Python | trustlab/lab/scenarios/scale_obs_1000_scenario.py | N0omB/aTLAS | 2277d6bf312b6de9f0da816bdfe28f9c40110211 | ["MIT"] | 1 | 2020-11-12T16:17:12.000Z | 2020-11-12T16:17:12.000Z | trustlab/lab/scenarios/scale_obs_1000_scenario.py | N0omB/aTLAS | 2277d6bf312b6de9f0da816bdfe28f9c40110211 | ["MIT"] | null | null | null | trustlab/lab/scenarios/scale_obs_1000_scenario.py | N0omB/aTLAS | 2277d6bf312b6de9f0da816bdfe28f9c40110211 | ["MIT"] | null | null | null |
NAME = 'Scale Obs 1000'
AGENTS = ['A', 'B', 'C', 'D']
OBSERVATIONS = [{'author': 'A',
'before': [],
'message': 'Redecentralization of the Web',
'observation_id': 0,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [0],
'message': 'Redecentralization of the Web',
'observation_id': 1,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [1],
'message': 'Redecentralization of the Web',
'observation_id': 2,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [2],
'message': 'Redecentralization of the Web',
'observation_id': 3,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [3],
'message': 'Redecentralization of the Web',
'observation_id': 4,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [4],
'message': 'Redecentralization of the Web',
'observation_id': 5,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [5],
'message': 'Redecentralization of the Web',
'observation_id': 6,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [6],
'message': 'Redecentralization of the Web',
'observation_id': 7,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [7],
'message': 'Redecentralization of the Web',
'observation_id': 8,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [8],
'message': 'Redecentralization of the Web',
'observation_id': 9,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [9],
'message': 'Redecentralization of the Web',
'observation_id': 10,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [10],
'message': 'Redecentralization of the Web',
'observation_id': 11,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [11],
'message': 'Redecentralization of the Web',
'observation_id': 12,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [12],
'message': 'Redecentralization of the Web',
'observation_id': 13,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [13],
'message': 'Redecentralization of the Web',
'observation_id': 14,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [14],
'message': 'Redecentralization of the Web',
'observation_id': 15,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [15],
'message': 'Redecentralization of the Web',
'observation_id': 16,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [16],
'message': 'Redecentralization of the Web',
'observation_id': 17,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [17],
'message': 'Redecentralization of the Web',
'observation_id': 18,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [18],
'message': 'Redecentralization of the Web',
'observation_id': 19,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [19],
'message': 'Redecentralization of the Web',
'observation_id': 20,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [20],
'message': 'Redecentralization of the Web',
'observation_id': 21,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [21],
'message': 'Redecentralization of the Web',
'observation_id': 22,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [22],
'message': 'Redecentralization of the Web',
'observation_id': 23,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [23],
'message': 'Redecentralization of the Web',
'observation_id': 24,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [24],
'message': 'Redecentralization of the Web',
'observation_id': 25,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [25],
'message': 'Redecentralization of the Web',
'observation_id': 26,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [26],
'message': 'Redecentralization of the Web',
'observation_id': 27,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [27],
'message': 'Redecentralization of the Web',
'observation_id': 28,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [28],
'message': 'Redecentralization of the Web',
'observation_id': 29,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [29],
'message': 'Redecentralization of the Web',
'observation_id': 30,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [30],
'message': 'Redecentralization of the Web',
'observation_id': 31,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [31],
'message': 'Redecentralization of the Web',
'observation_id': 32,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [32],
'message': 'Redecentralization of the Web',
'observation_id': 33,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [33],
'message': 'Redecentralization of the Web',
'observation_id': 34,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [34],
'message': 'Redecentralization of the Web',
'observation_id': 35,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [35],
'message': 'Redecentralization of the Web',
'observation_id': 36,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [36],
'message': 'Redecentralization of the Web',
'observation_id': 37,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [37],
'message': 'Redecentralization of the Web',
'observation_id': 38,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [38],
'message': 'Redecentralization of the Web',
'observation_id': 39,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [39],
'message': 'Redecentralization of the Web',
'observation_id': 40,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [40],
'message': 'Redecentralization of the Web',
'observation_id': 41,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [41],
'message': 'Redecentralization of the Web',
'observation_id': 42,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [42],
'message': 'Redecentralization of the Web',
'observation_id': 43,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [43],
'message': 'Redecentralization of the Web',
'observation_id': 44,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [44],
'message': 'Redecentralization of the Web',
'observation_id': 45,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [45],
'message': 'Redecentralization of the Web',
'observation_id': 46,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [46],
'message': 'Redecentralization of the Web',
'observation_id': 47,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [47],
'message': 'Redecentralization of the Web',
'observation_id': 48,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [48],
'message': 'Redecentralization of the Web',
'observation_id': 49,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [49],
'message': 'Redecentralization of the Web',
'observation_id': 50,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [50],
'message': 'Redecentralization of the Web',
'observation_id': 51,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [51],
'message': 'Redecentralization of the Web',
'observation_id': 52,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [52],
'message': 'Redecentralization of the Web',
'observation_id': 53,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [53],
'message': 'Redecentralization of the Web',
'observation_id': 54,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [54],
'message': 'Redecentralization of the Web',
'observation_id': 55,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [55],
'message': 'Redecentralization of the Web',
'observation_id': 56,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [56],
'message': 'Redecentralization of the Web',
'observation_id': 57,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [57],
'message': 'Redecentralization of the Web',
'observation_id': 58,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [58],
'message': 'Redecentralization of the Web',
'observation_id': 59,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [59],
'message': 'Redecentralization of the Web',
'observation_id': 60,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [60],
'message': 'Redecentralization of the Web',
'observation_id': 61,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [61],
'message': 'Redecentralization of the Web',
'observation_id': 62,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [62],
'message': 'Redecentralization of the Web',
'observation_id': 63,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [63],
'message': 'Redecentralization of the Web',
'observation_id': 64,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [64],
'message': 'Redecentralization of the Web',
'observation_id': 65,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [65],
'message': 'Redecentralization of the Web',
'observation_id': 66,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [66],
'message': 'Redecentralization of the Web',
'observation_id': 67,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [67],
'message': 'Redecentralization of the Web',
'observation_id': 68,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [68],
'message': 'Redecentralization of the Web',
'observation_id': 69,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [69],
'message': 'Redecentralization of the Web',
'observation_id': 70,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [70],
'message': 'Redecentralization of the Web',
'observation_id': 71,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [71],
'message': 'Redecentralization of the Web',
'observation_id': 72,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [72],
'message': 'Redecentralization of the Web',
'observation_id': 73,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [73],
'message': 'Redecentralization of the Web',
'observation_id': 74,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [74],
'message': 'Redecentralization of the Web',
'observation_id': 75,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [75],
'message': 'Redecentralization of the Web',
'observation_id': 76,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [76],
'message': 'Redecentralization of the Web',
'observation_id': 77,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [77],
'message': 'Redecentralization of the Web',
'observation_id': 78,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [78],
'message': 'Redecentralization of the Web',
'observation_id': 79,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [79],
'message': 'Redecentralization of the Web',
'observation_id': 80,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [80],
'message': 'Redecentralization of the Web',
'observation_id': 81,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [81],
'message': 'Redecentralization of the Web',
'observation_id': 82,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [82],
'message': 'Redecentralization of the Web',
'observation_id': 83,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [83],
'message': 'Redecentralization of the Web',
'observation_id': 84,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [84],
'message': 'Redecentralization of the Web',
'observation_id': 85,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [85],
'message': 'Redecentralization of the Web',
'observation_id': 86,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [86],
'message': 'Redecentralization of the Web',
'observation_id': 87,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [87],
'message': 'Redecentralization of the Web',
'observation_id': 88,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [88],
'message': 'Redecentralization of the Web',
'observation_id': 89,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [89],
'message': 'Redecentralization of the Web',
'observation_id': 90,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [90],
'message': 'Redecentralization of the Web',
'observation_id': 91,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [91],
'message': 'Redecentralization of the Web',
'observation_id': 92,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [92],
'message': 'Redecentralization of the Web',
'observation_id': 93,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [93],
'message': 'Redecentralization of the Web',
'observation_id': 94,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [94],
'message': 'Redecentralization of the Web',
'observation_id': 95,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [95],
'message': 'Redecentralization of the Web',
'observation_id': 96,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [96],
'message': 'Redecentralization of the Web',
'observation_id': 97,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [97],
'message': 'Redecentralization of the Web',
'observation_id': 98,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [98],
'message': 'Redecentralization of the Web',
'observation_id': 99,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [99],
'message': 'Redecentralization of the Web',
'observation_id': 100,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [100],
'message': 'Redecentralization of the Web',
'observation_id': 101,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [101],
'message': 'Redecentralization of the Web',
'observation_id': 102,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [102],
'message': 'Redecentralization of the Web',
'observation_id': 103,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [103],
'message': 'Redecentralization of the Web',
'observation_id': 104,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [104],
'message': 'Redecentralization of the Web',
'observation_id': 105,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [105],
'message': 'Redecentralization of the Web',
'observation_id': 106,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [106],
'message': 'Redecentralization of the Web',
'observation_id': 107,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [107],
'message': 'Redecentralization of the Web',
'observation_id': 108,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [108],
'message': 'Redecentralization of the Web',
'observation_id': 109,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [109],
'message': 'Redecentralization of the Web',
'observation_id': 110,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [110],
'message': 'Redecentralization of the Web',
'observation_id': 111,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [111],
'message': 'Redecentralization of the Web',
'observation_id': 112,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [112],
'message': 'Redecentralization of the Web',
'observation_id': 113,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [113],
'message': 'Redecentralization of the Web',
'observation_id': 114,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [114],
'message': 'Redecentralization of the Web',
'observation_id': 115,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [115],
'message': 'Redecentralization of the Web',
'observation_id': 116,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [116],
'message': 'Redecentralization of the Web',
'observation_id': 117,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [117],
'message': 'Redecentralization of the Web',
'observation_id': 118,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [118],
'message': 'Redecentralization of the Web',
'observation_id': 119,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [119],
'message': 'Redecentralization of the Web',
'observation_id': 120,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [120],
'message': 'Redecentralization of the Web',
'observation_id': 121,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [121],
'message': 'Redecentralization of the Web',
'observation_id': 122,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [122],
'message': 'Redecentralization of the Web',
'observation_id': 123,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [123],
'message': 'Redecentralization of the Web',
'observation_id': 124,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [124],
'message': 'Redecentralization of the Web',
'observation_id': 125,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [125],
'message': 'Redecentralization of the Web',
'observation_id': 126,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [126],
'message': 'Redecentralization of the Web',
'observation_id': 127,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [127],
'message': 'Redecentralization of the Web',
'observation_id': 128,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [128],
'message': 'Redecentralization of the Web',
'observation_id': 129,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [129],
'message': 'Redecentralization of the Web',
'observation_id': 130,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [130],
'message': 'Redecentralization of the Web',
'observation_id': 131,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [131],
'message': 'Redecentralization of the Web',
'observation_id': 132,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [132],
'message': 'Redecentralization of the Web',
'observation_id': 133,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [133],
'message': 'Redecentralization of the Web',
'observation_id': 134,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [134],
'message': 'Redecentralization of the Web',
'observation_id': 135,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [135],
'message': 'Redecentralization of the Web',
'observation_id': 136,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [136],
'message': 'Redecentralization of the Web',
'observation_id': 137,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [137],
'message': 'Redecentralization of the Web',
'observation_id': 138,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [138],
'message': 'Redecentralization of the Web',
'observation_id': 139,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [139],
'message': 'Redecentralization of the Web',
'observation_id': 140,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [140],
'message': 'Redecentralization of the Web',
'observation_id': 141,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [141],
'message': 'Redecentralization of the Web',
'observation_id': 142,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [142],
'message': 'Redecentralization of the Web',
'observation_id': 143,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [143],
'message': 'Redecentralization of the Web',
'observation_id': 144,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [144],
'message': 'Redecentralization of the Web',
'observation_id': 145,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [145],
'message': 'Redecentralization of the Web',
'observation_id': 146,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [146],
'message': 'Redecentralization of the Web',
'observation_id': 147,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [147],
'message': 'Redecentralization of the Web',
'observation_id': 148,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [148],
'message': 'Redecentralization of the Web',
'observation_id': 149,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [149],
'message': 'Redecentralization of the Web',
'observation_id': 150,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [150],
'message': 'Redecentralization of the Web',
'observation_id': 151,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [151],
'message': 'Redecentralization of the Web',
'observation_id': 152,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [152],
'message': 'Redecentralization of the Web',
'observation_id': 153,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [153],
'message': 'Redecentralization of the Web',
'observation_id': 154,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [154],
'message': 'Redecentralization of the Web',
'observation_id': 155,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [155],
'message': 'Redecentralization of the Web',
'observation_id': 156,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [156],
'message': 'Redecentralization of the Web',
'observation_id': 157,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [157],
'message': 'Redecentralization of the Web',
'observation_id': 158,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [158],
'message': 'Redecentralization of the Web',
'observation_id': 159,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [159],
'message': 'Redecentralization of the Web',
'observation_id': 160,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [160],
'message': 'Redecentralization of the Web',
'observation_id': 161,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [161],
'message': 'Redecentralization of the Web',
'observation_id': 162,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [162],
'message': 'Redecentralization of the Web',
'observation_id': 163,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [163],
'message': 'Redecentralization of the Web',
'observation_id': 164,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [164],
'message': 'Redecentralization of the Web',
'observation_id': 165,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [165],
'message': 'Redecentralization of the Web',
'observation_id': 166,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [166],
'message': 'Redecentralization of the Web',
'observation_id': 167,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [167],
'message': 'Redecentralization of the Web',
'observation_id': 168,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [168],
'message': 'Redecentralization of the Web',
'observation_id': 169,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [169],
'message': 'Redecentralization of the Web',
'observation_id': 170,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [170],
'message': 'Redecentralization of the Web',
'observation_id': 171,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [171],
'message': 'Redecentralization of the Web',
'observation_id': 172,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [172],
'message': 'Redecentralization of the Web',
'observation_id': 173,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [173],
'message': 'Redecentralization of the Web',
'observation_id': 174,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [174],
'message': 'Redecentralization of the Web',
'observation_id': 175,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [175],
'message': 'Redecentralization of the Web',
'observation_id': 176,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [176],
'message': 'Redecentralization of the Web',
'observation_id': 177,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [177],
'message': 'Redecentralization of the Web',
'observation_id': 178,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [178],
'message': 'Redecentralization of the Web',
'observation_id': 179,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [179],
'message': 'Redecentralization of the Web',
'observation_id': 180,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [180],
'message': 'Redecentralization of the Web',
'observation_id': 181,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [181],
'message': 'Redecentralization of the Web',
'observation_id': 182,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [182],
'message': 'Redecentralization of the Web',
'observation_id': 183,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [183],
'message': 'Redecentralization of the Web',
'observation_id': 184,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [184],
'message': 'Redecentralization of the Web',
'observation_id': 185,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [185],
'message': 'Redecentralization of the Web',
'observation_id': 186,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [186],
'message': 'Redecentralization of the Web',
'observation_id': 187,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [187],
'message': 'Redecentralization of the Web',
'observation_id': 188,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [188],
'message': 'Redecentralization of the Web',
'observation_id': 189,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [189],
'message': 'Redecentralization of the Web',
'observation_id': 190,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [190],
'message': 'Redecentralization of the Web',
'observation_id': 191,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [191],
'message': 'Redecentralization of the Web',
'observation_id': 192,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [192],
'message': 'Redecentralization of the Web',
'observation_id': 193,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [193],
'message': 'Redecentralization of the Web',
'observation_id': 194,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [194],
'message': 'Redecentralization of the Web',
'observation_id': 195,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [195],
'message': 'Redecentralization of the Web',
'observation_id': 196,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [196],
'message': 'Redecentralization of the Web',
'observation_id': 197,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [197],
'message': 'Redecentralization of the Web',
'observation_id': 198,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [198],
'message': 'Redecentralization of the Web',
'observation_id': 199,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [199],
'message': 'Redecentralization of the Web',
'observation_id': 200,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [200],
'message': 'Redecentralization of the Web',
'observation_id': 201,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [201],
'message': 'Redecentralization of the Web',
'observation_id': 202,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [202],
'message': 'Redecentralization of the Web',
'observation_id': 203,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [203],
'message': 'Redecentralization of the Web',
'observation_id': 204,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [204],
'message': 'Redecentralization of the Web',
'observation_id': 205,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [205],
'message': 'Redecentralization of the Web',
'observation_id': 206,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [206],
'message': 'Redecentralization of the Web',
'observation_id': 207,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [207],
'message': 'Redecentralization of the Web',
'observation_id': 208,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [208],
'message': 'Redecentralization of the Web',
'observation_id': 209,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [209],
'message': 'Redecentralization of the Web',
'observation_id': 210,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [210],
'message': 'Redecentralization of the Web',
'observation_id': 211,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [211],
'message': 'Redecentralization of the Web',
'observation_id': 212,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [212],
'message': 'Redecentralization of the Web',
'observation_id': 213,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [213],
'message': 'Redecentralization of the Web',
'observation_id': 214,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [214],
'message': 'Redecentralization of the Web',
'observation_id': 215,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [215],
'message': 'Redecentralization of the Web',
'observation_id': 216,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [216],
'message': 'Redecentralization of the Web',
'observation_id': 217,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [217],
'message': 'Redecentralization of the Web',
'observation_id': 218,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [218],
'message': 'Redecentralization of the Web',
'observation_id': 219,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [219],
'message': 'Redecentralization of the Web',
'observation_id': 220,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [220],
'message': 'Redecentralization of the Web',
'observation_id': 221,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [221],
'message': 'Redecentralization of the Web',
'observation_id': 222,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [222],
'message': 'Redecentralization of the Web',
'observation_id': 223,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [223],
'message': 'Redecentralization of the Web',
'observation_id': 224,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [224],
'message': 'Redecentralization of the Web',
'observation_id': 225,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [225],
'message': 'Redecentralization of the Web',
'observation_id': 226,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [226],
'message': 'Redecentralization of the Web',
'observation_id': 227,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [227],
'message': 'Redecentralization of the Web',
'observation_id': 228,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [228],
'message': 'Redecentralization of the Web',
'observation_id': 229,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [229],
'message': 'Redecentralization of the Web',
'observation_id': 230,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [230],
'message': 'Redecentralization of the Web',
'observation_id': 231,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [231],
'message': 'Redecentralization of the Web',
'observation_id': 232,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [232],
'message': 'Redecentralization of the Web',
'observation_id': 233,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [233],
'message': 'Redecentralization of the Web',
'observation_id': 234,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [234],
'message': 'Redecentralization of the Web',
'observation_id': 235,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [235],
'message': 'Redecentralization of the Web',
'observation_id': 236,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [236],
'message': 'Redecentralization of the Web',
'observation_id': 237,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [237],
'message': 'Redecentralization of the Web',
'observation_id': 238,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [238],
'message': 'Redecentralization of the Web',
'observation_id': 239,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [239],
'message': 'Redecentralization of the Web',
'observation_id': 240,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [240],
'message': 'Redecentralization of the Web',
'observation_id': 241,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [241],
'message': 'Redecentralization of the Web',
'observation_id': 242,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [242],
'message': 'Redecentralization of the Web',
'observation_id': 243,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [243],
'message': 'Redecentralization of the Web',
'observation_id': 244,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [244],
'message': 'Redecentralization of the Web',
'observation_id': 245,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [245],
'message': 'Redecentralization of the Web',
'observation_id': 246,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [246],
'message': 'Redecentralization of the Web',
'observation_id': 247,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [247],
'message': 'Redecentralization of the Web',
'observation_id': 248,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [248],
'message': 'Redecentralization of the Web',
'observation_id': 249,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [249],
'message': 'Redecentralization of the Web',
'observation_id': 250,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [250],
'message': 'Redecentralization of the Web',
'observation_id': 251,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [251],
'message': 'Redecentralization of the Web',
'observation_id': 252,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [252],
'message': 'Redecentralization of the Web',
'observation_id': 253,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [253],
'message': 'Redecentralization of the Web',
'observation_id': 254,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [254],
'message': 'Redecentralization of the Web',
'observation_id': 255,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [255],
'message': 'Redecentralization of the Web',
'observation_id': 256,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [256],
'message': 'Redecentralization of the Web',
'observation_id': 257,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [257],
'message': 'Redecentralization of the Web',
'observation_id': 258,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [258],
'message': 'Redecentralization of the Web',
'observation_id': 259,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [259],
'message': 'Redecentralization of the Web',
'observation_id': 260,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [260],
'message': 'Redecentralization of the Web',
'observation_id': 261,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [261],
'message': 'Redecentralization of the Web',
'observation_id': 262,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [262],
'message': 'Redecentralization of the Web',
'observation_id': 263,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [263],
'message': 'Redecentralization of the Web',
'observation_id': 264,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [264],
'message': 'Redecentralization of the Web',
'observation_id': 265,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [265],
'message': 'Redecentralization of the Web',
'observation_id': 266,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [266],
'message': 'Redecentralization of the Web',
'observation_id': 267,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [267],
'message': 'Redecentralization of the Web',
'observation_id': 268,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [268],
'message': 'Redecentralization of the Web',
'observation_id': 269,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [269],
'message': 'Redecentralization of the Web',
'observation_id': 270,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [270],
'message': 'Redecentralization of the Web',
'observation_id': 271,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [271],
'message': 'Redecentralization of the Web',
'observation_id': 272,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [272],
'message': 'Redecentralization of the Web',
'observation_id': 273,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [273],
'message': 'Redecentralization of the Web',
'observation_id': 274,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [274],
'message': 'Redecentralization of the Web',
'observation_id': 275,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [275],
'message': 'Redecentralization of the Web',
'observation_id': 276,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [276],
'message': 'Redecentralization of the Web',
'observation_id': 277,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [277],
'message': 'Redecentralization of the Web',
'observation_id': 278,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [278],
'message': 'Redecentralization of the Web',
'observation_id': 279,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [279],
'message': 'Redecentralization of the Web',
'observation_id': 280,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [280],
'message': 'Redecentralization of the Web',
'observation_id': 281,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [281],
'message': 'Redecentralization of the Web',
'observation_id': 282,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [282],
'message': 'Redecentralization of the Web',
'observation_id': 283,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [283],
'message': 'Redecentralization of the Web',
'observation_id': 284,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [284],
'message': 'Redecentralization of the Web',
'observation_id': 285,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
 # ... 397 entries elided (observation_id 286 through 682); every entry
 # repeats the same fixed pattern: author and sender alternate between
 # 'A' (even observation_id) and 'C' (odd observation_id), 'before' holds
 # only the previous observation_id, and message
 # ('Redecentralization of the Web'), receiver ('B'), and topic
 # ('Web Engineering') are constant throughout ...
{'author': 'C',
'before': [682],
'message': 'Redecentralization of the Web',
'observation_id': 683,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [683],
'message': 'Redecentralization of the Web',
'observation_id': 684,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [684],
'message': 'Redecentralization of the Web',
'observation_id': 685,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [685],
'message': 'Redecentralization of the Web',
'observation_id': 686,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [686],
'message': 'Redecentralization of the Web',
'observation_id': 687,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [687],
'message': 'Redecentralization of the Web',
'observation_id': 688,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [688],
'message': 'Redecentralization of the Web',
'observation_id': 689,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [689],
'message': 'Redecentralization of the Web',
'observation_id': 690,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [690],
'message': 'Redecentralization of the Web',
'observation_id': 691,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [691],
'message': 'Redecentralization of the Web',
'observation_id': 692,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [692],
'message': 'Redecentralization of the Web',
'observation_id': 693,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [693],
'message': 'Redecentralization of the Web',
'observation_id': 694,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [694],
'message': 'Redecentralization of the Web',
'observation_id': 695,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [695],
'message': 'Redecentralization of the Web',
'observation_id': 696,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [696],
'message': 'Redecentralization of the Web',
'observation_id': 697,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [697],
'message': 'Redecentralization of the Web',
'observation_id': 698,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [698],
'message': 'Redecentralization of the Web',
'observation_id': 699,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [699],
'message': 'Redecentralization of the Web',
'observation_id': 700,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [700],
'message': 'Redecentralization of the Web',
'observation_id': 701,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [701],
'message': 'Redecentralization of the Web',
'observation_id': 702,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [702],
'message': 'Redecentralization of the Web',
'observation_id': 703,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [703],
'message': 'Redecentralization of the Web',
'observation_id': 704,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [704],
'message': 'Redecentralization of the Web',
'observation_id': 705,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [705],
'message': 'Redecentralization of the Web',
'observation_id': 706,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [706],
'message': 'Redecentralization of the Web',
'observation_id': 707,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [707],
'message': 'Redecentralization of the Web',
'observation_id': 708,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [708],
'message': 'Redecentralization of the Web',
'observation_id': 709,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [709],
'message': 'Redecentralization of the Web',
'observation_id': 710,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [710],
'message': 'Redecentralization of the Web',
'observation_id': 711,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [711],
'message': 'Redecentralization of the Web',
'observation_id': 712,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [712],
'message': 'Redecentralization of the Web',
'observation_id': 713,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [713],
'message': 'Redecentralization of the Web',
'observation_id': 714,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [714],
'message': 'Redecentralization of the Web',
'observation_id': 715,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [715],
'message': 'Redecentralization of the Web',
'observation_id': 716,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [716],
'message': 'Redecentralization of the Web',
'observation_id': 717,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [717],
'message': 'Redecentralization of the Web',
'observation_id': 718,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [718],
'message': 'Redecentralization of the Web',
'observation_id': 719,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [719],
'message': 'Redecentralization of the Web',
'observation_id': 720,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [720],
'message': 'Redecentralization of the Web',
'observation_id': 721,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [721],
'message': 'Redecentralization of the Web',
'observation_id': 722,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [722],
'message': 'Redecentralization of the Web',
'observation_id': 723,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [723],
'message': 'Redecentralization of the Web',
'observation_id': 724,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [724],
'message': 'Redecentralization of the Web',
'observation_id': 725,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [725],
'message': 'Redecentralization of the Web',
'observation_id': 726,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [726],
'message': 'Redecentralization of the Web',
'observation_id': 727,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [727],
'message': 'Redecentralization of the Web',
'observation_id': 728,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [728],
'message': 'Redecentralization of the Web',
'observation_id': 729,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [729],
'message': 'Redecentralization of the Web',
'observation_id': 730,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [730],
'message': 'Redecentralization of the Web',
'observation_id': 731,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [731],
'message': 'Redecentralization of the Web',
'observation_id': 732,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [732],
'message': 'Redecentralization of the Web',
'observation_id': 733,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [733],
'message': 'Redecentralization of the Web',
'observation_id': 734,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [734],
'message': 'Redecentralization of the Web',
'observation_id': 735,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [735],
'message': 'Redecentralization of the Web',
'observation_id': 736,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [736],
'message': 'Redecentralization of the Web',
'observation_id': 737,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [737],
'message': 'Redecentralization of the Web',
'observation_id': 738,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [738],
'message': 'Redecentralization of the Web',
'observation_id': 739,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [739],
'message': 'Redecentralization of the Web',
'observation_id': 740,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [740],
'message': 'Redecentralization of the Web',
'observation_id': 741,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [741],
'message': 'Redecentralization of the Web',
'observation_id': 742,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [742],
'message': 'Redecentralization of the Web',
'observation_id': 743,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [743],
'message': 'Redecentralization of the Web',
'observation_id': 744,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [744],
'message': 'Redecentralization of the Web',
'observation_id': 745,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [745],
'message': 'Redecentralization of the Web',
'observation_id': 746,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [746],
'message': 'Redecentralization of the Web',
'observation_id': 747,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [747],
'message': 'Redecentralization of the Web',
'observation_id': 748,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [748],
'message': 'Redecentralization of the Web',
'observation_id': 749,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [749],
'message': 'Redecentralization of the Web',
'observation_id': 750,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [750],
'message': 'Redecentralization of the Web',
'observation_id': 751,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [751],
'message': 'Redecentralization of the Web',
'observation_id': 752,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [752],
'message': 'Redecentralization of the Web',
'observation_id': 753,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [753],
'message': 'Redecentralization of the Web',
'observation_id': 754,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [754],
'message': 'Redecentralization of the Web',
'observation_id': 755,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [755],
'message': 'Redecentralization of the Web',
'observation_id': 756,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [756],
'message': 'Redecentralization of the Web',
'observation_id': 757,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [757],
'message': 'Redecentralization of the Web',
'observation_id': 758,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [758],
'message': 'Redecentralization of the Web',
'observation_id': 759,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [759],
'message': 'Redecentralization of the Web',
'observation_id': 760,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [760],
'message': 'Redecentralization of the Web',
'observation_id': 761,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [761],
'message': 'Redecentralization of the Web',
'observation_id': 762,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [762],
'message': 'Redecentralization of the Web',
'observation_id': 763,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [763],
'message': 'Redecentralization of the Web',
'observation_id': 764,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [764],
'message': 'Redecentralization of the Web',
'observation_id': 765,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [765],
'message': 'Redecentralization of the Web',
'observation_id': 766,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [766],
'message': 'Redecentralization of the Web',
'observation_id': 767,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [767],
'message': 'Redecentralization of the Web',
'observation_id': 768,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [768],
'message': 'Redecentralization of the Web',
'observation_id': 769,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [769],
'message': 'Redecentralization of the Web',
'observation_id': 770,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [770],
'message': 'Redecentralization of the Web',
'observation_id': 771,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [771],
'message': 'Redecentralization of the Web',
'observation_id': 772,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [772],
'message': 'Redecentralization of the Web',
'observation_id': 773,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [773],
'message': 'Redecentralization of the Web',
'observation_id': 774,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [774],
'message': 'Redecentralization of the Web',
'observation_id': 775,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [775],
'message': 'Redecentralization of the Web',
'observation_id': 776,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [776],
'message': 'Redecentralization of the Web',
'observation_id': 777,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [777],
'message': 'Redecentralization of the Web',
'observation_id': 778,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [778],
'message': 'Redecentralization of the Web',
'observation_id': 779,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [779],
'message': 'Redecentralization of the Web',
'observation_id': 780,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [780],
'message': 'Redecentralization of the Web',
'observation_id': 781,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [781],
'message': 'Redecentralization of the Web',
'observation_id': 782,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [782],
'message': 'Redecentralization of the Web',
'observation_id': 783,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [783],
'message': 'Redecentralization of the Web',
'observation_id': 784,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [784],
'message': 'Redecentralization of the Web',
'observation_id': 785,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [785],
'message': 'Redecentralization of the Web',
'observation_id': 786,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [786],
'message': 'Redecentralization of the Web',
'observation_id': 787,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [787],
'message': 'Redecentralization of the Web',
'observation_id': 788,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [788],
'message': 'Redecentralization of the Web',
'observation_id': 789,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [789],
'message': 'Redecentralization of the Web',
'observation_id': 790,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [790],
'message': 'Redecentralization of the Web',
'observation_id': 791,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [791],
'message': 'Redecentralization of the Web',
'observation_id': 792,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [792],
'message': 'Redecentralization of the Web',
'observation_id': 793,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [793],
'message': 'Redecentralization of the Web',
'observation_id': 794,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [794],
'message': 'Redecentralization of the Web',
'observation_id': 795,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [795],
'message': 'Redecentralization of the Web',
'observation_id': 796,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [796],
'message': 'Redecentralization of the Web',
'observation_id': 797,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [797],
'message': 'Redecentralization of the Web',
'observation_id': 798,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [798],
'message': 'Redecentralization of the Web',
'observation_id': 799,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [799],
'message': 'Redecentralization of the Web',
'observation_id': 800,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [800],
'message': 'Redecentralization of the Web',
'observation_id': 801,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [801],
'message': 'Redecentralization of the Web',
'observation_id': 802,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [802],
'message': 'Redecentralization of the Web',
'observation_id': 803,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [803],
'message': 'Redecentralization of the Web',
'observation_id': 804,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [804],
'message': 'Redecentralization of the Web',
'observation_id': 805,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [805],
'message': 'Redecentralization of the Web',
'observation_id': 806,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [806],
'message': 'Redecentralization of the Web',
'observation_id': 807,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [807],
'message': 'Redecentralization of the Web',
'observation_id': 808,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [808],
'message': 'Redecentralization of the Web',
'observation_id': 809,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [809],
'message': 'Redecentralization of the Web',
'observation_id': 810,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [810],
'message': 'Redecentralization of the Web',
'observation_id': 811,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [811],
'message': 'Redecentralization of the Web',
'observation_id': 812,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [812],
'message': 'Redecentralization of the Web',
'observation_id': 813,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [813],
'message': 'Redecentralization of the Web',
'observation_id': 814,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [814],
'message': 'Redecentralization of the Web',
'observation_id': 815,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [815],
'message': 'Redecentralization of the Web',
'observation_id': 816,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [816],
'message': 'Redecentralization of the Web',
'observation_id': 817,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [817],
'message': 'Redecentralization of the Web',
'observation_id': 818,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [818],
'message': 'Redecentralization of the Web',
'observation_id': 819,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [819],
'message': 'Redecentralization of the Web',
'observation_id': 820,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [820],
'message': 'Redecentralization of the Web',
'observation_id': 821,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [821],
'message': 'Redecentralization of the Web',
'observation_id': 822,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [822],
'message': 'Redecentralization of the Web',
'observation_id': 823,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [823],
'message': 'Redecentralization of the Web',
'observation_id': 824,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [824],
'message': 'Redecentralization of the Web',
'observation_id': 825,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [825],
'message': 'Redecentralization of the Web',
'observation_id': 826,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [826],
'message': 'Redecentralization of the Web',
'observation_id': 827,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [827],
'message': 'Redecentralization of the Web',
'observation_id': 828,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [828],
'message': 'Redecentralization of the Web',
'observation_id': 829,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [829],
'message': 'Redecentralization of the Web',
'observation_id': 830,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [830],
'message': 'Redecentralization of the Web',
'observation_id': 831,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [831],
'message': 'Redecentralization of the Web',
'observation_id': 832,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [832],
'message': 'Redecentralization of the Web',
'observation_id': 833,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [833],
'message': 'Redecentralization of the Web',
'observation_id': 834,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [834],
'message': 'Redecentralization of the Web',
'observation_id': 835,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [835],
'message': 'Redecentralization of the Web',
'observation_id': 836,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [836],
'message': 'Redecentralization of the Web',
'observation_id': 837,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [837],
'message': 'Redecentralization of the Web',
'observation_id': 838,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [838],
'message': 'Redecentralization of the Web',
'observation_id': 839,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [839],
'message': 'Redecentralization of the Web',
'observation_id': 840,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [840],
'message': 'Redecentralization of the Web',
'observation_id': 841,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [841],
'message': 'Redecentralization of the Web',
'observation_id': 842,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [842],
'message': 'Redecentralization of the Web',
'observation_id': 843,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [843],
'message': 'Redecentralization of the Web',
'observation_id': 844,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [844],
'message': 'Redecentralization of the Web',
'observation_id': 845,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [845],
'message': 'Redecentralization of the Web',
'observation_id': 846,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [846],
'message': 'Redecentralization of the Web',
'observation_id': 847,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [847],
'message': 'Redecentralization of the Web',
'observation_id': 848,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [848],
'message': 'Redecentralization of the Web',
'observation_id': 849,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [849],
'message': 'Redecentralization of the Web',
'observation_id': 850,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [850],
'message': 'Redecentralization of the Web',
'observation_id': 851,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [851],
'message': 'Redecentralization of the Web',
'observation_id': 852,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [852],
'message': 'Redecentralization of the Web',
'observation_id': 853,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [853],
'message': 'Redecentralization of the Web',
'observation_id': 854,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [854],
'message': 'Redecentralization of the Web',
'observation_id': 855,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [855],
'message': 'Redecentralization of the Web',
'observation_id': 856,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [856],
'message': 'Redecentralization of the Web',
'observation_id': 857,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [857],
'message': 'Redecentralization of the Web',
'observation_id': 858,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [858],
'message': 'Redecentralization of the Web',
'observation_id': 859,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [859],
'message': 'Redecentralization of the Web',
'observation_id': 860,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [860],
'message': 'Redecentralization of the Web',
'observation_id': 861,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [861],
'message': 'Redecentralization of the Web',
'observation_id': 862,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [862],
'message': 'Redecentralization of the Web',
'observation_id': 863,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [863],
'message': 'Redecentralization of the Web',
'observation_id': 864,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [864],
'message': 'Redecentralization of the Web',
'observation_id': 865,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [865],
'message': 'Redecentralization of the Web',
'observation_id': 866,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [866],
'message': 'Redecentralization of the Web',
'observation_id': 867,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [867],
'message': 'Redecentralization of the Web',
'observation_id': 868,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [868],
'message': 'Redecentralization of the Web',
'observation_id': 869,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [869],
'message': 'Redecentralization of the Web',
'observation_id': 870,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [870],
'message': 'Redecentralization of the Web',
'observation_id': 871,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [871],
'message': 'Redecentralization of the Web',
'observation_id': 872,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [872],
'message': 'Redecentralization of the Web',
'observation_id': 873,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [873],
'message': 'Redecentralization of the Web',
'observation_id': 874,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [874],
'message': 'Redecentralization of the Web',
'observation_id': 875,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [875],
'message': 'Redecentralization of the Web',
'observation_id': 876,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [876],
'message': 'Redecentralization of the Web',
'observation_id': 877,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [877],
'message': 'Redecentralization of the Web',
'observation_id': 878,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [878],
'message': 'Redecentralization of the Web',
'observation_id': 879,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [879],
'message': 'Redecentralization of the Web',
'observation_id': 880,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [880],
'message': 'Redecentralization of the Web',
'observation_id': 881,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [881],
'message': 'Redecentralization of the Web',
'observation_id': 882,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [882],
'message': 'Redecentralization of the Web',
'observation_id': 883,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [883],
'message': 'Redecentralization of the Web',
'observation_id': 884,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [884],
'message': 'Redecentralization of the Web',
'observation_id': 885,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [885],
'message': 'Redecentralization of the Web',
'observation_id': 886,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [886],
'message': 'Redecentralization of the Web',
'observation_id': 887,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [887],
'message': 'Redecentralization of the Web',
'observation_id': 888,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [888],
'message': 'Redecentralization of the Web',
'observation_id': 889,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [889],
'message': 'Redecentralization of the Web',
'observation_id': 890,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [890],
'message': 'Redecentralization of the Web',
'observation_id': 891,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [891],
'message': 'Redecentralization of the Web',
'observation_id': 892,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [892],
'message': 'Redecentralization of the Web',
'observation_id': 893,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [893],
'message': 'Redecentralization of the Web',
'observation_id': 894,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [894],
'message': 'Redecentralization of the Web',
'observation_id': 895,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [895],
'message': 'Redecentralization of the Web',
'observation_id': 896,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [896],
'message': 'Redecentralization of the Web',
'observation_id': 897,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [897],
'message': 'Redecentralization of the Web',
'observation_id': 898,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [898],
'message': 'Redecentralization of the Web',
'observation_id': 899,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [899],
'message': 'Redecentralization of the Web',
'observation_id': 900,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [900],
'message': 'Redecentralization of the Web',
'observation_id': 901,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [901],
'message': 'Redecentralization of the Web',
'observation_id': 902,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [902],
'message': 'Redecentralization of the Web',
'observation_id': 903,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [903],
'message': 'Redecentralization of the Web',
'observation_id': 904,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [904],
'message': 'Redecentralization of the Web',
'observation_id': 905,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [905],
'message': 'Redecentralization of the Web',
'observation_id': 906,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [906],
'message': 'Redecentralization of the Web',
'observation_id': 907,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [907],
'message': 'Redecentralization of the Web',
'observation_id': 908,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [908],
'message': 'Redecentralization of the Web',
'observation_id': 909,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [909],
'message': 'Redecentralization of the Web',
'observation_id': 910,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [910],
'message': 'Redecentralization of the Web',
'observation_id': 911,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [911],
'message': 'Redecentralization of the Web',
'observation_id': 912,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [912],
'message': 'Redecentralization of the Web',
'observation_id': 913,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [913],
'message': 'Redecentralization of the Web',
'observation_id': 914,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [914],
'message': 'Redecentralization of the Web',
'observation_id': 915,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [915],
'message': 'Redecentralization of the Web',
'observation_id': 916,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [916],
'message': 'Redecentralization of the Web',
'observation_id': 917,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [917],
'message': 'Redecentralization of the Web',
'observation_id': 918,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [918],
'message': 'Redecentralization of the Web',
'observation_id': 919,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [919],
'message': 'Redecentralization of the Web',
'observation_id': 920,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [920],
'message': 'Redecentralization of the Web',
'observation_id': 921,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [921],
'message': 'Redecentralization of the Web',
'observation_id': 922,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [922],
'message': 'Redecentralization of the Web',
'observation_id': 923,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [923],
'message': 'Redecentralization of the Web',
'observation_id': 924,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [924],
'message': 'Redecentralization of the Web',
'observation_id': 925,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [925],
'message': 'Redecentralization of the Web',
'observation_id': 926,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [926],
'message': 'Redecentralization of the Web',
'observation_id': 927,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [927],
'message': 'Redecentralization of the Web',
'observation_id': 928,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [928],
'message': 'Redecentralization of the Web',
'observation_id': 929,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [929],
'message': 'Redecentralization of the Web',
'observation_id': 930,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [930],
'message': 'Redecentralization of the Web',
'observation_id': 931,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [931],
'message': 'Redecentralization of the Web',
'observation_id': 932,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [932],
'message': 'Redecentralization of the Web',
'observation_id': 933,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [933],
'message': 'Redecentralization of the Web',
'observation_id': 934,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [934],
'message': 'Redecentralization of the Web',
'observation_id': 935,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [935],
'message': 'Redecentralization of the Web',
'observation_id': 936,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [936],
'message': 'Redecentralization of the Web',
'observation_id': 937,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [937],
'message': 'Redecentralization of the Web',
'observation_id': 938,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [938],
'message': 'Redecentralization of the Web',
'observation_id': 939,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [939],
'message': 'Redecentralization of the Web',
'observation_id': 940,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [940],
'message': 'Redecentralization of the Web',
'observation_id': 941,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [941],
'message': 'Redecentralization of the Web',
'observation_id': 942,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [942],
'message': 'Redecentralization of the Web',
'observation_id': 943,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [943],
'message': 'Redecentralization of the Web',
'observation_id': 944,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [944],
'message': 'Redecentralization of the Web',
'observation_id': 945,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [945],
'message': 'Redecentralization of the Web',
'observation_id': 946,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [946],
'message': 'Redecentralization of the Web',
'observation_id': 947,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [947],
'message': 'Redecentralization of the Web',
'observation_id': 948,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [948],
'message': 'Redecentralization of the Web',
'observation_id': 949,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [949],
'message': 'Redecentralization of the Web',
'observation_id': 950,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [950],
'message': 'Redecentralization of the Web',
'observation_id': 951,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [951],
'message': 'Redecentralization of the Web',
'observation_id': 952,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [952],
'message': 'Redecentralization of the Web',
'observation_id': 953,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [953],
'message': 'Redecentralization of the Web',
'observation_id': 954,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [954],
'message': 'Redecentralization of the Web',
'observation_id': 955,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [955],
'message': 'Redecentralization of the Web',
'observation_id': 956,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [956],
'message': 'Redecentralization of the Web',
'observation_id': 957,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [957],
'message': 'Redecentralization of the Web',
'observation_id': 958,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [958],
'message': 'Redecentralization of the Web',
'observation_id': 959,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [959],
'message': 'Redecentralization of the Web',
'observation_id': 960,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [960],
'message': 'Redecentralization of the Web',
'observation_id': 961,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [961],
'message': 'Redecentralization of the Web',
'observation_id': 962,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [962],
'message': 'Redecentralization of the Web',
'observation_id': 963,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [963],
'message': 'Redecentralization of the Web',
'observation_id': 964,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [964],
'message': 'Redecentralization of the Web',
'observation_id': 965,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [965],
'message': 'Redecentralization of the Web',
'observation_id': 966,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [966],
'message': 'Redecentralization of the Web',
'observation_id': 967,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [967],
'message': 'Redecentralization of the Web',
'observation_id': 968,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [968],
'message': 'Redecentralization of the Web',
'observation_id': 969,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [969],
'message': 'Redecentralization of the Web',
'observation_id': 970,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [970],
'message': 'Redecentralization of the Web',
'observation_id': 971,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [971],
'message': 'Redecentralization of the Web',
'observation_id': 972,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [972],
'message': 'Redecentralization of the Web',
'observation_id': 973,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [973],
'message': 'Redecentralization of the Web',
'observation_id': 974,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [974],
'message': 'Redecentralization of the Web',
'observation_id': 975,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [975],
'message': 'Redecentralization of the Web',
'observation_id': 976,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [976],
'message': 'Redecentralization of the Web',
'observation_id': 977,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [977],
'message': 'Redecentralization of the Web',
'observation_id': 978,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [978],
'message': 'Redecentralization of the Web',
'observation_id': 979,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [979],
'message': 'Redecentralization of the Web',
'observation_id': 980,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [980],
'message': 'Redecentralization of the Web',
'observation_id': 981,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [981],
'message': 'Redecentralization of the Web',
'observation_id': 982,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [982],
'message': 'Redecentralization of the Web',
'observation_id': 983,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [983],
'message': 'Redecentralization of the Web',
'observation_id': 984,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [984],
'message': 'Redecentralization of the Web',
'observation_id': 985,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [985],
'message': 'Redecentralization of the Web',
'observation_id': 986,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [986],
'message': 'Redecentralization of the Web',
'observation_id': 987,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [987],
'message': 'Redecentralization of the Web',
'observation_id': 988,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [988],
'message': 'Redecentralization of the Web',
'observation_id': 989,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [989],
'message': 'Redecentralization of the Web',
'observation_id': 990,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [990],
'message': 'Redecentralization of the Web',
'observation_id': 991,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [991],
'message': 'Redecentralization of the Web',
'observation_id': 992,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [992],
'message': 'Redecentralization of the Web',
'observation_id': 993,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [993],
'message': 'Redecentralization of the Web',
'observation_id': 994,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [994],
'message': 'Redecentralization of the Web',
'observation_id': 995,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [995],
'message': 'Redecentralization of the Web',
'observation_id': 996,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [996],
'message': 'Redecentralization of the Web',
'observation_id': 997,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'},
{'author': 'A',
'before': [997],
'message': 'Redecentralization of the Web',
'observation_id': 998,
'receiver': 'B',
'sender': 'A',
'topic': 'Web Engineering'},
{'author': 'C',
'before': [998],
'message': 'Redecentralization of the Web',
'observation_id': 999,
'receiver': 'B',
'sender': 'C',
'topic': 'Web Engineering'}]
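# Initial pairwise trust values between agents (interpretation assumed from
# context): B starts with zero trust in A and C; all other pairs start at 1.0.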
HISTORY = {'A': {'B': 1.0, 'C': 1.0, 'D': 1.0},
           'B': {'A': 0.0, 'C': 0.0, 'D': 1.0},
           'C': {'A': 1.0, 'B': 1.0, 'D': 1.0},
           'D': {'A': 1.0, 'B': 1.0, 'C': 1.0}}
# The same Marsh/Briggs trust scale is assigned to every agent.
SCALES_PER_AGENT = {agent: {'cooperation': 0.5,
                            'default': 0.0,
                            'forgivability': -0.5,
                            'maximum': 1.0,
                            'minimum': -1.0,
                            'name': 'Trust Scale by Marsh and Briggs (2009)',
                            'package': 'marsh_briggs_scale'}
                    for agent in ('A', 'B', 'C', 'D')}
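# Every agent evaluates the same three content-trust metrics; '__final__'
# (interpretation assumed) combines them via a weighted average, with the
# empty weights dict presumably meaning default/equal weighting.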
METRICS_PER_AGENT = {agent: {'__final__': {'name': 'weighted_average', 'weights': {}},
                             'content_trust.direct_experience': {},
                             'content_trust.popularity': {},
                             'content_trust.recommendation': {}}
                     for agent in ('A', 'B', 'C', 'D')}
DESCRIPTION = 'Scalability Test with observation upscaling for WI 2020'

d16b6ead2527d4298cff6db95163fe066587a238 | 7572 | py | Python | supplementary_material/supmat_linreg_truncatedN.py | themisbo/Rule-based-Bayesian-regr | 9dc3e896e67117a43580f0a58199d3b8203f6f9d | ["Apache-2.0"] | 1 | 2022-02-11T14:20:12.000Z | 2022-02-11T14:20:12.000Z
import matplotlib.pyplot as plt
plt.style.use("ggplot")
import numpy as np
import pymc3 as pm
### Synthetic data generation ###
sample_size = 500
sigma_e = 3.0 # true value of parameter error sigma
np.random.seed(1)
random_num_generator = np.random.RandomState(0)
x = 10.0 * random_num_generator.rand(sample_size)
x_lt = x[x < 4]
x_gt = x[x > 5]
x = x[x > 4]
x = x[x < 5]
e = random_num_generator.normal(0, sigma_e, len(x))
y = 1.0 + 2.0 * x + e # a = 1.0; b = 2.0; y = a + b*x
y_lt = 1.0 + 2.0 * x_lt + random_num_generator.normal(0, sigma_e, len(x_lt))
y_gt = 1.0 + 2.0 * x_gt + random_num_generator.normal(0, sigma_e, len(x_gt))
x_disc = np.linspace(0, 10, 20)
y_true = 1.0 + 2.0 * x_disc
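# Only the band 4 < x < 5 is kept as visible training data; the points with
# x < 4 (x_lt) and x > 5 (x_gt) are held out and plotted only for reference,
# so the regression must extrapolate well outside the observed range.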
fig = plt.figure(figsize=(6, 4))
ax = fig.gca()
plt.scatter(x, y, color="blue", label="Visible data")
plt.scatter(x_lt, y_lt, color="mediumaquamarine", label="Non-visible data")
plt.scatter(x_gt, y_gt, color="mediumaquamarine")
plt.plot(
x_disc, y_true, linewidth=3, color="darkolivegreen", label="True regression line"
)
ax.set_ylabel("y")
ax.set_xlabel("x")
ax.legend()
###########################################
### Standard Bayesian linear regression ###
###########################################
with pm.Model() as basic_model:
# Priors for unknown model parameters
a = pm.Normal("alpha", mu=0.5, sigma=0.5)
b = pm.Normal("beta", mu=0.5, sigma=0.5)
# Expected value of outcome
mu = a + b * x
# Likelihood (sampling distribution) of observations
Y_obs = pm.Normal("Y_obs", mu=mu, sigma=sigma_e, observed=y)
# MCMC
with basic_model:
step = pm.Metropolis()
trace = pm.sample(draws=100000, tune=20000, step=step, cores=1, chains=1)
# Visualization
thin = 100
xvals = np.linspace(0, 10, 20)
fig = plt.figure(figsize=(6, 4))
ax = fig.gca()
for i in range(int(trace["alpha"].shape[0] / thin)):
    # keep every `thin`-th draw to reduce autocorrelation between plotted lines
    a = trace["alpha"][i * thin]
    b = trace["beta"][i * thin]
yvals = a + b * xvals
plt.plot(xvals, yvals, color="red", alpha=0.1)
plt.plot(xvals, yvals, color="red", alpha=1, label="Posterior regression lines")
ymean = trace["alpha"].mean() + trace["beta"].mean() * xvals
plt.plot(xvals, ymean, color="yellow", label="Mean posterior regression line")
plt.scatter(x, y, color="blue", label="Data")
plt.plot(
x_disc, y_true, linewidth=3, color="darkolivegreen", label="True regression line"
)
ax.set_ylabel("y")
ax.set_xlabel("x")
ax.legend()
plt.show()
####################################################################
### Rule-based Bayesian linear regression - rule hyperparameters ###
####################################################################
def logp_rule(a, b, xlow, xhi, ylow, yhi):
rule_log_lik = 0
rule_log_lik = rule_log_lik + pm.math.switch(
pm.math.or_(
pm.math.or_(pm.math.lt(a + b * 0, 0), pm.math.lt(a + b * xlow, 0)),
pm.math.or_(pm.math.gt(a + b * 0, ylow), pm.math.gt(a + b * xlow, ylow)),
),
1,
0,
)
rule_log_lik = rule_log_lik + pm.math.switch(
pm.math.or_(
pm.math.or_(pm.math.lt(a + b * xhi, yhi), pm.math.lt(a + b * 10, yhi)),
pm.math.or_(pm.math.gt(a + b * xhi, 22), pm.math.gt(a + b * 10, 22)),
),
1,
0,
)
rule_ratio = rule_log_lik / 2
return pm.Beta.dist(alpha=1.0, beta=100.0).logp(rule_ratio)
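# logp_rule encodes two expert rules as a soft penalty. The first switch flags a
# violation (1) if the line a + b*x leaves the band [0, ylow] at x = 0 or x = xlow;
# the second flags a violation if it leaves [yhi, 22] at x = xhi or x = 10.
# rule_ratio is then the fraction of violated rules (0, 0.5 or 1), scored under a
# Beta(1, 100) density: the log-density is maximal at rule_ratio = 0, drops by
# roughly 99*log(2) at 0.5, and is -inf when both rules are violated, so posterior
# mass concentrates on lines consistent with both rules.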
with pm.Model() as rule_model:
# Priors for unknown model parameters
a = pm.Normal("alpha", mu=0.5, sigma=0.5)
b = pm.Normal("beta", mu=0.5, sigma=0.5)
xlow = pm.Normal("xlow", mu=1.5, sigma=0.5)
xhi = pm.Normal("xhi", mu=8.5, sigma=0.5)
ylow = pm.Normal("ylow", mu=4.5, sigma=0.5)
yhi = pm.Normal("yhi", mu=18.5, sigma=0.5)
# sigma = pm.HalfNormal('sigma', sigma=1)
# Expected value of outcome
mu = a + b * x
Y_obs = pm.Normal("Y_obs", mu=mu, sigma=sigma_e, observed=y)
LL_rule = pm.Potential("LL_rule", logp_rule(a, b, xlow, xhi, ylow, yhi))
# MCMC
with rule_model:
step = pm.Metropolis()
trace = pm.sample(draws=100000, tune=20000, step=step, cores=1, chains=1)
thin = 100
fig = plt.figure(figsize=(6, 4))
ax = fig.gca()
for i in range(int(trace["alpha"].shape[0] / thin)):
    # keep every `thin`-th draw to reduce autocorrelation between plotted lines
    a = trace["alpha"][i * thin]
    b = trace["beta"][i * thin]
yvals = a + b * xvals
plt.plot(xvals, yvals, color="red", alpha=0.1)
plt.plot(xvals, yvals, color="red", alpha=1, label="Posterior regression lines")
ymean = trace["alpha"].mean() + trace["beta"].mean() * xvals
plt.plot(xvals, ymean, color="yellow", label="Mean posterior regression line")
plt.scatter(x, y, color="blue", label="Data")
plt.plot(
x_disc, y_true, linewidth=3, color="darkolivegreen", label="True regression line"
)
ax.set_ylabel("y")
ax.set_xlabel("x")
ax.legend()
plt.show()
####################################################################
### Rule-based Bayesian linear regression - rule hyperparameters ###
######################### Truncated Normal priors ##################
####################################################################
def logp_rule(a, b, xlow, xhi, ylow, yhi):
rule_log_lik = 0
rule_log_lik = rule_log_lik + pm.math.switch(
pm.math.or_(
pm.math.or_(pm.math.lt(a + b * 0, 0), pm.math.lt(a + b * xlow, 0)),
pm.math.or_(pm.math.gt(a + b * 0, ylow), pm.math.gt(a + b * xlow, ylow)),
),
1,
0,
)
rule_log_lik = rule_log_lik + pm.math.switch(
pm.math.or_(
pm.math.or_(pm.math.lt(a + b * xhi, yhi), pm.math.lt(a + b * 10, yhi)),
pm.math.or_(pm.math.gt(a + b * xhi, 22), pm.math.gt(a + b * 10, 22)),
),
1,
0,
)
rule_ratio = rule_log_lik / 2
return pm.Beta.dist(alpha=1.0, beta=100.0).logp(rule_ratio)
with pm.Model() as rule_model:
# Priors for unknown model parameters
a = pm.Normal("alpha", mu=0.5, sigma=0.5)
b = pm.Normal("beta", mu=0.5, sigma=0.5)
xlow = pm.TruncatedNormal("xlow", mu=1.5, sigma=0.5, lower=0)
xhi = pm.TruncatedNormal("xhi", mu=8.5, sigma=0.5, upper=10)
ylow = pm.Normal("ylow", mu=4.5, sigma=0.5)
yhi = pm.Normal("yhi", mu=18.5, sigma=0.5)
# sigma = pm.HalfNormal('sigma', sigma=1)
# Expected value of outcome
mu = a + b * x
Y_obs = pm.Normal("Y_obs", mu=mu, sigma=sigma_e, observed=y)
LL_rule = pm.Potential("LL_rule", logp_rule(a, b, xlow, xhi, ylow, yhi))
# MCMC
with rule_model:
step = pm.Metropolis()
trace = pm.sample(draws=100000, tune=20000, step=step, cores=1, chains=1)
thin = 100
fig = plt.figure(figsize=(6, 4))
ax = fig.gca()
for iter in range(int(trace["alpha"].shape[0] / thin)):
# select alternate samples to decrease auto corr for now
a = trace["alpha"][iter * thin]
b = trace["beta"][iter * thin]
yvals = a + b * xvals
plt.plot(xvals, yvals, color="red", alpha=0.1)
plt.plot(xvals, yvals, color="red", alpha=1, label="Posterior regression lines")
ymean = trace["alpha"].mean() + trace["beta"].mean() * xvals
plt.plot(xvals, ymean, color="yellow", label="Mean posterior regression line")
plt.scatter(x, y, color="blue", label="Data")
plt.plot(
x_disc, y_true, linewidth=3, color="darkolivegreen", label="True regression line"
)
ax.set_ylabel("y")
ax.set_xlabel("x")
ax.legend()
plt.show()
# Note: using the truncated Normal priors made no significant difference to the result.
# The posterior shape is virtually identical to that obtained with the Gaussian priors.
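# A plausible explanation: the truncation bounds sit about 3 standard deviations
# from the prior means (lower=0 vs. N(1.5, 0.5); upper=10 vs. N(8.5, 0.5)), so the
# truncation removes only ~0.1% of the prior mass and the posterior is essentially
# unchanged.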
| 31.55 | 95 | 0.593899 | 1,219 | 7,572 | 3.598852 | 0.137818 | 0.043766 | 0.022339 | 0.02553 | 0.838386 | 0.816959 | 0.816959 | 0.797812 | 0.791657 | 0.761112 | 0 | 0.037641 | 0.189514 | 7,572 | 239 | 96 | 31.682008 | 0.677204 | 0.125726 | 0 | 0.771605 | 0 | 0 | 0.096372 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012346 | false | 0 | 0.018519 | 0 | 0.04321 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0f0cea41b8697f6754c9a2555c19534f727d271a | 12,769 | py | Python | communicator/dynamo_primitive.py | DS3Lab/LambdaML | 0afca7819e08632ba116fec8e102084e4040a47a | [
"Apache-2.0"
] | 23 | 2021-05-17T09:24:24.000Z | 2022-01-29T18:40:44.000Z | communicator/dynamo_primitive.py | DS3Lab/LambdaML | 0afca7819e08632ba116fec8e102084e4040a47a | [
"Apache-2.0"
] | 2 | 2021-05-17T16:15:12.000Z | 2021-07-20T09:11:22.000Z | communicator/dynamo_primitive.py | DS3Lab/LambdaML | 0afca7819e08632ba116fec8e102084e4040a47a | [
"Apache-2.0"
] | 3 | 2021-05-17T09:31:53.000Z | 2021-12-02T16:29:59.000Z | import urllib
import numpy as np
from storage import DynamoTable
def async_reduce(table, vector, key_col, vector_name):
assert isinstance(table, DynamoTable)
# vector is supposed to be a 1-d numpy array
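    # load_or_wait polls every 0.1 s until an entry named vector_name exists;
    # this worker's own vector then overwrites it, i.e. an asynchronous exchange
    # of model state rather than a synchronized all-reduce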
vec_shape = vector.shape
vec_dtype = vector.dtype
data = table.load_or_wait(vector_name, key_col, 0.1)['value'].value
new_vec = np.frombuffer(data, dtype=vec_dtype).reshape(vec_shape)
table.save(vector.tobytes(), vector_name, key_col)
return new_vec
def reduce_batch(tmp_table, merged_table, vector, key_col, n_workers, worker_index, cur_epoch, cur_batch):
assert isinstance(tmp_table, DynamoTable)
assert isinstance(merged_table, DynamoTable)
# vector is supposed to be a 1-d numpy array
vec_shape = vector.shape
vec_dtype = vector.dtype
merged_vec = np.zeros(vec_shape, dtype=vec_dtype)
# put object to tmp table, format of key: workerID_epoch_batch
my_key = "{}_{}".format(cur_epoch, cur_batch)
tmp_table.save(vector.tobytes(), "{}_{}".format(worker_index, my_key), key_col)
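    # e.g. worker 2 at epoch 1, batch 5 writes its vector under the key "2_1_5"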
# the first worker read and aggregate
if worker_index == 0:
n_files = 0
while n_files < n_workers:
items = tmp_table.list()
if items is not None and len(items) > 0:
delete_keys = []
for item in items:
tmp_key = item[key_col]
key_splits = tmp_key.split("_")
key_epoch = key_splits[-2]
key_batch = key_splits[-1]
if key_epoch == str(cur_epoch) and key_batch == str(cur_batch):
bytes_data = item['value'].value
tmp_vec = np.frombuffer(bytes_data, dtype=vec_dtype).reshape(vec_shape)
merged_vec += tmp_vec
n_files += 1
delete_keys.append(tmp_key)
tmp_table.delete(delete_keys, key_col)
# write the merged data to merged table
merged_key = 'merged_{}'.format(my_key)
merged_table.save(merged_vec.tobytes(), merged_key, key_col)
delete_expired_batch(merged_table, key_col, cur_epoch, cur_batch)
else:
merged_key = 'merged_{}'.format(my_key)
merged_data = merged_table.load_or_wait(merged_key, key_col, 0.1)['value'].value
merged_vec = np.frombuffer(merged_data, dtype=vec_dtype).reshape(vec_shape)
return merged_vec
def reduce_epoch(tmp_table, merged_table, vector, key_col, n_workers, worker_index, cur_epoch):
assert isinstance(tmp_table, DynamoTable)
assert isinstance(merged_table, DynamoTable)
# vector is supposed to be a 1-d numpy array
vec_shape = vector.shape
vec_dtype = vector.dtype
merged_vec = np.zeros(vec_shape, dtype=vec_dtype)
# put object to tmp table, format of key: workerID_epoch
key = str(cur_epoch)
tmp_table.save(vector.tobytes(), "{}_{}".format(worker_index, key), key_col)
# the first worker read and aggregate
if worker_index == 0:
n_files = 0
while n_files < n_workers:
items = tmp_table.list()
if items is not None and len(items) > 0:
delete_keys = []
for item in items:
tmp_key = item[key_col]
key_splits = tmp_key.split("_")
key_epoch = key_splits[-1]
if key_epoch == str(cur_epoch):
bytes_data = item['value'].value
tmp_vec = np.frombuffer(bytes_data, dtype=vec_dtype).reshape(vec_shape)
merged_vec += tmp_vec
n_files += 1
delete_keys.append(tmp_key)
tmp_table.delete(delete_keys, key_col)
# write the merged data to merged table
merged_key = 'merged_{}'.format(key)
merged_table.save(merged_vec.tobytes(), merged_key, key_col)
delete_expired_epoch(merged_table, key_col, cur_epoch)
else:
merged_key = 'merged_{}'.format(key)
merged_data = merged_table.load_or_wait(merged_key, key_col, 0.1)['value'].value
merged_vec = np.frombuffer(merged_data, dtype=vec_dtype).reshape(vec_shape)
return merged_vec
# delete the merged values of steps *older* than the current one
def delete_expired_batch(table, key_col, cur_epoch, cur_batch):
assert isinstance(table, DynamoTable)
items = table.list()
if items is not None and len(items) > 0:
delete_keys = []
for item in items:
key = item[key_col]
key_splits = key.split("_")
key_batch = int(key_splits[-1])
key_epoch = int(key_splits[-2])
if key_epoch < cur_epoch or (key_epoch == cur_epoch and key_batch < cur_batch):
delete_keys.append(key)
if len(delete_keys) >= 1:
table.delete(delete_keys, key_col)
return True
def delete_expired_epoch(table, key_col, cur_epoch):
assert isinstance(table, DynamoTable)
items = table.list()
if items is not None and len(items) > 0:
delete_keys = []
for item in items:
key = item[key_col]
key_splits = key.split("_")
key_epoch = int(key_splits[-1])
if key_epoch < cur_epoch:
delete_keys.append(key)
if len(delete_keys) >= 1:
table.delete(delete_keys, key_col)
return True
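# The two reduce-scatter functions below follow the classic reduce-scatter +
# all-gather pattern: each worker keeps its own chunk, scatters the other
# n_workers - 1 chunks to the tmp table, aggregates every copy of the chunk it
# is responsible for, publishes the merged chunk, and finally gathers the other
# workers' merged chunks to reconstruct the full vector.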
def reduce_scatter_batch(tmp_table, merged_table, vector, key_col, n_workers, worker_index, cur_epoch, cur_batch):
assert isinstance(tmp_table, DynamoTable)
assert isinstance(merged_table, DynamoTable)
# vector is supposed to be a 1-d numpy array
vec_size = vector.size
vec_size_per_worker = vec_size // n_workers
vec_size_residue = vec_size % n_workers
postfix = "{}_{}".format(cur_epoch, cur_batch)
my_offset = (vec_size_per_worker * worker_index) + min(vec_size_residue, worker_index)
my_length = vec_size_per_worker + (1 if worker_index < vec_size_residue else 0)
my_chunk = vector[my_offset: my_offset + my_length]
my_chunk_shape = my_chunk.shape
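    # e.g. vec_size=10, n_workers=3: vec_size_per_worker=3, vec_size_residue=1,
    # so worker 0 owns vector[0:4], worker 1 owns vector[4:7], worker 2 owns
    # vector[7:10] -- the residue is spread over the lowest-indexed workers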
    # write the partitioned vector to the shared storage, except for the chunk this worker is responsible for
for i in range(n_workers):
if i != worker_index:
offset = (vec_size_per_worker * i) + min(vec_size_residue, i)
length = vec_size_per_worker + (1 if i < vec_size_residue else 0)
# indicating the chunk number and which worker it comes from
# format of key in tmp-bucket: chunkID_workerID_epoch_batch
chunk_id = i
tmp_key = "{}_{}_{}".format(chunk_id, worker_index, postfix)
tmp_table.save(vector[offset: offset + length].tobytes(), tmp_key, key_col)
# read and aggregate the corresponding chunk
n_files = 0
while n_files < n_workers - 1:
tmp_items = tmp_table.list()
if tmp_items is not None and len(tmp_items) > 0:
delete_keys = []
for tmp_item in tmp_items:
tmp_key = tmp_item[key_col]
key_splits = tmp_key.split("_")
# if it's the responsible chunk and it is from the current step
# format of key in tmp-bucket: chunkID_workerID_epoch_batch
if key_splits[0] == str(worker_index) \
and key_splits[-2] == str(cur_epoch) \
and key_splits[-1] == str(cur_batch):
bytes_data = tmp_item['value'].value
tmp_vec = np.frombuffer(bytes_data, dtype=vector.dtype).reshape(my_chunk_shape)
my_chunk = my_chunk + tmp_vec
n_files += 1
delete_keys.append(tmp_key)
tmp_table.delete(delete_keys, key_col)
# write the aggregated chunk back
# key format in merged_bucket: chunkID_epoch_batch
merged_key = "{}_{}".format(worker_index, postfix)
merged_table.save(my_chunk.tobytes(), merged_key, key_col)
# read other aggregated chunks
merged_value = dict()
merged_value[worker_index] = my_chunk
n_merged_keys = 0
read_keys = []
while n_merged_keys < n_workers - 1:
merged_items = merged_table.list()
if merged_items is not None and len(merged_items) > 0:
for merged_item in merged_items:
merged_key = merged_item[key_col]
key_splits = merged_key.split("_")
# key format in merged_bucket: chunkID_epoch_batch
                # skip this worker's own chunk and any merged chunk already read
if key_splits[0] != str(worker_index) and key_splits[-2] == str(cur_epoch) and \
key_splits[-1] == str(cur_batch) and merged_key not in read_keys:
bytes_data = merged_item['value'].value
merged_value[int(key_splits[0])] = np.frombuffer(bytes_data, dtype=vector.dtype)
read_keys.append(merged_key)
n_merged_keys += 1
# reconstruct the whole vector
result = merged_value[0]
for k in range(1, n_workers):
result = np.concatenate((result, merged_value[k]))
return result
def reduce_scatter_epoch(tmp_table, merged_table, vector, key_col, n_workers, worker_index, cur_epoch):
assert isinstance(tmp_table, DynamoTable)
assert isinstance(merged_table, DynamoTable)
# vector is supposed to be a 1-d numpy array
vec_size = vector.size
vec_size_per_worker = vec_size // n_workers
vec_size_residue = vec_size % n_workers
my_offset = (vec_size_per_worker * worker_index) + min(vec_size_residue, worker_index)
my_length = vec_size_per_worker + (1 if worker_index < vec_size_residue else 0)
my_chunk = vector[my_offset: my_offset + my_length]
my_chunk_shape = my_chunk.shape
    # write the partitioned vector to the shared storage, except for the chunk this worker is responsible for
for i in range(n_workers):
if i != worker_index:
offset = (vec_size_per_worker * i) + min(vec_size_residue, i)
length = vec_size_per_worker + (1 if i < vec_size_residue else 0)
# indicating the chunk number and which worker it comes from
chunk_id = i
tmp_key = "{}_{}_{}".format(chunk_id, worker_index, cur_epoch)
# format of key in tmp-bucket: chunkID_workerID_epoch
tmp_table.save(vector[offset: offset + length].tobytes(), tmp_key, key_col)
# read and aggregate the corresponding chunk
n_merged_keys = 0
while n_merged_keys < n_workers - 1:
tmp_items = tmp_table.list()
delete_keys = []
if tmp_items is not None and len(tmp_items) > 0:
for tmp_item in tmp_items:
tmp_key = tmp_item[key_col]
key_splits = tmp_key.split("_")
# if it's the responsible chunk and it is from the current step
# format of key in tmp-bucket: chunkID_workerID_epoch
                if key_splits[0] == str(worker_index) and key_splits[-1] == str(cur_epoch):
                    bytes_data = tmp_item['value'].value
                    my_chunk = my_chunk + np.frombuffer(bytes_data, dtype=vector.dtype)
                    n_merged_keys += 1
                    # mark the key as consumed so the chunk is not aggregated twice
                    delete_keys.append(tmp_key)
            tmp_table.delete(delete_keys, key_col)
# write the aggregated chunk back
# key format in merged_bucket: chunkID_epoch
merged_key = "{}_{}".format(worker_index, cur_epoch)
merged_table.save(my_chunk.tobytes(), merged_key, key_col)
# read other aggregated chunks
merged_value = dict()
merged_value[worker_index] = my_chunk
n_merged_keys = 0
read_keys = []
while n_merged_keys < n_workers - 1:
merged_items = merged_table.list()
if merged_items is not None and len(merged_items) > 0:
for merged_item in merged_items:
merged_key = merged_item[key_col]
key_splits = merged_key.split("_")
# key format in merged_bucket: chunkID_epoch
                # skip this worker's own chunk and any merged chunk already read
if (key_splits[0]).isdigit() and key_splits[0] != str(worker_index) and key_splits[-1] == str(cur_epoch) \
and merged_key not in read_keys:
bytes_data = merged_item['value'].value
merged_value[int(key_splits[0])] = np.frombuffer(bytes_data, dtype=vector.dtype)
read_keys.append(merged_key)
n_merged_keys += 1
# reconstruct the whole vector
result = merged_value[0]
for k in range(1, n_workers):
result = np.concatenate((result, merged_value[k]))
return result
| 42.421927 | 122 | 0.627614 | 1,755 | 12,769 | 4.260399 | 0.076923 | 0.028086 | 0.012037 | 0.021399 | 0.929517 | 0.894343 | 0.877223 | 0.853818 | 0.831216 | 0.81851 | 0 | 0.007675 | 0.285692 | 12,769 | 300 | 123 | 42.563333 | 0.812082 | 0.138304 | 0 | 0.782407 | 0 | 0 | 0.012311 | 0 | 0 | 0 | 0 | 0 | 0.050926 | 1 | 0.032407 | false | 0 | 0.013889 | 0 | 0.078704 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0f2c73753c002f57fccc2dcfc3b2919532b3e568 | 1,885 | py | Python | wrappers/python/tests/ledger/test_build_revoc_reg_def_request.py | absltkaos/indy-sdk | bc14c5b514dc1c76ce62dd7f6bf804120bf69f5e | [
"Apache-2.0"
] | 5 | 2018-04-09T12:26:28.000Z | 2019-06-12T01:45:30.000Z | wrappers/python/tests/ledger/test_build_revoc_reg_def_request.py | absltkaos/indy-sdk | bc14c5b514dc1c76ce62dd7f6bf804120bf69f5e | [
"Apache-2.0"
] | 9 | 2019-01-22T22:31:54.000Z | 2019-04-11T21:45:09.000Z | wrappers/python/tests/ledger/test_build_revoc_reg_def_request.py | absltkaos/indy-sdk | bc14c5b514dc1c76ce62dd7f6bf804120bf69f5e | [
"Apache-2.0"
] | 19 | 2018-04-25T16:08:43.000Z | 2022-01-11T10:18:38.000Z | from indy import ledger
import json
import pytest
@pytest.mark.asyncio
async def test_build_revoc_reg_def_request_work():
identifier = "Th7MpTaRZVRYnPiabds81Y"
data = {
"ver": "1.0",
"id": "RevocRegID",
"revocDefType": "CL_ACCUM",
"tag": "TAG1",
"credDefId": "CredDefID",
"value": {
"issuanceType": "ISSUANCE_ON_DEMAND",
"maxCredNum": 5,
"tailsHash": "s",
"tailsLocation": "http://tails.location.com",
"publicKeys": {
"accumKey": {
"z": "1 0000000000000000000000000000000000000000000000000000000000001111 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000 1 0000000000000000000000000000000000000000000000000000000000000000"
}
}
}
}
expected_response = {
"operation": {
"credDefId": data["credDefId"],
"id": data["id"],
"revocDefType": data["revocDefType"],
"tag": data["tag"],
"type": "113",
"value": data["value"]
}
}
request = json.loads(await ledger.build_revoc_reg_def_request(identifier, json.dumps(data)))
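    # dict_items supports set-style comparison: `<=` asserts that every
    # key/value pair of expected_response appears in the built request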
assert expected_response.items() <= request.items()
| 43.837209 | 830 | 0.701326 | 116 | 1,885 | 11.267241 | 0.508621 | 0.547054 | 0.504973 | 0.994644 | 0.582249 | 0.547054 | 0.547054 | 0.547054 | 0.547054 | 0.547054 | 0 | 0.532345 | 0.212732 | 1,885 | 42 | 831 | 44.880952 | 0.348383 | 0 | 0 | 0 | 0 | 0.027778 | 0.571883 | 0.419098 | 0 | 0 | 0 | 0 | 0.027778 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0f50bc1cf8498a0fdc6ea5e1069568312cd5b75d | 55,508 | py | Python | ocbind/lacp/interfaces/interface/config/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 1 | 2019-08-01T17:42:57.000Z | 2019-08-01T17:42:57.000Z | ocbind/lacp/interfaces/interface/config/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 1 | 2021-05-26T16:38:04.000Z | 2021-05-26T16:38:04.000Z | ocbind/lacp/interfaces/interface/config/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 4 | 2019-07-24T16:52:39.000Z | 2021-12-03T02:08:13.000Z | # -*- coding: utf-8 -*-
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-lacp - based on the path /lacp/interfaces/interface/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration data for each LACP aggregate interface
"""
__slots__ = ('_path_helper', '_extmethods', '__name','__interval','__lacp_mode','__system_id_mac','__system_priority',)
_yang_name = 'config'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__name = YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
self.__lacp_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
self.__system_id_mac = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
self.__system_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['lacp', 'interfaces', 'interface', 'config']
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /lacp/interfaces/interface/config/name (oc-if:base-interface-ref)
YANG Description: Reference to the interface on which LACP should be
configured. The type of the target interface must be
ieee8023adLag
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /lacp/interfaces/interface/config/name (oc-if:base-interface-ref)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: Reference to the interface on which LACP should be
configured. The type of the target interface must be
ieee8023adLag
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """name must be of a type compatible with oc-if:base-interface-ref""",
'defined-type': "oc-if:base-interface-ref",
'generated-type': """YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)""",
})
self.__name = t
if hasattr(self, '_set'):
self._set()
def _unset_name(self):
self.__name = YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
def _get_interval(self):
"""
Getter method for interval, mapped from YANG variable /lacp/interfaces/interface/config/interval (lacp-period-type)
YANG Description: Set the period between LACP messages -- uses
the lacp-period-type enumeration.
"""
return self.__interval
def _set_interval(self, v, load=False):
"""
Setter method for interval, mapped from YANG variable /lacp/interfaces/interface/config/interval (lacp-period-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interval() directly.
YANG Description: Set the period between LACP messages -- uses
the lacp-period-type enumeration.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interval must be of a type compatible with lacp-period-type""",
'defined-type': "openconfig-lacp:lacp-period-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)""",
})
self.__interval = t
if hasattr(self, '_set'):
self._set()
def _unset_interval(self):
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
def _get_lacp_mode(self):
"""
Getter method for lacp_mode, mapped from YANG variable /lacp/interfaces/interface/config/lacp_mode (lacp-activity-type)
YANG Description: ACTIVE is to initiate the transmission of LACP packets.
PASSIVE is to wait for peer to initiate the transmission of
LACP packets.
"""
return self.__lacp_mode
def _set_lacp_mode(self, v, load=False):
"""
Setter method for lacp_mode, mapped from YANG variable /lacp/interfaces/interface/config/lacp_mode (lacp-activity-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_lacp_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lacp_mode() directly.
YANG Description: ACTIVE is to initiate the transmission of LACP packets.
PASSIVE is to wait for peer to initiate the transmission of
LACP packets.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lacp_mode must be of a type compatible with lacp-activity-type""",
'defined-type': "openconfig-lacp:lacp-activity-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)""",
})
self.__lacp_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_lacp_mode(self):
self.__lacp_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
def _get_system_id_mac(self):
"""
Getter method for system_id_mac, mapped from YANG variable /lacp/interfaces/interface/config/system_id_mac (oc-yang:mac-address)
YANG Description: The MAC address portion of the node's System ID. This is
combined with the system priority to construct the 8-octet
system-id
"""
return self.__system_id_mac
def _set_system_id_mac(self, v, load=False):
"""
Setter method for system_id_mac, mapped from YANG variable /lacp/interfaces/interface/config/system_id_mac (oc-yang:mac-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_system_id_mac is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_system_id_mac() directly.
YANG Description: The MAC address portion of the node's System ID. This is
combined with the system priority to construct the 8-octet
system-id
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system_id_mac must be of a type compatible with oc-yang:mac-address""",
'defined-type': "oc-yang:mac-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)""",
})
self.__system_id_mac = t
if hasattr(self, '_set'):
self._set()
def _unset_system_id_mac(self):
self.__system_id_mac = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
def _get_system_priority(self):
"""
Getter method for system_priority, mapped from YANG variable /lacp/interfaces/interface/config/system_priority (uint16)
YANG Description: System priority used by the node on this LAG interface.
Lower value is higher priority for determining which node
is the controlling system.
"""
return self.__system_priority
def _set_system_priority(self, v, load=False):
"""
Setter method for system_priority, mapped from YANG variable /lacp/interfaces/interface/config/system_priority (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_system_priority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_system_priority() directly.
YANG Description: System priority used by the node on this LAG interface.
Lower value is higher priority for determining which node
is the controlling system.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system_priority must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)""",
})
self.__system_priority = t
if hasattr(self, '_set'):
self._set()
def _unset_system_priority(self):
self.__system_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
name = __builtin__.property(_get_name, _set_name)
interval = __builtin__.property(_get_interval, _set_interval)
lacp_mode = __builtin__.property(_get_lacp_mode, _set_lacp_mode)
system_id_mac = __builtin__.property(_get_system_id_mac, _set_system_id_mac)
system_priority = __builtin__.property(_get_system_priority, _set_system_priority)
_pyangbind_elements = OrderedDict([('name', name), ('interval', interval), ('lacp_mode', lacp_mode), ('system_id_mac', system_id_mac), ('system_priority', system_priority), ])
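# A minimal usage sketch (assuming pyangbind and its dependencies are installed):
# assigning through the generated properties triggers the YANG type checks above.
#   c = config()
#   c.lacp_mode = 'PASSIVE'      # accepted: a valid lacp-activity-type value
#   c.system_priority = 70000    # raises ValueError: outside the uint16 range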
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-lacp - based on the path /lacp/interfaces/interface/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration data for each LACP aggregate interface
"""
__slots__ = ('_path_helper', '_extmethods', '__name','__interval','__lacp_mode','__system_id_mac','__system_priority',)
_yang_name = 'config'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__name = YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
self.__lacp_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
self.__system_id_mac = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
self.__system_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['lacp', 'interfaces', 'interface', 'config']
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /lacp/interfaces/interface/config/name (oc-if:base-interface-ref)
YANG Description: Reference to the interface on which LACP should be
configured. The type of the target interface must be
ieee8023adLag
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /lacp/interfaces/interface/config/name (oc-if:base-interface-ref)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: Reference to the interface on which LACP should be
configured. The type of the target interface must be
ieee8023adLag
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """name must be of a type compatible with oc-if:base-interface-ref""",
'defined-type': "oc-if:base-interface-ref",
'generated-type': """YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)""",
})
self.__name = t
if hasattr(self, '_set'):
self._set()
def _unset_name(self):
self.__name = YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
def _get_interval(self):
"""
Getter method for interval, mapped from YANG variable /lacp/interfaces/interface/config/interval (lacp-period-type)
YANG Description: Set the period between LACP messages -- uses
the lacp-period-type enumeration.
"""
return self.__interval
def _set_interval(self, v, load=False):
"""
Setter method for interval, mapped from YANG variable /lacp/interfaces/interface/config/interval (lacp-period-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interval() directly.
YANG Description: Set the period between LACP messages -- uses
the lacp-period-type enumeration.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interval must be of a type compatible with lacp-period-type""",
'defined-type': "openconfig-lacp:lacp-period-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)""",
})
self.__interval = t
if hasattr(self, '_set'):
self._set()
def _unset_interval(self):
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
def _get_lacp_mode(self):
"""
Getter method for lacp_mode, mapped from YANG variable /lacp/interfaces/interface/config/lacp_mode (lacp-activity-type)
YANG Description: ACTIVE is to initiate the transmission of LACP packets.
PASSIVE is to wait for peer to initiate the transmission of
LACP packets.
"""
return self.__lacp_mode
def _set_lacp_mode(self, v, load=False):
"""
Setter method for lacp_mode, mapped from YANG variable /lacp/interfaces/interface/config/lacp_mode (lacp-activity-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_lacp_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lacp_mode() directly.
YANG Description: ACTIVE is to initiate the transmission of LACP packets.
PASSIVE is to wait for peer to initiate the transmission of
LACP packets.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lacp_mode must be of a type compatible with lacp-activity-type""",
'defined-type': "openconfig-lacp:lacp-activity-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)""",
})
self.__lacp_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_lacp_mode(self):
self.__lacp_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
def _get_system_id_mac(self):
"""
Getter method for system_id_mac, mapped from YANG variable /lacp/interfaces/interface/config/system_id_mac (oc-yang:mac-address)
YANG Description: The MAC address portion of the node's System ID. This is
combined with the system priority to construct the 8-octet
system-id
"""
return self.__system_id_mac
def _set_system_id_mac(self, v, load=False):
"""
Setter method for system_id_mac, mapped from YANG variable /lacp/interfaces/interface/config/system_id_mac (oc-yang:mac-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_system_id_mac is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_system_id_mac() directly.
YANG Description: The MAC address portion of the node's System ID. This is
combined with the system priority to construct the 8-octet
system-id
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system_id_mac must be of a type compatible with oc-yang:mac-address""",
'defined-type': "oc-yang:mac-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)""",
})
self.__system_id_mac = t
if hasattr(self, '_set'):
self._set()
def _unset_system_id_mac(self):
self.__system_id_mac = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
def _get_system_priority(self):
"""
Getter method for system_priority, mapped from YANG variable /lacp/interfaces/interface/config/system_priority (uint16)
YANG Description: System priority used by the node on this LAG interface.
Lower value is higher priority for determining which node
is the controlling system.
"""
return self.__system_priority
def _set_system_priority(self, v, load=False):
"""
Setter method for system_priority, mapped from YANG variable /lacp/interfaces/interface/config/system_priority (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_system_priority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_system_priority() directly.
YANG Description: System priority used by the node on this LAG interface.
Lower value is higher priority for determining which node
is the controlling system.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system_priority must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)""",
})
self.__system_priority = t
if hasattr(self, '_set'):
self._set()
def _unset_system_priority(self):
self.__system_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
name = __builtin__.property(_get_name, _set_name)
interval = __builtin__.property(_get_interval, _set_interval)
lacp_mode = __builtin__.property(_get_lacp_mode, _set_lacp_mode)
system_id_mac = __builtin__.property(_get_system_id_mac, _set_system_id_mac)
system_priority = __builtin__.property(_get_system_priority, _set_system_priority)
_pyangbind_elements = OrderedDict([('name', name), ('interval', interval), ('lacp_mode', lacp_mode), ('system_id_mac', system_id_mac), ('system_priority', system_priority), ])
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-lacp - based on the path /lacp/interfaces/interface/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration data for each LACP aggregate interface
"""
__slots__ = ('_path_helper', '_extmethods', '__name','__interval','__lacp_mode','__system_id_mac','__system_priority',)
_yang_name = 'config'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__name = YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
self.__lacp_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
self.__system_id_mac = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
self.__system_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['lacp', 'interfaces', 'interface', 'config']
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /lacp/interfaces/interface/config/name (oc-if:base-interface-ref)
YANG Description: Reference to the interface on which LACP should be
configured. The type of the target interface must be
ieee8023adLag
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /lacp/interfaces/interface/config/name (oc-if:base-interface-ref)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: Reference to the interface on which LACP should be
configured. The type of the target interface must be
ieee8023adLag
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """name must be of a type compatible with oc-if:base-interface-ref""",
'defined-type': "oc-if:base-interface-ref",
'generated-type': """YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)""",
})
self.__name = t
if hasattr(self, '_set'):
self._set()
def _unset_name(self):
self.__name = YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['name'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-if:base-interface-ref', is_config=True)
def _get_interval(self):
"""
Getter method for interval, mapped from YANG variable /lacp/interfaces/interface/config/interval (lacp-period-type)
YANG Description: Set the period between LACP messages -- uses
the lacp-period-type enumeration.
"""
return self.__interval
def _set_interval(self, v, load=False):
"""
Setter method for interval, mapped from YANG variable /lacp/interfaces/interface/config/interval (lacp-period-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interval() directly.
YANG Description: Set the period between LACP messages -- uses
the lacp-period-type enumeration.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interval must be of a type compatible with lacp-period-type""",
'defined-type': "openconfig-lacp:lacp-period-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)""",
})
self.__interval = t
if hasattr(self, '_set'):
self._set()
def _unset_interval(self):
self.__interval = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'FAST': {}, 'SLOW': {}},), default=six.text_type("SLOW"), is_leaf=True, yang_name="interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-period-type', is_config=True)
def _get_lacp_mode(self):
"""
Getter method for lacp_mode, mapped from YANG variable /lacp/interfaces/interface/config/lacp_mode (lacp-activity-type)
YANG Description: ACTIVE is to initiate the transmission of LACP packets.
PASSIVE is to wait for the peer to initiate the transmission of
LACP packets.
"""
return self.__lacp_mode
def _set_lacp_mode(self, v, load=False):
"""
Setter method for lacp_mode, mapped from YANG variable /lacp/interfaces/interface/config/lacp_mode (lacp-activity-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_lacp_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lacp_mode() directly.
YANG Description: ACTIVE is to initiate the transmission of LACP packets.
PASSIVE is to wait for the peer to initiate the transmission of
LACP packets.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lacp_mode must be of a type compatible with lacp-activity-type""",
'defined-type': "openconfig-lacp:lacp-activity-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)""",
})
self.__lacp_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_lacp_mode(self):
self.__lacp_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACTIVE': {}, 'PASSIVE': {}},), default=six.text_type("ACTIVE"), is_leaf=True, yang_name="lacp-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='lacp-activity-type', is_config=True)
def _get_system_id_mac(self):
"""
Getter method for system_id_mac, mapped from YANG variable /lacp/interfaces/interface/config/system_id_mac (oc-yang:mac-address)
YANG Description: The MAC address portion of the node's System ID. This is
combined with the system priority to construct the 8-octet
system-id
"""
return self.__system_id_mac
def _set_system_id_mac(self, v, load=False):
"""
Setter method for system_id_mac, mapped from YANG variable /lacp/interfaces/interface/config/system_id_mac (oc-yang:mac-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_system_id_mac is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_system_id_mac() directly.
YANG Description: The MAC address portion of the node's System ID. This is
combined with the system priority to construct the 8-octet
system-id
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system_id_mac must be of a type compatible with oc-yang:mac-address""",
'defined-type': "oc-yang:mac-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)""",
})
self.__system_id_mac = t
if hasattr(self, '_set'):
self._set()
def _unset_system_id_mac(self):
self.__system_id_mac = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}$'}), is_leaf=True, yang_name="system-id-mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='oc-yang:mac-address', is_config=True)
def _get_system_priority(self):
"""
Getter method for system_priority, mapped from YANG variable /lacp/interfaces/interface/config/system_priority (uint16)
YANG Description: System priority used by the node on this LAG interface.
Lower value is higher priority for determining which node
is the controlling system.
"""
return self.__system_priority
def _set_system_priority(self, v, load=False):
"""
Setter method for system_priority, mapped from YANG variable /lacp/interfaces/interface/config/system_priority (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_system_priority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_system_priority() directly.
YANG Description: System priority used by the node on this LAG interface.
Lower value is higher priority for determining which node
is the controlling system.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system_priority must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)""",
})
self.__system_priority = t
if hasattr(self, '_set'):
self._set()
def _unset_system_priority(self):
self.__system_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="system-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='uint16', is_config=True)
name = __builtin__.property(_get_name, _set_name)
interval = __builtin__.property(_get_interval, _set_interval)
lacp_mode = __builtin__.property(_get_lacp_mode, _set_lacp_mode)
system_id_mac = __builtin__.property(_get_system_id_mac, _set_system_id_mac)
system_priority = __builtin__.property(_get_system_priority, _set_system_priority)
_pyangbind_elements = OrderedDict([('name', name), ('interval', interval), ('lacp_mode', lacp_mode), ('system_id_mac', system_id_mac), ('system_priority', system_priority), ])
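# A minimal usage sketch of the generated bindings above (illustrative only;
# the values are placeholders, not part of the pyangbind output):
#
#     cfg = config()
#     cfg.interval = 'FAST'                     # overrides the 'SLOW' default
#     cfg.lacp_mode = 'PASSIVE'                 # must be a key of the enum dict
#     cfg.system_id_mac = '00:11:22:33:44:55'   # must match the MAC-address regex
#     try:
#         cfg.system_priority = 70000           # rejected: outside the uint16 range
#     except ValueError as err:
#         print(err.args[0]['error-string'])    # setters raise ValueError with a dict payload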
| 65.612293 | 539 | 0.712023 | 7,450 | 55,508 | 5.068859 | 0.031275 | 0.043694 | 0.053386 | 0.034319 | 0.98901 | 0.983264 | 0.983264 | 0.983264 | 0.983264 | 0.983264 | 0 | 0.006027 | 0.160067 | 55,508 | 845 | 540 | 65.689941 | 0.803938 | 0.232183 | 0 | 0.957303 | 0 | 0.053933 | 0.334057 | 0.126572 | 0 | 0 | 0 | 0 | 0 | 1 | 0.114607 | false | 0.026966 | 0.035955 | 0 | 0.265169 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0f5d626e539afa328b4bc357c929a9e70d6af7c8 | 22,807 | py | Python | RunConfig.py | avmoldovan/CNN-TE | 87fd7f97e4a7b16617b0404a34292dae03b55ece | [
"Apache-2.0"
] | 1 | 2022-03-15T06:22:35.000Z | 2022-03-15T06:22:35.000Z | RunConfig.py | avmoldovan/CNN-TE | 87fd7f97e4a7b16617b0404a34292dae03b55ece | [
"Apache-2.0"
] | null | null | null | RunConfig.py | avmoldovan/CNN-TE | 87fd7f97e4a7b16617b0404a34292dae03b55ece | [
"Apache-2.0"
] | null | null | null | settings = {
'USPS' : {
'dataset': 'USPS',
'lr': 0.008,
'momentum' : 0.0,
'weight_decay' : 0.0,
'lr_decay' : 0.1,
'lr_decay_epochs': 2,
'data' : './/data/usps',
'dropout1' : 0.25,
'dropout2' : 0.5,
'resume' : False,
'num_classes' : 10,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNet-USPS',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
'run_title' : 'noTE-usps-mom0-lrd0-b100-do1025-d20-wd0-lr0001-gpu-60ep',
'trainingset_size' : 60000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 60,#90,
'pretrained': False,
'pretrained_url': 'noTE-subset5k-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 6000,
'batch_size' : 100,
'use_subset' : True,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 1.0,
'tr2' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 500,
'clean_window': False,
'fc8rate': 0,
'save_model': False,
'forward': True,
'useTIE' : True,
'withte': True,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
'SVHN' : {
'dataset': 'SVHN',
'lr': 0.001,
'momentum' : 0.9,
'weight_decay' : 0.1,
'lr_decay' : 0.0,
'lr_decay_epochs': 20,
'data' : '.\\data',
'dropout1' : 0.3,
'dropout2' : 0.0,
'resume' : False,
'num_classes' : 10,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNetFB',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
#'run_title' : 'CF10-TEhookbw-fc8-smx-WeqWmulEye-TEWINDOW-TR0.5-TR20.9-telen4096-noROLL-mom09-lrd0.1by20-b128-do10-do20-wd00005-lr0.01-gpu-35ep',
'run_title' : 'CF10-TEhookbw-fc8-smx-WeqWmulEye1.2-epoch-TR1.0-TR20.9-telen4096-rolling-mom09-lrd0.1by20-b128-do10-do20-wd00005-lr0.01-cpu-35ep',
'trainingset_size' : 50000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 150,#90,
'pretrained': False,
'pretrained_url': 'noTE-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 512,
'batch_size' : 200,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 1.0,
'tr2' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 4096,
'clean_window': False,
'fc8rate': 0,
'save_model': False,
'forward': True,
'withte': True,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
'STL10' : {
'dataset': 'STL10',
'lr': 0.001,
'momentum' : 0.9,
'weight_decay' : 0.0,
'lr_decay' : 0.0,
'lr_decay_epochs': 20,
'data' : '.\\data',
'dropout1' : 0.0,
'dropout2' : 0.0,
'resume' : False,
'num_classes' : 10,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNetFB',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
#'run_title' : 'CF10-TEhookbw-fc8-smx-WeqWmulEye-TEWINDOW-TR0.5-TR20.9-telen4096-noROLL-mom09-lrd0.1by20-b128-do10-do20-wd00005-lr0.01-gpu-35ep',
'run_title' : 'CF10-TEhookbw-fc8-smx-WeqWmulEye1.2-epoch-TR1.0-TR20.9-telen4096-rolling-mom09-lrd0.1by20-b128-do10-do20-wd00005-lr0.01-cpu-35ep',
'trainingset_size' : 50000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 150,#90,
'pretrained': False,
'pretrained_url': 'noTE-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 512,
'batch_size' : 200,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 1.0,
'tr2' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 4096,
'clean_window': False,
'fc8rate': 0,
'save_model': False,
'forward': True,
'withte': True,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
'CIFAR10' : {
'dataset': 'CIFAR10',
'lr': 0.01,
'momentum' : 0.9,
'weight_decay' : 0.0005,
'lr_decay' : 0.1,
'lr_decay_epochs': 20,
'data' : '.\\data',
'dropout1' : 0.0,
'dropout2' : 0.0,
'resume' : False,
'num_classes' : 10,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNetFB',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
#'run_title' : 'CF10-TEhookbw-fc8-smx-WeqWmulEye-TEWINDOW-TR0.5-TR20.9-telen4096-noROLL-mom09-lrd0.1by20-b128-do10-do20-wd00005-lr0.01-gpu-35ep',
'run_title' : 'CF10-TEhookbw-fc8-smx-WeqWmulEye1.2-epoch-TR1.0-TR20.9-telen4096-rolling-mom09-lrd0.1by20-b128-do10-do20-wd00005-lr0.01-cpu-35ep',
'trainingset_size' : 50000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 35,#90,
'pretrained': False,
'pretrained_url': 'noTE-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 512,
'batch_size' : 128,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 1.0,
'tr2' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 512,
'clean_window': False,
'fc8rate': 0,
'save_model': False,
'forward': True,
'withte': True,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
'CIFAR100' : {
'dataset': 'CIFAR100',
'lr': 0.01,
'momentum' : 0.9,
'weight_decay' : 0.,#0.0005,
'lr_decay' : 0.,#0.1,
'lr_decay_epochs': 30,
'data' : '.\\data',
'dropout1' : 0.,#0.5,
'dropout2' : 0.,#0.5,
'resume' : False,
'num_classes' : 100,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNetFB',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
'run_title' : 'CF10tiny-NOTE-smxG01-telen128-withparams-mom09-wd0-lrd0-b128-do0-lr0001-gpu-60ep',
'trainingset_size' : 50000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 2,#90,
'pretrained': False,
'pretrained_url': 'noTE-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 3000,
'batch_size' : 128,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 0.01,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 512,
'clean_window': False,
'fc8rate': 0,
'save_model': False,
'forward': True,
'withte': False,
'fc7rate': 0,
'fc7te' : True,
'debug' : False
},
'fashionMNIST' : {
'dataset': 'FashionMNIST',
'lr': 0.008,
'momentum' : 0.0,
'weight_decay' : 0.0,
'lr_decay' : 0.1,
'lr_decay_epochs': 2,
'data' : './/data/fashionMNIST',
'dropout1' : 0.25,
'dropout2' : 0.5,
'resume' : False,
'num_classes' : 10,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNet-FashionMNIST',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
'run_title' : 'withTE-fashionmnist-subset-withparams-b4-do05-lr001-gpu-20ep',
'trainingset_size' : 60000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 60,#90,
'pretrained': False,
'pretrained_url': 'noTE-subset5k-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 60000,
'batch_size' : 100,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 1.0,
'tr2' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 500,
'clean_window': False,
'fc8rate': 0,
'save_model': False,
'forward': True,
'withte': True,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
'SMALLMNIST' : {
'dataset': 'MNIST',
'lr': 0.001,
'momentum' : 0.9,
'weight_decay' : 0.0005,
'lr_decay' : 10,
'data' : '.\\data',
'dropout1' : 0.25,
'dropout2' : 0.5,
'resume' : False,
'num_classes' : 10,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNetFB',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
'run_title' : 'withTE-mnist-C10-withparams-tel512-mom09-wd00005-lrd10-b4-do05-lr001-gpu-20ep',
'trainingset_size' : 50000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 20,#90,
'pretrained': False,
'pretrained_url': 'noTE-subset5k-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 50000,
'batch_size' : 64,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 512,
'fc8rate': 0,
'save_model': False,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
'MNIST' : {
'dataset': 'MNIST',
'lr': 0.001,
'momentum' : 0.9,
'weight_decay' : 0.0005,
'lr_decay' : 10,
'data' : './/data/mnist',
'dropout1' : 0.25,
'dropout2' : 0.5,
'resume' : False,
'num_classes' : 10,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNetFB',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
'run_title' : 'withTE-mnist-C10-withparams-tel512-mom09-wd00005-lrd10-b4-do05-lr001-gpu-20ep',
'trainingset_size' : 50000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 20,#90,
'pretrained': False,
'pretrained_url': 'noTE-subset5k-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 50000,
'batch_size' : 10,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 512,
'fc8rate': 0,
'save_model': False,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
'tiny' : {
'dataset': 'TINYIMAGENET',
'lr': 0.001,
'momentum' : 0.9,
'weight_decay' : 0.0005,
'lr_decay' : 0.1,
'lr_decay_epochs': 30,
'data' : './/data/tiny-imagenet-200',
'dropout1' : 0.5,
'dropout2' : 0.5,
'resume' : False,
'num_classes' : 200,
'start_epoch' : 0,
'seed' : None,
'variable_batch_size' : False,
'world_size' : 1, #int(os.environ["WORLD_SIZE"])
'rank': 0,
'multiprocessing_distributed' : False,
'distributed' : False,
'ngpus_per_node' : 0, #torch.cuda.device_count()
'workers' : 0,
'dist_url' : None,
'arch' : 'AlexNetFB',
'shuffle_validation' : False,
'bestmodel_name' : 'bestmodel',
'checkpoint_name' : 'checkpoint',
'run_title' : 'ANTIN-withparams-tel512-mom09-wd00005-lrd10-b4-do05-lr001-gpu-20ep',
'trainingset_size' : 200000,
'gpu' : 'cpu', #'cuda:0',
'epochs': 90,
'pretrained': False,
'pretrained_url': 'noTE-subset5k-withparams-mom09-wd00005-lrd10-b128-do05-lr001-gpu-90ep-model_best.pth.tar',#'mom0-wd0-lrd0-b128-do0-gpu-baseretrain-noTE-noSMX-300ep-model_best.pth.tar',
#'pretrained_url': 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # last 4 layers with TE
#'pretrained_url' : 'mom0-wd00-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth (1).tar',
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-model_best.pth.tar', # leave empty to load the pytorch one
#'pretrained_url': 'mom9-wd0005-b128-gpu-baseretrain-wo-TE-wo-Freeze-checkpoint.pth.tar', # leave empty to load the pytorch one
# 'pretrained_url' : 'mom9-wd0005-b128-gpu-baseretrain-checkpoint.pth.tar',
'base_retrain': True,
'partial_freeze': False,
'trainsubset': 2000,
'batch_size' : 128,
'use_subset' : False,
'subset_classes' : None,
'print_freq' : 1,
'evaluate' : False,
#new
'tr1' : 0.9,
'te_events_batch_multiple' : 1,
'rolling_te_window' : False,
'suffix' : '',
'out_to_file' : False,
'skip_first' : 10,
'te_length': 512,
'fc8rate': 0,
'save_model': False,
'fc7rate': 0,
'fc7te' : True,
'debug' : True
},
} | 39.322414 | 195 | 0.567896 | 2,722 | 22,807 | 4.604335 | 0.070904 | 0.056012 | 0.064629 | 0.043086 | 0.966967 | 0.959068 | 0.955079 | 0.951009 | 0.945265 | 0.935051 | 0 | 0.089797 | 0.268558 | 22,807 | 580 | 196 | 39.322414 | 0.661491 | 0.284825 | 0 | 0.878244 | 0 | 0.033932 | 0.423918 | 0.12602 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.017964 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
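# A minimal usage sketch for the settings table above (assumption: a training
# script imports this module as `RunConfig`; `build_model` is hypothetical,
# named only for illustration):
#
#     from RunConfig import settings
#
#     cfg = settings['CIFAR10']
#     print(cfg['arch'], cfg['lr'], cfg['batch_size'], cfg['epochs'])
#     # model = build_model(cfg['arch'], num_classes=cfg['num_classes'])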
7e242b07f6be1b0886f4cda2ee8646339c10a82a | 2,730 | py | Python | test/pyaz/vm/extension/__init__.py | bigdatamoore/py-az-cli | 54383a4ee7cc77556f6183e74e992eec95b28e01 | [
"MIT"
] | null | null | null | test/pyaz/vm/extension/__init__.py | bigdatamoore/py-az-cli | 54383a4ee7cc77556f6183e74e992eec95b28e01 | [
"MIT"
] | 9 | 2021-09-24T16:37:24.000Z | 2021-12-24T00:39:19.000Z | test/pyaz/vm/extension/__init__.py | bigdatamoore/py-az-cli | 54383a4ee7cc77556f6183e74e992eec95b28e01 | [
"MIT"
] | null | null | null | import json, subprocess
from ... pyaz_utils import get_cli_name, get_params
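# Each wrapper below shells out to the Azure CLI with subprocess, echoes the
# composed command for traceability, and returns the parsed JSON from stdout;
# any stderr output is raised as an exception. `set` and `list` mirror the
# `az vm extension` sub-command names and therefore shadow the Python
# builtins inside this module.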
def delete(resource_group, vm_name, name, no_wait=None):
params = get_params(locals())
command = "az vm extension delete " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def show(resource_group, vm_name, name, expand=None):
params = get_params(locals())
command = "az vm extension show " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def set(resource_group, vm_name, name, publisher, version=None, settings=None, protected_settings=None, no_auto_upgrade_minor_version=None, force_update=None, extension_instance_name=None, enable_auto_upgrade=None, no_wait=None):
params = get_params(locals())
command = "az vm extension set " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def list(resource_group, vm_name):
params = get_params(locals())
command = "az vm extension list " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def wait(resource_group, vm_name, name, expand=None, timeout=None, interval=None, deleted=None, created=None, updated=None, exists=None, custom=None):
params = get_params(locals())
command = "az vm extension wait " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
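# A minimal usage sketch (assumptions: the Azure CLI is installed and
# authenticated, and this package is importable as shown; resource names
# are placeholders):
#
#     from pyaz.vm.extension import show
#
#     result = show(resource_group="my-rg", vm_name="my-vm",
#                   name="CustomScriptExtension")
#     print(result.get("provisioningState"))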
| 36.891892 | 229 | 0.674359 | 345 | 2,730 | 5.246377 | 0.176812 | 0.077348 | 0.055249 | 0.052486 | 0.813812 | 0.788398 | 0.788398 | 0.754144 | 0.731492 | 0.681768 | 0 | 0.004636 | 0.20989 | 2,730 | 73 | 230 | 37.39726 | 0.834492 | 0 | 0 | 0.820896 | 0 | 0 | 0.057143 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074627 | false | 0 | 0.029851 | 0 | 0.179104 | 0.223881 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7e3542a3c1d745645e45b8952ec7b798639fc6c9 | 78,956 | py | Python | ocbind/system/cpus/cpu/state/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 1 | 2019-08-01T17:42:57.000Z | 2019-08-01T17:42:57.000Z | ocbind/system/cpus/cpu/state/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 1 | 2021-05-26T16:38:04.000Z | 2021-05-26T16:38:04.000Z | ocbind/system/cpus/cpu/state/__init__.py | SeanCondon/onos-config-demo | 0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe | [
"Apache-2.0"
] | 4 | 2019-07-24T16:52:39.000Z | 2021-12-03T02:08:13.000Z | # -*- coding: utf-8 -*-
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import total
from . import user
from . import kernel
from . import nice
from . import idle
from . import wait
from . import hardware_interrupt
from . import software_interrupt
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-system - based on the path /system/cpus/cpu/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state data for the system CPU(s)
"""
__slots__ = ('_path_helper', '_extmethods', '__index','__total','__user','__kernel','__nice','__idle','__wait','__hardware_interrupt','__software_interrupt',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__index = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ALL': {}},),RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),], is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='union', is_config=False)
self.__total = YANGDynClass(base=total.total, is_container='container', yang_name="total", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
self.__user = YANGDynClass(base=user.user, is_container='container', yang_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
self.__kernel = YANGDynClass(base=kernel.kernel, is_container='container', yang_name="kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
self.__nice = YANGDynClass(base=nice.nice, is_container='container', yang_name="nice", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
self.__idle = YANGDynClass(base=idle.idle, is_container='container', yang_name="idle", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
self.__wait = YANGDynClass(base=wait.wait, is_container='container', yang_name="wait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
self.__hardware_interrupt = YANGDynClass(base=hardware_interrupt.hardware_interrupt, is_container='container', yang_name="hardware-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
self.__software_interrupt = YANGDynClass(base=software_interrupt.software_interrupt, is_container='container', yang_name="software-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['system', 'cpus', 'cpu', 'state']
def _get_index(self):
"""
Getter method for index, mapped from YANG variable /system/cpus/cpu/state/index (union)
YANG Description: The CPU index for each processor core on the system. On a
single-core system, the index should be zero. The ALL
index signifies an aggregation of the CPU utilization
statistics over all cores in the system.
"""
return self.__index
def _set_index(self, v, load=False):
"""
Setter method for index, mapped from YANG variable /system/cpus/cpu/state/index (union)
If this variable is read-only (config: false) in the
source YANG file, then _set_index is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_index() directly.
YANG Description: The CPU index for each processor core on the system. On a
single-core system, the index should be zero. The ALL
index signifies an aggregation of the CPU utilization
statistics over all cores in the system.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ALL': {}},),RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),], is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='union', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """index must be of a type compatible with union""",
'defined-type': "openconfig-system:union",
'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ALL': {}},),RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),], is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='union', is_config=False)""",
})
self.__index = t
if hasattr(self, '_set'):
self._set()
def _unset_index(self):
self.__index = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ALL': {}},),RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32),], is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='union', is_config=False)
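  # Note on the union base above: `index` accepts either the literal string
  # 'ALL' (aggregate CPU statistics over all cores) or a uint32 core number;
  # per the YANG description, a single-core system reports index 0.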
def _get_total(self):
"""
Getter method for total, mapped from YANG variable /system/cpus/cpu/state/total (container)
YANG Description: Total CPU utilization.
"""
return self.__total
def _set_total(self, v, load=False):
"""
Setter method for total, mapped from YANG variable /system/cpus/cpu/state/total (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_total is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_total() directly.
YANG Description: Total CPU utilization.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=total.total, is_container='container', yang_name="total", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """total must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=total.total, is_container='container', yang_name="total", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__total = t
if hasattr(self, '_set'):
self._set()
def _unset_total(self):
self.__total = YANGDynClass(base=total.total, is_container='container', yang_name="total", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
def _get_user(self):
"""
Getter method for user, mapped from YANG variable /system/cpus/cpu/state/user (container)
YANG Description: Percentage of CPU time spent running in user space.
"""
return self.__user
def _set_user(self, v, load=False):
"""
Setter method for user, mapped from YANG variable /system/cpus/cpu/state/user (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_user is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_user() directly.
YANG Description: Percentage of CPU time spent running in user space.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=user.user, is_container='container', yang_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """user must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=user.user, is_container='container', yang_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__user = t
if hasattr(self, '_set'):
self._set()
def _unset_user(self):
self.__user = YANGDynClass(base=user.user, is_container='container', yang_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
def _get_kernel(self):
"""
Getter method for kernel, mapped from YANG variable /system/cpus/cpu/state/kernel (container)
YANG Description: Percentage of CPU time spent running in kernel space.
"""
return self.__kernel
def _set_kernel(self, v, load=False):
"""
Setter method for kernel, mapped from YANG variable /system/cpus/cpu/state/kernel (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_kernel is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_kernel() directly.
YANG Description: Percentage of CPU time spent running in kernel space.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=kernel.kernel, is_container='container', yang_name="kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """kernel must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=kernel.kernel, is_container='container', yang_name="kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__kernel = t
if hasattr(self, '_set'):
self._set()
def _unset_kernel(self):
self.__kernel = YANGDynClass(base=kernel.kernel, is_container='container', yang_name="kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
def _get_nice(self):
"""
Getter method for nice, mapped from YANG variable /system/cpus/cpu/state/nice (container)
YANG Description: Percentage of CPU time spent running low-priority (niced)
user processes.
"""
return self.__nice
def _set_nice(self, v, load=False):
"""
Setter method for nice, mapped from YANG variable /system/cpus/cpu/state/nice (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_nice is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_nice() directly.
YANG Description: Percentage of CPU time spent running low-priority (niced)
user processes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=nice.nice, is_container='container', yang_name="nice", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """nice must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=nice.nice, is_container='container', yang_name="nice", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__nice = t
if hasattr(self, '_set'):
self._set()
def _unset_nice(self):
self.__nice = YANGDynClass(base=nice.nice, is_container='container', yang_name="nice", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
def _get_idle(self):
"""
Getter method for idle, mapped from YANG variable /system/cpus/cpu/state/idle (container)
YANG Description: Percentage of CPU time spent idle.
"""
return self.__idle
def _set_idle(self, v, load=False):
"""
Setter method for idle, mapped from YANG variable /system/cpus/cpu/state/idle (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_idle is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_idle() directly.
YANG Description: Percentage of CPU time spent idle.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=idle.idle, is_container='container', yang_name="idle", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """idle must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=idle.idle, is_container='container', yang_name="idle", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__idle = t
if hasattr(self, '_set'):
self._set()
def _unset_idle(self):
self.__idle = YANGDynClass(base=idle.idle, is_container='container', yang_name="idle", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
def _get_wait(self):
"""
Getter method for wait, mapped from YANG variable /system/cpus/cpu/state/wait (container)
YANG Description: Percentage of CPU time spent waiting for I/O.
"""
return self.__wait
def _set_wait(self, v, load=False):
"""
Setter method for wait, mapped from YANG variable /system/cpus/cpu/state/wait (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_wait is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_wait() directly.
YANG Description: Percentage of CPU time spent waiting for I/O.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=wait.wait, is_container='container', yang_name="wait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """wait must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=wait.wait, is_container='container', yang_name="wait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__wait = t
if hasattr(self, '_set'):
self._set()
def _unset_wait(self):
self.__wait = YANGDynClass(base=wait.wait, is_container='container', yang_name="wait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
def _get_hardware_interrupt(self):
"""
Getter method for hardware_interrupt, mapped from YANG variable /system/cpus/cpu/state/hardware_interrupt (container)
YANG Description: Percentage of CPU time spent servicing hardware interrupts.
"""
return self.__hardware_interrupt
def _set_hardware_interrupt(self, v, load=False):
"""
Setter method for hardware_interrupt, mapped from YANG variable /system/cpus/cpu/state/hardware_interrupt (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_hardware_interrupt is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_hardware_interrupt() directly.
YANG Description: Percentage of CPU time spent servicing hardware interrupts.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=hardware_interrupt.hardware_interrupt, is_container='container', yang_name="hardware-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """hardware_interrupt must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=hardware_interrupt.hardware_interrupt, is_container='container', yang_name="hardware-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__hardware_interrupt = t
if hasattr(self, '_set'):
self._set()
def _unset_hardware_interrupt(self):
self.__hardware_interrupt = YANGDynClass(base=hardware_interrupt.hardware_interrupt, is_container='container', yang_name="hardware-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
def _get_software_interrupt(self):
"""
Getter method for software_interrupt, mapped from YANG variable /system/cpus/cpu/state/software_interrupt (container)
YANG Description: Percentage of CPU time spent servicing software interrupts.
"""
return self.__software_interrupt
def _set_software_interrupt(self, v, load=False):
"""
Setter method for software_interrupt, mapped from YANG variable /system/cpus/cpu/state/software_interrupt (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_software_interrupt is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_software_interrupt() directly.
YANG Description: Percentage of CPU time spent servicing software interrupts.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=software_interrupt.software_interrupt, is_container='container', yang_name="software-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """software_interrupt must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=software_interrupt.software_interrupt, is_container='container', yang_name="software-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)""",
})
self.__software_interrupt = t
if hasattr(self, '_set'):
self._set()
def _unset_software_interrupt(self):
self.__software_interrupt = YANGDynClass(base=software_interrupt.software_interrupt, is_container='container', yang_name="software-interrupt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=False)
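# The leaves below are 'config false' in the YANG model, so each is exposed
# as a read-only property (getter only); backends populate the values through
# the private _set_*() methods documented above.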
index = __builtin__.property(_get_index)
total = __builtin__.property(_get_total)
user = __builtin__.property(_get_user)
kernel = __builtin__.property(_get_kernel)
nice = __builtin__.property(_get_nice)
idle = __builtin__.property(_get_idle)
wait = __builtin__.property(_get_wait)
hardware_interrupt = __builtin__.property(_get_hardware_interrupt)
software_interrupt = __builtin__.property(_get_software_interrupt)
_pyangbind_elements = OrderedDict([('index', index), ('total', total), ('user', user), ('kernel', kernel), ('nice', nice), ('idle', idle), ('wait', wait), ('hardware_interrupt', hardware_interrupt), ('software_interrupt', software_interrupt), ])
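# Illustrative usage sketch (assumption: `root` is a hypothetical, populated
# instance of the top-level openconfig-system binding; it is not defined in
# this module):
#
#   for cpu in root.system.cpus.cpu.values():
#       st = cpu.state
#       print(st.index, st.total, st.user, st.kernel, st.idle)
#
# st.index is either a core number (uint32) or the literal 'ALL', which
# denotes CPU utilization aggregated over all cores in the system.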
| 60.363914 | 575 | 0.728241 | 10,141 | 78,956 | 5.44256 | 0.0213 | 0.04294 | 0.057833 | 0.039135 | 0.992481 | 0.988549 | 0.988549 | 0.988549 | 0.988549 | 0.988549 | 0 | 0.002669 | 0.150565 | 78,956 | 1,307 | 576 | 60.410099 | 0.820272 | 0.218818 | 0 | 0.973202 | 0 | 0.038082 | 0.323727 | 0.088095 | 0 | 0 | 0 | 0 | 0 | 1 | 0.122708 | false | 0 | 0.056417 | 0 | 0.284908 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7e6e597ff215315f28d663901a32908167378b2a | 6,042 | py | Python | src/start_script_mbert.py | EMBEDDIA/cross-lingual_training_for_offensive_language_detection | d050ba86c4a7b321f50395a541c98da0dde6af08 | [
"MIT"
] | 1 | 2021-05-01T11:06:11.000Z | 2021-05-01T11:06:11.000Z | src/start_script_mbert.py | EMBEDDIA/cross-lingual_training_for_offensive_language_detection | d050ba86c4a7b321f50395a541c98da0dde6af08 | [
"MIT"
] | null | null | null | src/start_script_mbert.py | EMBEDDIA/cross-lingual_training_for_offensive_language_detection | d050ba86c4a7b321f50395a541c98da0dde6af08 | [
"MIT"
] | null | null | null | import subprocess
def run():
    # Fine-tune the English mBERT checkpoint on each target language three times,
    # once per random seed. Croatian data uses the "text_a" column, the others "data".
    datasets = [
        ("croatian", "cro", "text_a"),
        ("slovenian", "slo", "data"),
        ("arabic", "arabic", "data"),
    ]
    for run_idx, seed in enumerate(["42", "84", "126"], start=1):
        for lang, prefix, data_column in datasets:
            subprocess.call(["python", "incremental_learning.py",
                             "--train_data_path", "../data/{}/{}_train.tsv".format(lang, prefix),
                             "--test_data_path", "../data/{}/{}_internal_test.tsv".format(lang, prefix),
                             "--eval_data_path", "../data/{}/{}_val.tsv".format(lang, prefix),
                             "--output_dir", "../models/mbert_{}{}".format(lang, run_idx),
                             "--data_column", data_column,
                             "--label_column", "label",
                             "--config_file", "../models/mbert_en_finetune/config.json",
                             "--model_file", "../models/mbert_en_finetune/pytorch_model.bin",
                             "--random_seed", seed])
if __name__ == "__main__":
run() | 61.653061 | 85 | 0.505627 | 569 | 6,042 | 4.98594 | 0.096661 | 0.076137 | 0.114205 | 0.10786 | 0.960169 | 0.960169 | 0.960169 | 0.960169 | 0.960169 | 0.960169 | 0 | 0.007285 | 0.318438 | 6,042 | 98 | 86 | 61.653061 | 0.681642 | 0 | 0 | 0.861702 | 0 | 0 | 0.557836 | 0.342049 | 0 | 0 | 0 | 0 | 0 | 1 | 0.010638 | true | 0 | 0.010638 | 0 | 0.021277 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0e4cfda9d9abc322e3500df5c5f4ec20d037ed05 | 40 | py | Python | __init__.py | linklab/link_rl | e3d3196dcd49fd71b45941e07fc0d8a27d1d8c99 | [
"MIT"
] | null | null | null | __init__.py | linklab/link_rl | e3d3196dcd49fd71b45941e07fc0d8a27d1d8c99 | [
"MIT"
] | null | null | null | __init__.py | linklab/link_rl | e3d3196dcd49fd71b45941e07fc0d8a27d1d8c99 | [
"MIT"
] | 1 | 2021-11-23T12:30:37.000Z | 2021-11-23T12:30:37.000Z | from . import common
from . import codes | 20 | 20 | 0.775 | 6 | 40 | 5.166667 | 0.666667 | 0.645161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175 | 40 | 2 | 21 | 20 | 0.939394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0e5cf7416decbba28f371c0ebab4ba3f14ff5e75 | 95 | py | Python | gym-example/gym_example/envs/__init__.py | DerwenAI/gym_example | d6f97de9f751bb2ae04e724ec5d223cbb5ed2290 | [
"MIT"
] | 16 | 2021-01-02T02:36:29.000Z | 2022-01-25T14:20:56.000Z | gym-example/gym_example/envs/__init__.py | DerwenAI/gym_example | d6f97de9f751bb2ae04e724ec5d223cbb5ed2290 | [
"MIT"
] | 2 | 2020-12-08T21:52:55.000Z | 2022-01-02T23:25:50.000Z | gym-example/gym_example/envs/__init__.py | DerwenAI/gym_example | d6f97de9f751bb2ae04e724ec5d223cbb5ed2290 | [
"MIT"
] | 12 | 2020-10-11T08:40:20.000Z | 2022-02-20T23:03:21.000Z | from gym_example.envs.example_env import Example_v0
from gym_example.envs.fail1 import Fail_v1
| 31.666667 | 51 | 0.873684 | 17 | 95 | 4.588235 | 0.588235 | 0.179487 | 0.358974 | 0.461538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034483 | 0.084211 | 95 | 2 | 52 | 47.5 | 0.862069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7d2b0bc3810ad9480a8a04e94e45f35e3fa27817 | 2,899 | py | Python | healthtools_ec/surgeons.py | CodeForAfrica/healthtools-ezolwaluko | 21fac3c05e15c0b1492bb4f9004bc0d9e1d8392b | [
"Apache-2.0"
] | null | null | null | healthtools_ec/surgeons.py | CodeForAfrica/healthtools-ezolwaluko | 21fac3c05e15c0b1492bb4f9004bc0d9e1d8392b | [
"Apache-2.0"
] | 1 | 2022-03-22T07:30:29.000Z | 2022-03-22T07:30:29.000Z | healthtools_ec/surgeons.py | CodeForAfrica/healthtools-ezolwaluko | 21fac3c05e15c0b1492bb4f9004bc0d9e1d8392b | [
"Apache-2.0"
] | 1 | 2018-11-24T20:47:56.000Z | 2018-11-24T20:47:56.000Z | from flask import flash, make_response, render_template, request, session
from healthtools_ec.app import app
from .helpers import email_register, get_locale_extension
from .models import RegisterSurgeon, db
from .models.surgeons import RegisterForm
@app.route("/register", methods=["GET", "POST"])
def surgeons_register():
form = RegisterForm(request.form)
status = 200
if request.method == "POST":
if form.validate():
surgeon = RegisterSurgeon()
with db.session.no_autoflush:
form.populate_obj(surgeon)
db.session.add(surgeon)
db.session.commit()
response = email_register(surgeon)
print(response)
template_locale = get_locale_extension(session["lang"])
return render_template(
f"surgeons/registersurgeonredirect{template_locale}.html"
)
else:
if session["lang"]:
flash(
"Nceda ulungise ezi ngxaki zingezantsi kwaye uzame kwakhona.",
"warning",
)
else:
flash("Please correct the problems below and try again.", "warning")
template_locale = get_locale_extension(session["lang"])
resp = make_response(
render_template(f"surgeons/surgeons{template_locale}.html", form=form)
)
return (
resp,
status,
# ensure the browser refreshes the page when Back is pressed
{"Cache-Control": "no-cache, no-store, must-revalidate"},
)
@app.route("/register-mobi", methods=["GET", "POST"])
def surgeons_register_mobi():
form = RegisterForm(request.form)
status = 200
if request.method == "POST":
if form.validate():
surgeon = RegisterSurgeon()
with db.session.no_autoflush:
form.populate_obj(surgeon)
db.session.add(surgeon)
db.session.commit()
response = email_register(surgeon)
print(response)
template_locale = get_locale_extension(session["lang"])
return render_template(
f"mobile/surgeons/registersurgeonredirect{template_locale}.html"
)
else:
if session["lang"]:
flash(
"Nceda ulungise ezi ngxaki zingezantsi kwaye uzame kwakhona.",
"warning",
)
else:
flash("Please correct the problems below and try again.", "warning")
template_locale = get_locale_extension(session["lang"])
resp = make_response(
render_template(f"mobile/surgeons/surgeons{template_locale}.html", form=form)
)
return (
resp,
status,
# ensure the browser refreshes the page when Back is pressed
{"Cache-Control": "no-cache, no-store, must-revalidate"},
)
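# --- Hedged refactoring sketch (editorial addition, not part of the original app) ---
# The two views above differ only in their template prefix ("" vs "mobile/"), so the
# shared flow could live in one helper; `template_prefix` is a hypothetical parameter.
def _surgeons_register_common(template_prefix=""):
    form = RegisterForm(request.form)
    status = 200
    if request.method == "POST":
        if form.validate():
            surgeon = RegisterSurgeon()
            with db.session.no_autoflush:
                form.populate_obj(surgeon)
            db.session.add(surgeon)
            db.session.commit()
            email_register(surgeon)
            template_locale = get_locale_extension(session["lang"])
            return render_template(
                f"{template_prefix}surgeons/registersurgeonredirect{template_locale}.html"
            )
        if session["lang"]:
            flash("Nceda ulungise ezi ngxaki zingezantsi kwaye uzame kwakhona.", "warning")
        else:
            flash("Please correct the problems below and try again.", "warning")
    template_locale = get_locale_extension(session["lang"])
    resp = make_response(
        render_template(f"{template_prefix}surgeons/surgeons{template_locale}.html", form=form)
    )
    # same Back-button safety header as in the views above
    return resp, status, {"Cache-Control": "no-cache, no-store, must-revalidate"}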
| 34.511905 | 85 | 0.595033 | 294 | 2,899 | 5.741497 | 0.278912 | 0.066351 | 0.053318 | 0.054502 | 0.854265 | 0.847156 | 0.808057 | 0.808057 | 0.808057 | 0.808057 | 0 | 0.00299 | 0.307692 | 2,899 | 83 | 86 | 34.927711 | 0.838067 | 0.040359 | 0 | 0.712329 | 0 | 0 | 0.218424 | 0.071968 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027397 | false | 0 | 0.068493 | 0 | 0.150685 | 0.027397 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
70b3f5b4ae84f1f14674b07baa074a448901244b | 42,328 | py | Python | declaraciones/declaracion/models/activos.py | gob-cdmx/declaraciones | 90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311 | [
"MIT"
] | 2 | 2019-10-17T02:40:12.000Z | 2019-10-17T22:51:36.000Z | declaraciones/declaracion/models/activos.py | gob-cdmx/declaraciones | 90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311 | [
"MIT"
] | 1 | 2019-10-02T20:23:12.000Z | 2019-10-02T20:23:12.000Z | declaraciones/declaracion/models/activos.py | gob-cdmx/declaraciones | 90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311 | [
"MIT"
] | 4 | 2019-08-20T21:16:04.000Z | 2021-07-01T03:08:10.000Z | from django.db import models
from django.urls import reverse_lazy
from .informacion_personal import(Declaraciones, Domicilios, Observaciones,
InfoPersonalVar)
from .catalogos import (CatTiposInmuebles, CatTiposTitulares,
CatFormasAdquisiciones, CatSectoresIndustria,
CatMonedas, CatTiposOperaciones, CatTiposMuebles,
CatPaises, CatEntidadesFederativas,
CatTiposEspecificosInversiones, CatTiposInversiones,
CatTiposMetales, CatTiposFideicomisos,
CatTiposRelacionesPersonales, CatUnidadesTemporales, CatActivoBien,
CatTipoParticipacion, CatEntesPublicos)
class ActivosBienes(models.Model):
BIENES_INMUEBLES = 1
BIENES_INTANGIBLES = 2
BIENES_MUEBLES = 3
MUEBLES_NO_REGISTRABLES = 4
FIDEICOMISOS = 5
CUENTAS_POR_COBRAR = 6
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
id_activobien = models.IntegerField(null=True)
cat_activo_bien = models.ForeignKey(CatActivoBien, on_delete=models.DO_NOTHING, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class BienesPersonas(models.Model):
COVENDEDOR = 1
COPROPIETARIO = 2
FIDEICOMITENTE = 3
FIDEICOMISARIO = 4
FIDUCIARIO = 5
PRESTATARIO_O_DEUDOR = 6
DECLARANTE = 7
COPROPIETARIO = 8
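# NOTE: COPROPIETARIO is redefined here (declared as 2 above); class bodies execute
# top to bottom, so every lookup below sees the value 8.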
PROPIETARIO_ANTERIOR = 9
info_personal_var = models.ForeignKey(InfoPersonalVar, on_delete=models.DO_NOTHING, related_name="bienes_personas_info_personal_var")
activos_bienes = models.ForeignKey(ActivosBienes, on_delete=models.DO_NOTHING)
porcentaje = models.DecimalField(max_digits=5, decimal_places=2, null=True, blank=True)
es_propietario = models.BooleanField(blank=True, null=True, default=None)
precio_adquision = models.DecimalField(max_digits=13, decimal_places=2, null=True, blank=True)
el_adquirio = models.BooleanField(blank=True, null=True, default=None)
cat_tipo_participacion = models.ForeignKey(CatTipoParticipacion, on_delete=models.DO_NOTHING)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
tipo_relacion = models.ForeignKey(CatTiposRelacionesPersonales, on_delete=models.DO_NOTHING, blank=True, null=True)
otra_relacion = models.CharField(max_length=255, blank=True, null=True)
otra_relacion_familiar = models.CharField(max_length=255, blank=True)
otra_persona = models.ForeignKey(InfoPersonalVar, on_delete=models.DO_NOTHING, blank=True, null=True, related_name="bienes_personas_otra_persona")
def tipo(self):
return self.cat_tipo_participacion_id
def relacion(self):
try:
if self.tipo_relacion.default:
return u"{} {}".format(self.tipo_relacion,
self.otra_relacion)
else:
return u"{}".format(self.tipo_relacion)
except Exception as e:
return u""
class BienesInmuebles(models.Model):
superficie_terreno = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
superficie_construccion = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
otro_titular = models.CharField(max_length=255, blank=True)
num_escritura_publica = models.CharField(max_length=255, blank=True)
num_registro_publico = models.CharField(max_length=255, blank=True)
folio_real = models.CharField(max_length=255, blank=True)
fecha_contrato_compra = models.DateField(null=True, blank=True)
otra_forma = models.CharField(max_length=255, blank=True)
fecha_adquisicion = models.DateField(null=True, blank=True)
precio_adquisicion = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
valor_catastral = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_formas_adquisiciones = models.ForeignKey(CatFormasAdquisiciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_inmuebles = models.ForeignKey(CatTiposInmuebles, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_operaciones = models.ForeignKey(CatTiposOperaciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_titulares = models.ForeignKey(CatTiposTitulares, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
domicilios = models.ForeignKey(Domicilios, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
activos_bienes = models.ForeignKey(ActivosBienes, on_delete=models.DO_NOTHING)
otra_operacion = models.CharField(max_length=255, blank=True, null=True)
otro_inmueble = models.CharField(max_length=255, blank=True, null=True)
def persona(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.COPROPIETARIO).first()
except Exception as e:
return None
def copropietario(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.COPROPIETARIO)
except Exception as e:
return None
def declarante(self):
try:
return [BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.DECLARANTE).first().info_personal_var]
except Exception as e:
return None
def propierario_anterior(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.PROPIETARIO_ANTERIOR)
except Exception as e:
return None
def observacion(self):
return [self.observaciones]
def columna_uno(self):
if self.cat_tipos_operaciones:
return u"{}".format(self.cat_tipos_operaciones)
else:
return u""
def columna_dos(self):
if self.cat_formas_adquisiciones:
return u"{}".format(self.cat_formas_adquisiciones)
else:
return u""
def columna_tres(self):
if self.cat_tipos_titulares:
return u"{}".format(self.cat_tipos_titulares)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:bienes-inmuebles-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:bienes-inmuebles-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:bienes-inmuebles',
kwargs={'folio': self.declaraciones.folio})
def tipo_operacion(self):
try:
if self.cat_tipos_operaciones.default:
return u"{} {}".format(self.cat_tipos_operaciones,
self.otra_operacion)
else:
return u"{}".format(self.cat_tipos_operaciones)
except Exception as e:
return u""
def tipo_inmueble(self):
try:
if self.cat_tipos_inmuebles.default:
return u"{} {}".format(self.cat_tipos_inmuebles,
self.otro_inmueble)
else:
return u"{}".format(self.cat_tipos_inmuebles)
except Exception as e:
return u""
def titular(self):
try:
if self.cat_tipos_titulares.default:
return u"{} {}".format(self.cat_tipos_titulares,
self.otro_titular)
else:
return u"{}".format(self.cat_tipos_titulares)
except Exception as e:
return u""
def forma_adquisicion(self):
try:
if self.cat_formas_adquisiciones.default:
return u"{} {}".format(self.cat_formas_adquisiciones,
self.otra_forma)
else:
return u"{}".format(self.cat_formas_adquisiciones)
except Exception as e:
return u""
class BienesMuebles(models.Model):
otra_operacion = models.CharField(max_length=255, blank=True)
otro_tipo_mueble = models.CharField(max_length=255, blank=True)
marca = models.CharField(max_length=255, blank=True)
submarca = models.CharField(max_length=255, blank=True)
modelo = models.IntegerField(blank=True, null=True)
num_serie = models.CharField(max_length=255, blank=True)
otro_titular = models.CharField(max_length=255, blank=True)
num_registro_vehicular = models.CharField(max_length=255, blank=True)
otra_forma = models.CharField(max_length=255, blank=True)
otro_sector = models.CharField(max_length=255, blank=True)
fecha_adquisicion = models.DateField(null=True, blank=True)
precio_adquisicion = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_entidades_federativas = models.ForeignKey(CatEntidadesFederativas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_formas_adquisiciones = models.ForeignKey(CatFormasAdquisiciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_paises = models.ForeignKey(CatPaises, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_muebles = models.ForeignKey(CatTiposMuebles, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_operaciones = models.ForeignKey(CatTiposOperaciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_titulares = models.ForeignKey(CatTiposTitulares, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
activos_bienes = models.ForeignKey(ActivosBienes, on_delete=models.DO_NOTHING)
def declarante(self):
try:
return [BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.DECLARANTE).first().info_personal_var]
except Exception as e:
return None
def copropietario(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.COPROPIETARIO)
except Exception as e:
print(e)
return None
def propierario_anterior(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.PROPIETARIO_ANTERIOR)
except Exception as e:
return None
def observacion(self):
return [self.observaciones]
def columna_uno(self):
if self.cat_tipos_operaciones:
return u"{}".format(self.cat_tipos_operaciones)
else:
return u""
def columna_dos(self):
if self.cat_formas_adquisiciones:
return u"{}".format(self.cat_formas_adquisiciones)
else:
return u""
def columna_tres(self):
if self.cat_tipos_titulares:
return u"{}".format(self.cat_tipos_titulares)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:bienes-muebles-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:bienes-muebles-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:bienes-muebles',
kwargs={'folio': self.declaraciones.folio})
def tipo_operacion(self):
try:
if self.cat_tipos_operaciones.default:
return u"{} {}".format(self.cat_tipos_operaciones,
self.otra_operacion)
else:
return u"{}".format(self.cat_tipos_operaciones)
except Exception as e:
return u""
def tipo_mueble(self):
try:
if self.cat_tipos_muebles.default:
return u"{} {}".format(self.cat_tipos_muebles,
self.otro_tipo_mueble)
else:
return u"{}".format(self.cat_tipos_muebles)
except Exception as e:
return u""
def titular(self):
try:
if self.cat_tipos_titulares.default:
return u"{} {}".format(self.cat_tipos_titulares,
self.otro_titular)
else:
return u"{}".format(self.cat_tipos_titulares)
except Exception as e:
return u""
def forma_adquisicion(self):
try:
if self.cat_formas_adquisiciones.default:
return u"{} {}".format(self.cat_formas_adquisiciones,
self.otra_forma)
else:
return u"{}".format(self.cat_formas_adquisiciones)
except Exception as e:
return u""
class MueblesNoRegistrables(models.Model):
otra_operacion = models.CharField(max_length=255, blank=True)
otro_bien_mueble = models.CharField(max_length=255, blank=True)
descripcion_bien = models.CharField(max_length=255, blank=True)
otro_titular = models.CharField(max_length=255, blank=True)
otra_forma = models.CharField(max_length=255, blank=True)
fecha_adquisicion = models.DateField(null=True, blank=True)
precio_adquisicion = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_formas_adquisiciones = models.ForeignKey(CatFormasAdquisiciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_muebles = models.ForeignKey(CatTiposMuebles, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_operaciones = models.ForeignKey(CatTiposOperaciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_titulares = models.ForeignKey(CatTiposTitulares, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
activos_bienes = models.ForeignKey(ActivosBienes, on_delete=models.DO_NOTHING)
def declarante(self):
try:
return [BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.DECLARANTE).first().info_personal_var]
except Exception as e:
return None
def copropietario(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.COPROPIETARIO)
except Exception as e:
return None
def propierario_anterior(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.PROPIETARIO_ANTERIOR)
except Exception as e:
return None
def observacion(self):
return [self.observaciones]
def columna_uno(self):
if self.cat_tipos_operaciones:
return u"{}".format(self.cat_tipos_operaciones)
else:
return u""
def columna_dos(self):
if self.cat_formas_adquisiciones:
return u"{}".format(self.cat_formas_adquisiciones)
else:
return u""
def columna_tres(self):
if self.cat_tipos_titulares:
return u"{}".format(self.cat_tipos_titulares)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:muebles-noregistrables-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:muebles-noregistrables-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:muebles-noregistrables',
kwargs={'folio': self.declaraciones.folio})
def tipo_operacion(self):
try:
if self.cat_tipos_operaciones.default:
return u"{} {}".format(self.cat_tipos_operaciones,
self.otra_operacion)
else:
return u"{}".format(self.cat_tipos_operaciones)
except Exception as e:
return u""
def tipo_mueble(self):
try:
if self.cat_tipos_muebles.default:
return u"{} {}".format(self.cat_tipos_muebles,
self.otro_bien_mueble)
else:
return u"{}".format(self.cat_tipos_muebles)
except Exception as e:
return u""
def titular(self):
try:
if self.cat_tipos_titulares.default:
return u"{} {}".format(self.cat_tipos_titulares,
self.otro_titular)
else:
return u"{}".format(self.cat_tipos_titulares)
except Exception as e:
return u""
def forma_adquisicion(self):
try:
if self.cat_formas_adquisiciones.default:
return u"{} {}".format(self.cat_formas_adquisiciones,
self.otra_forma)
else:
return u"{}".format(self.cat_formas_adquisiciones)
except Exception as e:
return u""
class Inversiones(models.Model):
otra_operacion = models.CharField(max_length=255, blank=True)
otra_inversion = models.CharField(max_length=255, blank=True)
otro_tipo_especifico = models.CharField(max_length=255, blank=True)
num_cuenta = models.CharField(max_length=255, blank=True)
otra_forma = models.CharField(max_length=255, blank=True)
fecha_inicio = models.DateField(null=True, blank=True)
monto_original = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
tasa_interes = models.DecimalField(max_digits=5, decimal_places=2, null=True, blank=True)
saldo_actual = models.DecimalField(max_digits=10, decimal_places=2, null=True, blank=True)
plazo = models.DecimalField(max_digits=6, decimal_places=2, null=True, blank=True, default=0)
cat_tipos_titulares = models.ForeignKey(CatTiposTitulares, on_delete=models.DO_NOTHING, null=True, blank=True)
otro_tipo_titular = models.CharField(max_length=255, blank=True)
porcentaje_inversion = models.DecimalField(max_digits=5, decimal_places=2, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_formas_adquisiciones = models.ForeignKey(CatFormasAdquisiciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_paises = models.ForeignKey(CatPaises, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_especificos_inversiones = models.ForeignKey(CatTiposEspecificosInversiones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_inversiones = models.ForeignKey(CatTiposInversiones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_operaciones = models.ForeignKey(CatTiposOperaciones, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
info_personal_var = models.ForeignKey(InfoPersonalVar, on_delete=models.DO_NOTHING)
cat_unidades_temporales = models.ForeignKey(CatUnidadesTemporales, on_delete=models.DO_NOTHING, null=True, blank=True)
def observacion(self):
return [self.observaciones]
def columna_uno(self):
if self.cat_tipos_operaciones:
return u"{}".format(self.cat_tipos_operaciones)
else:
return u""
def columna_dos(self):
if self.cat_formas_adquisiciones:
return u"{}".format(self.cat_formas_adquisiciones)
else:
    return u""
def columna_tres(self):
if self.cat_tipos_titulares:
return u"{}".format(self.cat_tipos_titulares)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:inversiones-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:inversiones-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:inversiones',
kwargs={'folio': self.declaraciones.folio})
def persona(self):
return [self.info_personal_var]
def tipo_operacion(self):
try:
if self.cat_tipos_operaciones.default:
return u"{} {}".format(self.cat_tipos_operaciones,
self.otra_operacion)
else:
return u"{}".format(self.cat_tipos_operaciones)
except Exception as e:
return u""
def titular(self):
try:
if self.cat_tipos_titulares.default:
return u"{} {}".format(self.cat_tipos_titulares,
self.otro_tipo_titular)
else:
return u"{}".format(self.cat_tipos_titulares)
except Exception as e:
return u""
def forma_adquisicion(self):
try:
if self.cat_formas_adquisiciones.default:
return u"{} {}".format(self.cat_formas_adquisiciones,
self.otra_forma)
else:
return u"{}".format(self.cat_formas_adquisiciones)
except Exception as e:
return u""
def tipo_inversion(self):
try:
if self.cat_tipos_inversiones.default:
return u"{} {}".format(self.cat_tipos_inversiones,
self.otra_inversion)
else:
return u"{}".format(self.cat_tipos_inversiones)
except Exception as e:
return u""
def tipo_especifico(self):
try:
if self.cat_tipos_especificos_inversiones.default:
return u"{} {}".format(self.cat_tipos_especificos_inversiones,
self.otro_tipo_especifico)
else:
return u"{}".format(self.cat_tipos_especificos_inversiones)
except Exception as e:
return u""
class EfectivoMetales(models.Model):
otro_tipo_operacion = models.CharField(max_length=255, blank=True)
monto_efectivo = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
otro_metal = models.CharField(max_length=255, blank=True)
unidades = models.CharField(max_length=255, blank=True)
monto_metales = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
otra_forma = models.CharField(max_length=255, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_formas_adquisiciones = models.ForeignKey(CatFormasAdquisiciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_metales = models.ForeignKey(CatTiposMetales, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_operaciones = models.ForeignKey(CatTiposOperaciones, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
def observacion(self):
return [self.observaciones]
def columna_uno(self):
if self.cat_tipos_operaciones:
return u"{}".format(self.cat_tipos_operaciones)
else:
return u""
def columna_dos(self):
if self.cat_formas_adquisiciones:
return u"{}".format(self.cat_formas_adquisiciones)
else:
return u""
def columna_tres(self):
if self.cat_tipos_metales:
return u"{}".format(self.cat_tipos_metales)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:efectivo-metales-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:efectivo-metales-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:efectivo-metales',
kwargs={'folio': self.declaraciones.folio})
def tipo_operacion(self):
try:
if self.cat_tipos_operaciones.default:
return u"{} {}".format(self.cat_tipos_operaciones,
self.otro_tipo_operacion)
else:
return u"{}".format(self.cat_tipos_operaciones)
except Exception as e:
return u""
def tipo_metal(self):
try:
if self.cat_tipos_metales.default:
return u"{} {}".format(self.cat_tipos_metales,
self.otro_metal)
else:
return u"{}".format(self.cat_tipos_metales)
except Exception as e:
return u""
def forma_adquisicion(self):
try:
if self.cat_formas_adquisiciones.default:
return u"{} {}".format(self.cat_formas_adquisiciones,
self.otra_forma)
else:
return u"{}".format(self.cat_formas_adquisiciones)
except Exception as e:
return u""
class Fideicomisos(models.Model):
nombre_fideicomiso = models.CharField(max_length=255, blank=True)
otra_operacion = models.CharField(max_length=255, blank=True)
otro_fideicomiso = models.CharField(max_length=255, blank=True)
objetivo_fideicomiso = models.CharField(max_length=255, blank=True)
num_registro = models.CharField(max_length=255, blank=True)
fecha_creacion = models.DateField(null=True, blank=True)
plazo_vigencia = models.CharField(max_length=255, blank=True)
valor_fideicomiso = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
ingreso_monetario = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
porcentaje = models.DecimalField(max_digits=5, decimal_places=2, null=True, blank=True)
institucion_fiduciaria = models.CharField(max_length=255, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_paises = models.ForeignKey(CatPaises, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_fideicomisos = models.ForeignKey(CatTiposFideicomisos, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_operaciones = models.ForeignKey(CatTiposOperaciones, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
activos_bienes = models.ForeignKey(ActivosBienes, on_delete=models.DO_NOTHING)
def fideicomitente(self):
try:
return [o.otra_persona for o in BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.FIDEICOMITENTE)]
except Exception as e:
return None
def fideicomisario(self):
try:
return [o.otra_persona for o in BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.FIDEICOMISARIO)]
except Exception as e:
return None
def fiduciario(self):
try:
return [o.otra_persona for o in BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.FIDUCIARIO)]
except Exception as e:
return None
def observacion(self):
return [self.observaciones]
def porcentajes(self):
try:
return [BienesPersonas.objects.filter(
activos_bienes=self.activos_bienes,
cat_tipo_participacion_id=BienesPersonas.DECLARANTE,
).first().porcentaje]
except Exception:
    return None
def columna_uno(self):
if self.cat_tipos_operaciones:
return u"{}".format(self.cat_tipos_operaciones)
else:
return u""
def columna_dos(self):
if self.cat_tipos_fideicomisos:
return u"{}".format(self.cat_tipos_fideicomisos)
else:
return u""
def columna_tres(self):
if self.nombre_fideicomiso:
return u"{}".format(self.nombre_fideicomiso)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:fideicomisos-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:fideicomisos-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:fideicomisos',
kwargs={'folio': self.declaraciones.folio})
def tipo_operacion(self):
try:
if self.cat_tipos_operaciones.default:
return u"{} {}".format(self.cat_tipos_operaciones,
self.otra_operacion)
else:
return u"{}".format(self.cat_tipos_operaciones)
except Exception as e:
return u""
def tipo_fideicomiso(self):
try:
if self.cat_tipos_fideicomisos.default:
return u"{} {}".format(self.cat_tipos_fideicomisos,
self.otro_fideicomiso)
else:
return u"{}".format(self.cat_tipos_fideicomisos)
except Exception as e:
return u""
class BienesIntangibles(models.Model):
otra_operacion = models.CharField(max_length=255, blank=True)
descripcion = models.CharField(max_length=255, blank=True)
otra_dependencia = models.CharField(max_length=255, blank=True)
num_registro = models.CharField(max_length=255, blank=True)
fecha_registro = models.DateField(null=True, blank=True)
otro_sector = models.CharField(max_length=255, blank=True)
precio_adquisicion = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
otra_forma = models.CharField(max_length=255, blank=True)
fecha_vencimiento = models.DateField(null=True, blank=True)
activos_bienes = models.ForeignKey(ActivosBienes, on_delete=models.DO_NOTHING)
precio_total_adquisicion = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_formas_adquisiciones = models.ForeignKey(CatFormasAdquisiciones, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_sectores_industria = models.ForeignKey(CatSectoresIndustria, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_operaciones = models.ForeignKey(CatTiposOperaciones, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
cat_entes_publicos = models.ForeignKey(CatEntesPublicos, on_delete=models.DO_NOTHING, null=True, blank=True)
otro_ente = models.CharField(max_length=255, blank=True, null=True)
def observacion(self):
return [self.observaciones]
def columna_uno(self):
if self.cat_tipos_operaciones:
return u"{}".format(self.cat_tipos_operaciones)
else:
return u""
def columna_dos(self):
if self.cat_formas_adquisiciones:
return u"{}".format(self.cat_formas_adquisiciones)
else:
return u""
def columna_tres(self):
if self.cat_sectores_industria:
return u"{}".format(self.cat_sectores_industria)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:bienes-intangibles-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:bienes-intangibles-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:bienes-intangibles',
kwargs={'folio': self.declaraciones.folio})
def tipo_operacion(self):
try:
if self.cat_tipos_operaciones.default:
return u"{} {}".format(self.cat_tipos_operaciones,
self.otra_operacion)
else:
return u"{}".format(self.cat_tipos_operaciones)
except Exception as e:
return u""
def sectores_industrias(self):
try:
if self.cat_sectores_industria.default:
return u"{} {}".format(self.cat_sectores_industria,
self.otro_sector)
else:
return u"{}".format(self.cat_sectores_industria)
except Exception as e:
return u""
def forma_adquisicion(self):
try:
if self.cat_formas_adquisiciones.default:
return u"{} {}".format(self.cat_formas_adquisiciones,
self.otra_forma)
else:
return u"{}".format(self.cat_formas_adquisiciones)
except Exception as e:
return u""
def copropietario(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.COPROPIETARIO)
except Exception as e:
return None
class CuentasPorCobrar(models.Model):
fecha_prestamo = models.DateField(null=True, blank=True)
monto_original = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
tasa_interes = models.DecimalField(max_digits=5, decimal_places=2, null=True, blank=True)
num_registro = models.CharField(max_length=255, blank=True)
saldo_pendiente = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
fecha_vencimiento = models.DateField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_monedas = models.ForeignKey(CatMonedas, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
info_personal_var = models.ForeignKey(InfoPersonalVar, on_delete=models.DO_NOTHING, blank=True, null=True)
activos_bienes = models.ForeignKey(ActivosBienes, on_delete=models.DO_NOTHING, blank=True, null=True)
def observacion(self):
return [self.observaciones]
def columna_uno(self):
try:
if self.info_personal_var.es_fisica:
return u"{} {} {}".format(
self.info_personal_var.nombres,
self.info_personal_var.apellido1,
self.info_personal_var.apellido2,
)
else:
return u"{}".format(self.info_personal_var.razon_social)
except Exception as e:
return u""
def columna_dos(self):
if self.monto_original:
return u"{}".format(self.monto_original)
else:
return u""
def columna_tres(self):
if self.saldo_pendiente:
return u"{}".format(self.saldo_pendiente)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:cuentas-por-cobrar-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:cuentas-por-cobrar-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:cuentas-por-cobrar',
kwargs={'folio': self.declaraciones.folio})
def prestatario(self):
try:
return BienesPersonas.objects.filter(activos_bienes = self.activos_bienes,cat_tipo_participacion_id=BienesPersonas.PRESTATARIO_O_DEUDOR)
except Exception as e:
return None
class BeneficiosEspecie(models.Model):
tipo_bien_servicio = models.CharField(max_length=255, blank=True)
valor_mercado = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
otro_familiar = models.CharField(max_length=255, blank=True)
otra_relacion = models.CharField(max_length=255, blank=True)
otra_relacion_familiar = models.CharField(max_length=255, blank=True)
fecha_inicio = models.DateField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
cat_sectores_industria = models.ForeignKey(CatSectoresIndustria, on_delete=models.DO_NOTHING, null=True, blank=True)
cat_tipos_relaciones_personales = models.ForeignKey(CatTiposRelacionesPersonales, on_delete=models.DO_NOTHING, null=True, blank=True)
declaraciones = models.ForeignKey(Declaraciones, on_delete=models.DO_NOTHING)
domicilios = models.ForeignKey(Domicilios, on_delete=models.DO_NOTHING)
observaciones = models.ForeignKey(Observaciones, on_delete=models.DO_NOTHING)
info_personal_var = models.ForeignKey(InfoPersonalVar, on_delete=models.DO_NOTHING)
def observacion(self):
return [self.observaciones]
def columna_uno(self):
if self.tipo_bien_servicio:
return u"{}".format(self.tipo_bien_servicio)
else:
return u""
def columna_dos(self):
if self.cat_tipos_relaciones_personales:
return u"{}".format(self.cat_tipos_relaciones_personales)
else:
return u""
def columna_tres(self):
if self.cat_sectores_industria:
return u"{}".format(self.cat_sectores_industria)
else:
return u""
def url_editar(self):
return reverse_lazy('declaracion:beneficios-especie-editar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_borrar(self):
return reverse_lazy('declaracion:beneficios-especie-borrar',
kwargs={'folio': self.declaraciones.folio,
'pk': self.id})
def url_todos(self):
return reverse_lazy('declaracion:beneficios-especie',
kwargs={'folio': self.declaraciones.folio})
def sectores_industrias(self):
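# NOTE: BeneficiosEspecie defines no otro_sector field, so the default branch below
# raises AttributeError and the method returns the empty-string fallback.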
try:
if self.cat_sectores_industria.default:
return u"{} {}".format(self.cat_sectores_industria,
self.otro_sector)
else:
return u"{}".format(self.cat_sectores_industria)
except Exception as e:
return u""
def tipo_relacion(self):
try:
if self.cat_tipos_relaciones_personales.default:
return u"{} {}".format(self.cat_tipos_relaciones_personales,
self.otro_familiar)
else:
return u"{}".format(self.cat_tipos_relaciones_personales)
except Exception as e:
return u""
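# --- Hedged sketch (editorial addition, not part of the original models) ---
# The try/except methods above all repeat one pattern: render the catalog label,
# appending the free-text "otro"/"otra" value when the catalog row is the default
# "other" entry, and fall back to an empty string on any error. A shared
# module-level helper could express it once; the names below are hypothetical.
def _etiqueta_con_otro(cat_value, otro):
    try:
        if cat_value.default:
            return u"{} {}".format(cat_value, otro)
        return u"{}".format(cat_value)
    except Exception:
        return u""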
| 44.462185 | 169 | 0.651602 | 4,717 | 42,328 | 5.626246 | 0.053 | 0.036663 | 0.038434 | 0.053808 | 0.890312 | 0.884773 | 0.865971 | 0.823656 | 0.780738 | 0.764234 | 0 | 0.00762 | 0.255906 | 42,328 | 951 | 170 | 44.508938 | 0.834995 | 0 | 0 | 0.746973 | 0 | 0 | 0.032484 | 0.022349 | 0 | 0 | 0 | 0 | 0 | 1 | 0.131961 | false | 0 | 0.004843 | 0.046005 | 0.64891 | 0.001211 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
70c7b891dae82e58160d97763aa9503d6ae7b58d | 11,513 | py | Python | database_management/database_management.py | nareshram256/EnergyManagementSystem | 2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa | [
"MIT"
] | 9 | 2020-04-24T14:34:16.000Z | 2022-01-25T07:16:03.000Z | database_management/database_management.py | casemsee/EnergyManagementSystem | 2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa | [
"MIT"
] | null | null | null | database_management/database_management.py | casemsee/EnergyManagementSystem | 2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa | [
"MIT"
] | 7 | 2019-09-19T13:26:02.000Z | 2021-11-27T09:53:54.000Z | # Database query and record funtion for universal energy management system
import time
from modelling.database.database_format import db_short_term, db_middle_term, db_long_term
class database_storage_operation():
# Database operation in universal energy management system
def default_long_term_operation_data(*args):  # build a default long-term row with every field zeroed
Target_time = args[0]
default_result = db_long_term \
(TIME_STAMP=Target_time,
AC_PD=0,
NAC_PD=0,
DC_PD=0,
NDC_PD=0,
PV_PG=0,
WP_PG=0,
PRICE=0,
DG_STATUS=0,
DG_PG=0,
UG_STATUS=0,
UG_PG=0,
BIC_PG=0,
BAT_PG=0,
BAT_SOC=0,
PMG=0,
V_DC=0,
PV_CURT=0,
WP_CURT=0,
AC_SHED=0,
NAC_SHED=0,
DC_SHED=0,
NDC_SHED=0,
COST=0,)
return default_result
def default_middle_term_operation_data(*args):
Target_time = args[0]
default_result = db_middle_term \
(TIME_STAMP=Target_time,
AC_PD=0,
NAC_PD=0,
DC_PD=0,
NDC_PD=0,
PV_PG=0,
WP_PG=0,
PRICE=0,
DG_STATUS=0,
DG_PG=0,
UG_STATUS=0,
UG_PG=0,
BIC_PG=0,
BAT_PG=0,
BAT_SOC=0,
PMG=0,
V_DC=0,
PV_CURT=0,
WP_CURT=0,
AC_SHED=0,
NAC_SHED=0,
DC_SHED=0,
NDC_SHED=0,
COST=0)
return default_result
def default_short_term_operation_data(*args):
Target_time = args[0]
default_result = db_short_term \
(TIME_STAMP=Target_time,
AC_PD=0,
AC_QD=0,
NAC_PD=0,
NAC_QD=0,
DC_PD=0,
NDC_PD=0,
PV_PG=0,
WP_PG=0,
DG_STATUS=0,
DG_PG=0,
DG_QG=0,
UG_STATUS=0,
UG_PG=0,
UG_QG=0,
BIC_PG=0,
BIC_QG=0,
BAT_PG=0,
BAT_SOC=0,
PMG=0,
V_DC=0,
PV_CURT=0,
WP_CURT=0,
AC_SHED=0,
NAC_SHED=0,
DC_SHED=0,
NDC_SHED=0,
COST=0)
return default_result
def database_query(query_info, session):
    # Placeholder: validate the incoming query information against the databases (not implemented yet)
    print(time.time())
def database_record(*args):
# Store the obtained dispatch result in the corresponding database table
session = args[0]
model = args[1]
Target_time = args[2]
## control mode: UC, ED or OPF
function = args[3]
database_target = {"UC": db_long_term,
"ED": db_middle_term,
"OPF": db_short_term}
if function == "OPF":
from configuration.configuration_time_line import default_time
if session.query(database_target[function]).filter(
database_target[function].TIME_STAMP == Target_time).count() == 0:
row = database_storage_operation.default_short_term_operation_data(Target_time)
row.AC_PD = model["Load_ac"]["PD"]
row.AC_QD = model["Load_ac"]["QD"]
row.NAC_PD = model["Load_nac"]["PD"]
row.NAC_QD = model["Load_nac"]["QD"]
row.DC_PD = model["Load_dc"]["PD"]
row.NDC_PD = model["Load_ndc"]["PD"]
row.PV_PG = model["PV"]["PG"]
row.WP_PG = model["WP"]["PG"]
row.DG_STATUS = model["DG"]["COMMAND_START_UP"]
row.DG_PG = model["DG"]["COMMAND_PG"]
row.DG_QG = model["DG"]["COMMAND_QG"]
row.UG_STATUS = model["UG"]["COMMAND_START_UP"]
row.UG_PG = model["UG"]["COMMAND_PG"]
row.UG_QG = model["UG"]["COMMAND_QG"]
row.BIC_PG = model["BIC"]["COMMAND_DC2AC"] - model["BIC"]["COMMAND_AC2DC"]
row.BIC_QG = model["BIC"]["COMMAND_Q"]
row.BAT_PG = model["ESS"]["COMMAND_PG"]
# Update the SOC record information
if row.BAT_PG > 0:
row.BAT_SOC = model["ESS"]["SOC"] - row.BAT_PG * default_time["Time_step_opf"] / model["ESS"][
"EFF_DIS"] / model["ESS"]["CAP"] / 3600
else:
row.BAT_SOC = model["ESS"]["SOC"] - row.BAT_PG * model["ESS"]["EFF_CH"] * default_time[
"Time_step_opf"] / model["ESS"]["CAP"] / 3600
row.PMG = model["PMG"]
row.V_DC = model["V_DC"]
row.PV_CURT = model["PV"]["COMMAND_CURT"]
row.WP_CURT = model["WP"]["COMMAND_CURT"]
row.AC_SHED = model["Load_ac"]["COMMAND_SHED"]
row.NAC_SHED = model["Load_nac"]["COMMAND_SHED"]
row.DC_SHED = model["Load_dc"]["COMMAND_SHED"]
row.NDC_SHED = model["Load_ndc"]["COMMAND_SHED"]
row.COST = model["COST"]
session.add(row)
else:
row = session.query(database_target[function]).filter(database_target[function].TIME_STAMP == Target_time).first()
row.AC_PD = model["Load_ac"]["PD"]
row.AC_QD = model["Load_ac"]["QD"]
row.NAC_PD = model["Load_nac"]["PD"]
row.NAC_QD = model["Load_nac"]["QD"]
row.DC_PD = model["Load_dc"]["PD"]
row.NDC_PD = model["Load_ndc"]["PD"]
row.PV_PG = model["PV"]["PG"]
row.WP_PG = model["WP"]["PG"]
row.DG_STATUS = model["DG"]["COMMAND_START_UP"]
row.DG_PG = model["DG"]["COMMAND_PG"]
row.DG_QG = model["DG"]["COMMAND_QG"]
row.UG_STATUS = model["UG"]["COMMAND_START_UP"]
row.UG_PG = model["UG"]["COMMAND_PG"]
row.UG_QG = model["UG"]["COMMAND_QG"]
row.BIC_PG = model["BIC"]["COMMAND_DC2AC"]-model["BIC"]["COMMAND_AC2DC"]
row.BIC_QG = model["BIC"]["COMMAND_Q"]
row.BAT_PG = model["ESS"]["COMMAND_PG"]
# Update the SOC record information
if row.BAT_PG > 0:
row.BAT_SOC = model["ESS"]["SOC"] - row.BAT_PG * default_time["Time_step_opf"] / model["ESS"][
"EFF_DIS"] / model["ESS"]["CAP"] / 3600
else:
row.BAT_SOC = model["ESS"]["SOC"] - row.BAT_PG * model["ESS"]["EFF_CH"] * default_time[
"Time_step_opf"] / model["ESS"]["CAP"] / 3600
row.PMG = model["PMG"]
row.V_DC = model["V_DC"]
row.PV_CURT = model["PV"]["COMMAND_CURT"]
row.WP_CURT = model["WP"]["COMMAND_CURT"]
row.AC_SHED = model["Load_ac"]["COMMAND_SHED"]
row.NAC_SHED = model["Load_nac"]["COMMAND_SHED"]
row.DC_SHED = model["Load_dc"]["COMMAND_SHED"]
row.NDC_SHED = model["Load_ndc"]["COMMAND_SHED"]
row.COST = model["COST"]
session.commit()
elif function == "ED":
from configuration.configuration_time_line import default_look_ahead_time_step
from configuration.configuration_time_line import default_time
T = default_look_ahead_time_step["Look_ahead_time_ed_time_step"]
delta_T = default_time["Time_step_ed"]
for i in range(T):
if session.query(database_target[function]).filter(database_target[function].TIME_STAMP == Target_time + i * delta_T).count() == 0:
blank_row = database_storage_operation.default_middle_term_operation_data(Target_time + i * delta_T)
blank_row.AC_PD = model["Load_ac"]["PD"][i]
blank_row.NAC_PD = model["Load_nac"]["PD"][i]
blank_row.DC_PD = model["Load_dc"]["PD"][i]
blank_row.NDC_PD = model["Load_ndc"]["PD"][i]
blank_row.PV_PG = model["PV"]["PG"][i]
blank_row.WP_PG = model["WP"]["PG"][i]
blank_row.DG_STATUS = model["DG"]["COMMAND_START_UP"][i]
blank_row.DG_PG = model["DG"]["COMMAND_PG"][i]
blank_row.UG_STATUS = model["UG"]["COMMAND_START_UP"][i]
blank_row.UG_PG = model["UG"]["COMMAND_PG"][i]
blank_row.BIC_PG = model["BIC"]["COMMAND_DC2AC"][i]-model["BIC"]["COMMAND_AC2DC"][i]
blank_row.BAT_PG = model["ESS"]["COMMAND_PG"][i]
blank_row.BAT_SOC = model["ESS"]["SOC"][i]
blank_row.PMG = model["PMG"][i]
blank_row.PV_CURT = model["PV"]["COMMAND_CURT"][i]
blank_row.WP_CURT = model["WP"]["COMMAND_CURT"][i]
blank_row.AC_SHED = model["Load_ac"]["COMMAND_SHED"][i]
blank_row.NAC_SHED = model["Load_nac"]["COMMAND_SHED"][i]
blank_row.DC_SHED = model["Load_dc"]["COMMAND_SHED"][i]
blank_row.NDC_SHED = model["Load_ndc"]["COMMAND_SHED"][i]
blank_row.COST = model["COST"][i]
session.add(blank_row)
session.commit()
else:
row = session.query(database_target[function]).filter(database_target[function].TIME_STAMP == Target_time + i * delta_T).first()
row.AC_PD = model["Load_ac"]["PD"][i]
row.NAC_PD = model["Load_nac"]["PD"][i]
row.DC_PD = model["Load_dc"]["PD"][i]
row.NDC_PD = model["Load_ndc"]["PD"][i]
row.PV_PG = model["PV"]["PG"][i]
row.WP_PG = model["WP"]["PG"][i]
row.DG_STATUS = model["DG"]["COMMAND_START_UP"][i]
row.DG_PG = model["DG"]["COMMAND_PG"][i]
row.UG_STATUS = model["UG"]["COMMAND_START_UP"][i]
row.UG_PG = model["UG"]["COMMAND_PG"][i]
row.BIC_PG = model["BIC"]["COMMAND_DC2AC"][i]-model["BIC"]["COMMAND_AC2DC"][i]
row.BAT_PG = model["ESS"]["COMMAND_PG"][i]
row.BAT_SOC = model["ESS"]["SOC"][i]
row.PMG = model["PMG"][i]
row.PV_CURT = model["PV"]["COMMAND_CURT"][i]
row.WP_CURT = model["WP"]["COMMAND_CURT"][i]
row.AC_SHED = model["Load_ac"]["COMMAND_SHED"][i]
row.NAC_SHED = model["Load_nac"]["COMMAND_SHED"][i]
row.DC_SHED = model["Load_dc"]["COMMAND_SHED"][i]
row.NDC_SHED = model["Load_ndc"]["COMMAND_SHED"][i]
row.COST = model["COST"][i]
session.commit()
else:
from configuration.configuration_time_line import default_look_ahead_time_step
from configuration.configuration_time_line import default_time
T = default_look_ahead_time_step["Look_ahead_time_uc_time_step"]
delta_T = default_time["Time_step_uc"]
for i in range(T):
if session.query(database_target[function]).filter(database_target[function].TIME_STAMP == Target_time + i * delta_T).count() == 0:
blank_row = database_storage_operation.default_long_term_operation_data(Target_time + i * delta_T)
blank_row.AC_PD = model["Load_ac"]["PD"][i]
blank_row.NAC_PD = model["Load_nac"]["PD"][i]
blank_row.DC_PD = model["Load_dc"]["PD"][i]
blank_row.NDC_PD = model["Load_ndc"]["PD"][i]
blank_row.PV_PG = model["PV"]["PG"][i]
blank_row.WP_PG = model["WP"]["PG"][i]
blank_row.DG_STATUS = model["DG"]["COMMAND_START_UP"][i]
blank_row.DG_PG = model["DG"]["COMMAND_PG"][i]
blank_row.UG_STATUS = model["UG"]["COMMAND_START_UP"][i]
blank_row.UG_PG = model["UG"]["COMMAND_PG"][i]
blank_row.BIC_PG = model["BIC"]["COMMAND_DC2AC"][i]-model["BIC"]["COMMAND_AC2DC"][i]
blank_row.BAT_PG = model["ESS"]["COMMAND_PG"][i]
blank_row.BAT_SOC = model["ESS"]["SOC"][i]
blank_row.PMG = model["PMG"][i]
blank_row.PV_CURT = model["PV"]["COMMAND_CURT"][i]
blank_row.WP_CURT = model["WP"]["COMMAND_CURT"][i]
blank_row.AC_SHED = model["Load_ac"]["COMMAND_SHED"][i]
blank_row.NAC_SHED = model["Load_nac"]["COMMAND_SHED"][i]
blank_row.DC_SHED = model["Load_dc"]["COMMAND_SHED"][i]
blank_row.NDC_SHED = model["Load_ndc"]["COMMAND_SHED"][i]
blank_row.COST = model["COST"][i]
session.add(blank_row)
session.commit()
else:
row = session.query(database_target[function]).filter(database_target[function].TIME_STAMP == Target_time + i * delta_T).first()
row.AC_PD = model["Load_ac"]["PD"][i]
row.NAC_PD = model["Load_nac"]["PD"][i]
row.DC_PD = model["Load_dc"]["PD"][i]
row.NDC_PD = model["Load_ndc"]["PD"][i]
row.PV_PG = model["PV"]["PG"][i]
row.WP_PG = model["WP"]["PG"][i]
row.DG_STATUS = model["DG"]["COMMAND_START_UP"][i]
row.DG_PG = model["DG"]["COMMAND_PG"][i]
row.UG_STATUS = model["UG"]["COMMAND_START_UP"][i]
row.UG_PG = model["UG"]["COMMAND_PG"][i]
row.BIC_PG = model["BIC"]["COMMAND_DC2AC"][i] - model["BIC"]["COMMAND_AC2DC"][i]
row.BAT_PG = model["ESS"]["COMMAND_PG"][i]
row.BAT_SOC = model["ESS"]["SOC"][i]
row.PMG = model["PMG"][i]
row.PV_CURT = model["PV"]["COMMAND_CURT"][i]
row.WP_CURT = model["WP"]["COMMAND_CURT"][i]
row.AC_SHED = model["Load_ac"]["COMMAND_SHED"][i]
row.NAC_SHED = model["Load_nac"]["COMMAND_SHED"][i]
row.DC_SHED = model["Load_dc"]["COMMAND_SHED"][i]
row.NDC_SHED = model["Load_ndc"]["COMMAND_SHED"][i]
row.COST = model["COST"][i]
session.commit()
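# --- Hedged worked example (editorial addition, standalone) ---
# The SOC bookkeeping used in the OPF branch above, with illustrative numbers:
# discharging (PG > 0) divides by the discharge efficiency, charging multiplies
# by the charge efficiency. All figures below are assumptions, not system data.
def _soc_update_sketch():
    ess = {"SOC": 0.5, "EFF_DIS": 0.95, "EFF_CH": 0.95, "CAP": 100}  # CAP in kWh
    bat_pg, time_step = 20.0, 3600  # discharge 20 kW for one hour (3600 s)
    if bat_pg > 0:
        soc = ess["SOC"] - bat_pg * time_step / ess["EFF_DIS"] / ess["CAP"] / 3600
    else:
        soc = ess["SOC"] - bat_pg * ess["EFF_CH"] * time_step / ess["CAP"] / 3600
    return soc  # 0.5 - 20 / 0.95 / 100 ≈ 0.2895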
| 37.019293 | 135 | 0.640233 | 1,850 | 11,513 | 3.681081 | 0.060541 | 0.068722 | 0.052863 | 0.016153 | 0.907783 | 0.890896 | 0.890896 | 0.882232 | 0.859618 | 0.858737 | 0 | 0.011898 | 0.175106 | 11,513 | 310 | 136 | 37.13871 | 0.70517 | 0.029532 | 0 | 0.852113 | 0 | 0 | 0.175222 | 0.005017 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017606 | false | 0 | 0.024648 | 0 | 0.056338 | 0.003521 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cb00df299d281df66bbbaa0679fae9ab4f81eed8 | 10,623 | py | Python | tests/test_plotting/test_matplotlib/test_embedding_plot.py | tttthomasssss/whatlies | 7fc7d9ede0f4bb314d74d03d10bd971ca65bc697 | [
"Apache-2.0"
] | 1 | 2021-03-30T11:55:42.000Z | 2021-03-30T11:55:42.000Z | tests/test_plotting/test_matplotlib/test_embedding_plot.py | tttthomasssss/whatlies | 7fc7d9ede0f4bb314d74d03d10bd971ca65bc697 | [
"Apache-2.0"
] | null | null | null | tests/test_plotting/test_matplotlib/test_embedding_plot.py | tttthomasssss/whatlies | 7fc7d9ede0f4bb314d74d03d10bd971ca65bc697 | [
"Apache-2.0"
] | null | null | null | import pytest
import numpy as np
import matplotlib as mpl
import scipy.spatial.distance as scipy_distance
from whatlies import Embedding, EmbeddingSet
from common import validate_plot_general_properties
"""
*Guide*
Here are the plot's propertites which could be checked (some of them may not be applicable
for a particular plot):
- type: the class type of collection in matplotlib to ensure the right kind of plot
has been created.
- data: the position of points, arrows or texts in the plot, depending on the plot's type.
- x_label: label of x-axis.
- y_label: label of y-axis.
- tilte: title of the plot.
- aspect: aspect ratio of plot, usually 'auto' unless `axis_option` argument is set.
- color: color of points (in scatter plot) or arrows (in arrow plot). It should be rgba values.
- label: label of points (in scatter plot) or arrows (in arrow plot).
"""


@pytest.fixture
def embset():
    names = ["red", "blue", "green", "yellow", "white"]
    vectors = np.random.rand(5, 3)
    embeddings = [Embedding(name, vector) for name, vector in zip(names, vectors)]
    return EmbeddingSet(*embeddings)


def test_embedding_plot_scatter_integer_axis(embset):
    emb = embset["red"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(kind="scatter", x_axis=0, y_axis=1)
    props = {
        "type": mpl.collections.PathCollection,
        "data": emb.vector[0:2],
        "x_label": "Dimension 0",
        "y_label": "Dimension 1",
        "title": "",
        "color": mpl.colors.to_rgba_array("steelblue"),
        "label": "red",
        "aspect": "auto",
    }
    assert np.array_equal(ax.collections[0].get_offsets()[0], props["data"])
    assert isinstance(ax.collections[0], props["type"])
    assert np.array_equal(ax.collections[0].get_facecolor(), props["color"])
    assert ax.texts[0].get_text() == props["label"]
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_arrow_integer_axis(embset):
    emb = embset["red"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(
        kind="arrow",
        x_axis=0,
        y_axis=2,
        color="blue",
        x_label="xlabel",
        y_label="ylabel",
        title="test plot",
        annot=False,
    )
    props = {
        "type": mpl.collections.PolyCollection,
        "data": np.concatenate((emb.vector[0:1], emb.vector[2:3])),
        "x_label": "xlabel",
        "y_label": "ylabel",
        "title": "test plot",
        "color": mpl.colors.to_rgba_array("blue"),
        "aspect": "auto",
        # Not applicable: label
    }
    UV = np.concatenate((ax.collections[1].U, ax.collections[1].V))
    assert isinstance(ax.collections[1], props["type"])
    assert np.array_equal(UV, props["data"])
    assert np.array_equal(ax.collections[1].get_facecolor(), props["color"])
    assert ax.texts == []
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_text_integer_axis(embset):
    emb = embset["red"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(kind="text", x_axis=1, y_axis=2)
    props = {
        "data": np.concatenate((emb.vector[1:2] + 0.01, emb.vector[2:3])),
        "x_label": "Dimension 1",
        "y_label": "Dimension 2",
        "title": "",
        "label": "red",
        "aspect": "auto",
        # Not applicable: type, color
    }
    assert np.array_equal(ax.texts[0].get_position(), props["data"])
    assert ax.collections == []
    assert ax.texts[0].get_text() == props["label"]
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_scatter_emb_axis(embset):
    emb = embset["red"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(kind="scatter", x_axis=embset["blue"], y_axis=embset["green"])
    props = {
        "type": mpl.collections.PathCollection,
        "data": np.array([emb > embset["blue"], emb > embset["green"]]),
        "x_label": "blue",
        "y_label": "green",
        "color": mpl.colors.to_rgba_array("steelblue"),
        "title": "",
        "label": "red",
        "aspect": "auto",
    }
    assert np.array_equal(ax.collections[0].get_offsets()[0], props["data"])
    assert isinstance(ax.collections[0], props["type"])
    assert ax.texts[0].get_text() == props["label"]
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_arrow_emb_axis(embset):
    emb = embset["red"] + embset["yellow"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(
        kind="arrow",
        x_axis=embset["blue"],
        y_axis=embset["green"],
        color="yellow",
        show_ops=True,
        axis_option="equal",
    )
    props = {
        "type": mpl.collections.PolyCollection,
        "data": np.array([emb > embset["blue"], emb > embset["green"]]),
        "x_label": "blue",
        "y_label": "green",
        "color": mpl.colors.to_rgba_array("yellow"),
        "title": "",
        "label": "(red + yellow)",
        "aspect": 1.0,
    }
    UV = np.concatenate((ax.collections[1].U, ax.collections[1].V))
    assert isinstance(ax.collections[1], props["type"])
    assert np.array_equal(UV, props["data"])
    assert np.array_equal(ax.collections[1].get_facecolor(), props["color"])
    assert ax.texts[0].get_text() == props["label"]
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_arrow_integer_axis_with_str_axis_metric(embset):
    emb = embset["red"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(
        kind="arrow",
        x_axis=0,
        y_axis=2,
        axis_metric="euclidean",
        color="blue",
        x_label="xlabel",
        y_label="ylabel",
        title="test plot",
        annot=False,
    )
    props = {
        "type": mpl.collections.PolyCollection,
        "data": np.concatenate((emb.vector[0:1], emb.vector[2:3])),
        "x_label": "xlabel",
        "y_label": "ylabel",
        "title": "test plot",
        "color": mpl.colors.to_rgba_array("blue"),
        "aspect": "auto",
        # Not applicable: label
    }
    UV = np.concatenate((ax.collections[1].U, ax.collections[1].V))
    assert isinstance(ax.collections[1], props["type"])
    assert np.array_equal(UV, props["data"])
    assert np.array_equal(ax.collections[1].get_facecolor(), props["color"])
    assert ax.texts == []
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_scatter_emb_axis_with_common_str_axis_metric(embset):
    emb = embset["red"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(
        kind="scatter",
        x_axis=embset["blue"],
        y_axis=embset["green"],
        axis_metric="cosine_distance",
    )
    props = {
        "type": mpl.collections.PathCollection,
        "data": np.array(
            [
                scipy_distance.cosine(emb.vector, embset["blue"].vector),
                scipy_distance.cosine(emb.vector, embset["green"].vector),
            ]
        ),
        "x_label": "blue",
        "y_label": "green",
        "color": mpl.colors.to_rgba_array("steelblue"),
        "title": "",
        "label": "red",
        "aspect": "auto",
    }
    assert np.array_equal(ax.collections[0].get_offsets()[0], props["data"])
    assert isinstance(ax.collections[0], props["type"])
    assert ax.texts[0].get_text() == props["label"]
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_arrow_emb_axis_with_different_str_axis_metric(embset):
    emb = embset["red"] + embset["yellow"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(
        kind="arrow",
        x_axis=embset["blue"],
        y_axis=embset["green"],
        axis_metric=["euclidean", "cosine_similarity"],
        color="yellow",
        show_ops=True,
        axis_option="equal",
    )
    props = {
        "type": mpl.collections.PolyCollection,
        "data": np.array(
            [
                scipy_distance.euclidean(emb.vector, embset["blue"].vector),
                1.0 - scipy_distance.cosine(emb.vector, embset["green"].vector),
            ]
        ),
        "x_label": "blue",
        "y_label": "green",
        "color": mpl.colors.to_rgba_array("yellow"),
        "title": "",
        "label": "(red + yellow)",
        "aspect": 1.0,
    }
    UV = np.concatenate((ax.collections[1].U, ax.collections[1].V))
    assert isinstance(ax.collections[1], props["type"])
    assert np.array_equal(UV, props["data"])
    assert np.array_equal(ax.collections[1].get_facecolor(), props["color"])
    assert ax.texts[0].get_text() == props["label"]
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_arrow_emb_axis_with_callable_y_axis_metric(embset):
    emb = embset["red"] + embset["yellow"]
    fig, ax = mpl.pyplot.subplots()
    emb.plot(
        kind="arrow",
        x_axis=embset["blue"],
        y_axis=embset["green"],
        axis_metric=[None, scipy_distance.correlation],
        x_label="xaxis",
        y_label="corr",
        color="yellow",
        show_ops=True,
        axis_option="equal",
    )
    props = {
        "type": mpl.collections.PolyCollection,
        "data": np.array(
            [
                emb > embset["blue"],
                scipy_distance.correlation(emb.vector, embset["green"].vector),
            ]
        ),
        "x_label": "xaxis",
        "y_label": "corr",
        "color": mpl.colors.to_rgba_array("yellow"),
        "title": "",
        "label": "(red + yellow)",
        "aspect": 1.0,
    }
    UV = np.concatenate((ax.collections[1].U, ax.collections[1].V))
    assert isinstance(ax.collections[1], props["type"])
    assert np.array_equal(UV, props["data"])
    assert np.array_equal(ax.collections[1].get_facecolor(), props["color"])
    assert ax.texts[0].get_text() == props["label"]
    validate_plot_general_properties(ax, props)
    mpl.pyplot.close(fig)


def test_embedding_plot_raises_error_when_incorrect_axis_type(embset):
    emb = embset["red"]
    with pytest.raises(ValueError, match="The `x_axis` value should be"):
        emb.plot(x_axis=1.0)
    with pytest.raises(ValueError, match="The `y_axis` value should be"):
        emb.plot(y_axis="blue")


def test_embedding_plot_raises_error_when_incorrect_axis_metric(embset):
    emb = embset["red"]
    with pytest.raises(ValueError, match="The given axis metric is not supported"):
        emb.plot(x_axis=embset["blue"], axis_metric="correlation")
    with pytest.raises(ValueError, match="The given axis metric type is not"):
        emb.plot(y_axis=embset["blue"], axis_metric=1)
| 34.048077 | 99 | 0.612539 | 1,363 | 10,623 | 4.605282 | 0.115187 | 0.057989 | 0.044607 | 0.043014 | 0.819818 | 0.788275 | 0.759599 | 0.742871 | 0.725187 | 0.702884 | 0 | 0.00977 | 0.22922 | 10,623 | 311 | 100 | 34.157556 | 0.756839 | 0.006684 | 0 | 0.703008 | 0 | 0 | 0.128846 | 0 | 0 | 0 | 0 | 0 | 0.12406 | 1 | 0.045113 | false | 0 | 0.022556 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cb3eed68bc52b7760df6ee95907b48cdfe93950b | 169 | py | Python | titan/tools_pkg/pkgdependency/props.py | mnieber/gen | 65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9 | [
"MIT"
] | null | null | null | titan/tools_pkg/pkgdependency/props.py | mnieber/gen | 65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9 | [
"MIT"
] | null | null | null | titan/tools_pkg/pkgdependency/props.py | mnieber/gen | 65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9 | [
"MIT"
] | null | null | null | from titan.tools_pkg.pipdependency.props import list_of_package_names


def get_pkg_names():
    return list_of_package_names(lambda tool: tool.pkg_dependencies.merged)
| 28.166667 | 75 | 0.840237 | 26 | 169 | 5.076923 | 0.692308 | 0.090909 | 0.19697 | 0.272727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094675 | 169 | 5 | 76 | 33.8 | 0.862745 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 9 |
cb4c34d6cfc1a5e995ba4bb9469c2b3c7c417edd | 244,364 | py | Python | splunk_sdk/search/v3alpha1/gen_models.py | splunk/splunk-cloud-sdk-python | 7cc19473f5409103bf9f7c46ddb529905f682533 | [
"ECL-2.0",
"Apache-2.0"
] | 12 | 2019-08-01T06:16:17.000Z | 2021-04-16T20:00:02.000Z | splunk_sdk/search/v3alpha1/gen_models.py | splunk/splunk-cloud-sdk-python | 7cc19473f5409103bf9f7c46ddb529905f682533 | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2020-09-27T12:03:24.000Z | 2021-08-06T18:01:32.000Z | splunk_sdk/search/v3alpha1/gen_models.py | splunk/splunk-cloud-sdk-python | 7cc19473f5409103bf9f7c46ddb529905f682533 | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2019-08-20T17:49:27.000Z | 2022-03-27T16:39:10.000Z | # Copyright © 2021 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# [http://www.apache.org/licenses/LICENSE-2.0]
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
############# This file is auto-generated. Do not edit! #############
"""
SDC Service: Splunk Search service
Use the Search service in Splunk Cloud Services to dispatch, review, and manage searches and search jobs. You can finalize or cancel jobs, retrieve search results, and request search-related configurations from the Metadata Catalog service in Splunk Cloud Services.
OpenAPI spec version: v3alpha1
Generated by: https://openapi-generator.tech
"""
from datetime import datetime
from typing import List, Dict
from splunk_sdk.common.sscmodel import SSCModel
from splunk_sdk.base_client import dictify, inflate
from enum import Enum


class Dataset(SSCModel):

    from_dict_handlers = dict()

    @staticmethod
    def _from_dict(model: dict) -> "Dataset":
        def default_handler(model: dict) -> "Dataset":
            instance = Dataset.__new__(Dataset)
            instance._attrs = model
            return instance
        kind = model['kind']
        handler = Dataset.from_dict_handlers.get(kind, default_handler)
        return handler(model)
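
    # A hedged sketch of how this registry is meant to be used: a concrete
    # dataset kind registers its own deserializer under its 'kind' value so
    # that `Dataset._from_dict` can dispatch to it. The kind name and class
    # below are illustrative, not taken from the service spec:
    #
    #     Dataset.from_dict_handlers["index"] = IndexDataset._from_dict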

    def __init__(self, appclientidcreatedby: "str" = None, appclientidmodifiedby: "str" = None, created: "str" = None, createdby: "str" = None, description: "str" = None, id: "str" = None, modified: "str" = None, modifiedby: "str" = None, name: "str" = None, namespace: "str" = None, owner: "str" = None, resourcename: "str" = None, summary: "str" = None, title: "str" = None, **extra):
        """Dataset"""
        self._attrs = dict()
        if created is not None:
            self._attrs["created"] = created
        if createdby is not None:
            self._attrs["createdby"] = createdby
        if id is not None:
            self._attrs["id"] = id
        if modified is not None:
            self._attrs["modified"] = modified
        if modifiedby is not None:
            self._attrs["modifiedby"] = modifiedby
        if name is not None:
            self._attrs["name"] = name
        if owner is not None:
            self._attrs["owner"] = owner
        if resourcename is not None:
            self._attrs["resourcename"] = resourcename
        if appclientidcreatedby is not None:
            self._attrs["appclientidcreatedby"] = appclientidcreatedby
        if appclientidmodifiedby is not None:
            self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
        if description is not None:
            self._attrs["description"] = description
        if namespace is not None:
            self._attrs["namespace"] = namespace
        if summary is not None:
            self._attrs["summary"] = summary
        if title is not None:
            self._attrs["title"] = title
        for k, v in extra.items():
            self._attrs[k] = v

    @property
    def created(self) -> "str":
        """ Gets the created of this Dataset.
        The date and time the object was created.
        """
        return self._attrs.get("created")

    @created.setter
    def created(self, created: "str"):
        """Sets the created of this Dataset.
        The date and time the object was created.

        :param created: The created of this Dataset.
        :type: str
        """
        if created is None:
            raise ValueError("Invalid value for `created`, must not be `None`")
        self._attrs["created"] = created

    @property
    def createdby(self) -> "str":
        """ Gets the createdby of this Dataset.
        The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
        """
        return self._attrs.get("createdby")

    @createdby.setter
    def createdby(self, createdby: "str"):
        """Sets the createdby of this Dataset.
        The name of the user who created the object. This value is obtained from the bearer token and may not be changed.

        :param createdby: The createdby of this Dataset.
        :type: str
        """
        if createdby is None:
            raise ValueError("Invalid value for `createdby`, must not be `None`")
        self._attrs["createdby"] = createdby

    @property
    def id(self) -> "str":
        """ Gets the id of this Dataset.
        A unique dataset ID.
        """
        return self._attrs.get("id")

    @id.setter
    def id(self, id: "str"):
        """Sets the id of this Dataset.
        A unique dataset ID.

        :param id: The id of this Dataset.
        :type: str
        """
        if id is None:
            raise ValueError("Invalid value for `id`, must not be `None`")
        self._attrs["id"] = id

    @property
    def modified(self) -> "str":
        """ Gets the modified of this Dataset.
        The date and time the object was modified.
        """
        return self._attrs.get("modified")

    @modified.setter
    def modified(self, modified: "str"):
        """Sets the modified of this Dataset.
        The date and time the object was modified.

        :param modified: The modified of this Dataset.
        :type: str
        """
        if modified is None:
            raise ValueError("Invalid value for `modified`, must not be `None`")
        self._attrs["modified"] = modified

    @property
    def modifiedby(self) -> "str":
        """ Gets the modifiedby of this Dataset.
        The name of the user who most recently modified the object.
        """
        return self._attrs.get("modifiedby")

    @modifiedby.setter
    def modifiedby(self, modifiedby: "str"):
        """Sets the modifiedby of this Dataset.
        The name of the user who most recently modified the object.

        :param modifiedby: The modifiedby of this Dataset.
        :type: str
        """
        if modifiedby is None:
            raise ValueError("Invalid value for `modifiedby`, must not be `None`")
        self._attrs["modifiedby"] = modifiedby

    @property
    def name(self) -> "str":
        """ Gets the name of this Dataset.
        The dataset name. Dataset names must be unique within each module.
        """
        return self._attrs.get("name")

    @name.setter
    def name(self, name: "str"):
        """Sets the name of this Dataset.
        The dataset name. Dataset names must be unique within each module.

        :param name: The name of this Dataset.
        :type: str
        """
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")
        self._attrs["name"] = name

    @property
    def owner(self) -> "str":
        """ Gets the owner of this Dataset.
        The name of the object's owner.
        """
        return self._attrs.get("owner")

    @owner.setter
    def owner(self, owner: "str"):
        """Sets the owner of this Dataset.
        The name of the object's owner.

        :param owner: The owner of this Dataset.
        :type: str
        """
        if owner is None:
            raise ValueError("Invalid value for `owner`, must not be `None`")
        self._attrs["owner"] = owner

    @property
    def resourcename(self) -> "str":
        """ Gets the resourcename of this Dataset.
        The dataset name qualified by the module name.
        """
        return self._attrs.get("resourcename")

    @resourcename.setter
    def resourcename(self, resourcename: "str"):
        """Sets the resourcename of this Dataset.
        The dataset name qualified by the module name.

        :param resourcename: The resourcename of this Dataset.
        :type: str
        """
        if resourcename is None:
            raise ValueError("Invalid value for `resourcename`, must not be `None`")
        self._attrs["resourcename"] = resourcename

    @property
    def appclientidcreatedby(self) -> "str":
        """ Gets the appclientidcreatedby of this Dataset.
        The app client ID of the app that created the dataset.
        """
        return self._attrs.get("appclientidcreatedby")

    @appclientidcreatedby.setter
    def appclientidcreatedby(self, appclientidcreatedby: "str"):
        """Sets the appclientidcreatedby of this Dataset.
        The app client ID of the app that created the dataset.

        :param appclientidcreatedby: The appclientidcreatedby of this Dataset.
        :type: str
        """
        self._attrs["appclientidcreatedby"] = appclientidcreatedby

    @property
    def appclientidmodifiedby(self) -> "str":
        """ Gets the appclientidmodifiedby of this Dataset.
        The app client ID of the app that last modified the dataset.
        """
        return self._attrs.get("appclientidmodifiedby")

    @appclientidmodifiedby.setter
    def appclientidmodifiedby(self, appclientidmodifiedby: "str"):
        """Sets the appclientidmodifiedby of this Dataset.
        The app client ID of the app that last modified the dataset.

        :param appclientidmodifiedby: The appclientidmodifiedby of this Dataset.
        :type: str
        """
        self._attrs["appclientidmodifiedby"] = appclientidmodifiedby

    @property
    def description(self) -> "str":
        """ Gets the description of this Dataset.
        Detailed description of the dataset.
        """
        return self._attrs.get("description")

    @description.setter
    def description(self, description: "str"):
        """Sets the description of this Dataset.
        Detailed description of the dataset.

        :param description: The description of this Dataset.
        :type: str
        """
        self._attrs["description"] = description

    @property
    def namespace(self) -> "str":
        """ Gets the namespace of this Dataset.
        The name of the namespace that contains the dataset.
        """
        return self._attrs.get("namespace")

    @namespace.setter
    def namespace(self, namespace: "str"):
        """Sets the namespace of this Dataset.
        The name of the namespace that contains the dataset.

        :param namespace: The namespace of this Dataset.
        :type: str
        """
        self._attrs["namespace"] = namespace

    @property
    def summary(self) -> "str":
        """ Gets the summary of this Dataset.
        Summary of the dataset's purpose.
        """
        return self._attrs.get("summary")

    @summary.setter
    def summary(self, summary: "str"):
        """Sets the summary of this Dataset.
        Summary of the dataset's purpose.

        :param summary: The summary of this Dataset.
        :type: str
        """
        self._attrs["summary"] = summary

    @property
    def title(self) -> "str":
        """ Gets the title of this Dataset.
        The title of the dataset. Does not have to be unique.
        """
        return self._attrs.get("title")

    @title.setter
    def title(self, title: "str"):
        """Sets the title of this Dataset.
        The title of the dataset. Does not have to be unique.

        :param title: The title of this Dataset.
        :type: str
        """
        self._attrs["title"] = title

    def to_dict(self):
        raise NotImplementedError()
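
# `Dataset` is effectively abstract: `to_dict` raises NotImplementedError and
# `_from_dict` dispatches on the model's 'kind' field. A hedged sketch of
# deserializing a raw model dict (the 'kind' value and fields are illustrative):
#
#     raw = {"kind": "index", "name": "main", "id": "ds1"}
#     ds = Dataset._from_dict(raw)  # no handler registered, so default_handler
#     assert ds.name == "main" and ds.id == "ds1"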


class DatasetPATCH(SSCModel):

    @staticmethod
    def _from_dict(model: dict) -> "DatasetPATCH":
        instance = DatasetPATCH.__new__(DatasetPATCH)
        instance._attrs = model
        return instance

    def __init__(self, module: "str" = None, name: "str" = None, owner: "str" = None, **extra):
        """DatasetPATCH"""
        self._attrs = dict()
        if module is not None:
            self._attrs["module"] = module
        if name is not None:
            self._attrs["name"] = name
        if owner is not None:
            self._attrs["owner"] = owner
        for k, v in extra.items():
            self._attrs[k] = v

    @property
    def module(self) -> "str":
        """ Gets the module of this DatasetPATCH.
        The name of the module to reassign the dataset into.
        """
        return self._attrs.get("module")

    @module.setter
    def module(self, module: "str"):
        """Sets the module of this DatasetPATCH.
        The name of the module to reassign the dataset into.

        :param module: The module of this DatasetPATCH.
        :type: str
        """
        self._attrs["module"] = module

    @property
    def name(self) -> "str":
        """ Gets the name of this DatasetPATCH.
        The dataset name. Dataset names must be unique within each module.
        """
        return self._attrs.get("name")

    @name.setter
    def name(self, name: "str"):
        """Sets the name of this DatasetPATCH.
        The dataset name. Dataset names must be unique within each module.

        :param name: The name of this DatasetPATCH.
        :type: str
        """
        self._attrs["name"] = name

    @property
    def owner(self) -> "str":
        """ Gets the owner of this DatasetPATCH.
        The name of the dataset owner. This value is obtained from the bearer token.
        """
        return self._attrs.get("owner")

    @owner.setter
    def owner(self, owner: "str"):
        """Sets the owner of this DatasetPATCH.
        The name of the dataset owner. This value is obtained from the bearer token.

        :param owner: The owner of this DatasetPATCH.
        :type: str
        """
        self._attrs["owner"] = owner

    def to_dict(self):
        return {k: v for (k, v) in self._attrs.items() if v is not None}


class FieldPOST(SSCModel):

    @staticmethod
    def _from_dict(model: dict) -> "FieldPOST":
        instance = FieldPOST.__new__(FieldPOST)
        instance._attrs = model
        return instance

    def __init__(self, datatype: "FieldDataType" = None, description: "str" = None, fieldtype: "FieldType" = None, indexed: "bool" = None, name: "str" = None, prevalence: "FieldPrevalence" = None, summary: "str" = None, title: "str" = None, **extra):
        """FieldPOST"""
        self._attrs = dict()
        if datatype is not None:
            self._attrs["datatype"] = datatype
        if description is not None:
            self._attrs["description"] = description
        if fieldtype is not None:
            self._attrs["fieldtype"] = fieldtype
        if indexed is not None:
            self._attrs["indexed"] = indexed
        if name is not None:
            self._attrs["name"] = name
        if prevalence is not None:
            self._attrs["prevalence"] = prevalence
        if summary is not None:
            self._attrs["summary"] = summary
        if title is not None:
            self._attrs["title"] = title
        for k, v in extra.items():
            self._attrs[k] = v

    @property
    def datatype(self) -> "FieldDataType":
        """ Gets the datatype of this FieldPOST.
        """
        return FieldDataType.from_value(self._attrs.get("datatype"))

    @datatype.setter
    def datatype(self, datatype: "FieldDataType"):
        """Sets the datatype of this FieldPOST.

        :param datatype: The datatype of this FieldPOST.
        :type: FieldDataType
        """
        if isinstance(datatype, Enum):
            self._attrs["datatype"] = datatype.value
        else:
            self._attrs["datatype"] = datatype  # If you supply a string, we presume you know the service will take it.

    @property
    def description(self) -> "str":
        """ Gets the description of this FieldPOST.
        The field description.
        """
        return self._attrs.get("description")

    @description.setter
    def description(self, description: "str"):
        """Sets the description of this FieldPOST.
        The field description.

        :param description: The description of this FieldPOST.
        :type: str
        """
        self._attrs["description"] = description

    @property
    def fieldtype(self) -> "FieldType":
        """ Gets the fieldtype of this FieldPOST.
        """
        return FieldType.from_value(self._attrs.get("fieldtype"))

    @fieldtype.setter
    def fieldtype(self, fieldtype: "FieldType"):
        """Sets the fieldtype of this FieldPOST.

        :param fieldtype: The fieldtype of this FieldPOST.
        :type: FieldType
        """
        if isinstance(fieldtype, Enum):
            self._attrs["fieldtype"] = fieldtype.value
        else:
            self._attrs["fieldtype"] = fieldtype  # If you supply a string, we presume you know the service will take it.

    @property
    def indexed(self) -> "bool":
        """ Gets the indexed of this FieldPOST.
        Whether or not the field has been indexed.
        """
        return self._attrs.get("indexed")

    @indexed.setter
    def indexed(self, indexed: "bool"):
        """Sets the indexed of this FieldPOST.
        Whether or not the field has been indexed.

        :param indexed: The indexed of this FieldPOST.
        :type: bool
        """
        self._attrs["indexed"] = indexed

    @property
    def name(self) -> "str":
        """ Gets the name of this FieldPOST.
        The field name.
        """
        return self._attrs.get("name")

    @name.setter
    def name(self, name: "str"):
        """Sets the name of this FieldPOST.
        The field name.

        :param name: The name of this FieldPOST.
        :type: str
        """
        self._attrs["name"] = name

    @property
    def prevalence(self) -> "FieldPrevalence":
        """ Gets the prevalence of this FieldPOST.
        """
        return FieldPrevalence.from_value(self._attrs.get("prevalence"))

    @prevalence.setter
    def prevalence(self, prevalence: "FieldPrevalence"):
        """Sets the prevalence of this FieldPOST.

        :param prevalence: The prevalence of this FieldPOST.
        :type: FieldPrevalence
        """
        if isinstance(prevalence, Enum):
            self._attrs["prevalence"] = prevalence.value
        else:
            self._attrs["prevalence"] = prevalence  # If you supply a string, we presume you know the service will take it.

    @property
    def summary(self) -> "str":
        """ Gets the summary of this FieldPOST.
        The field summary.
        """
        return self._attrs.get("summary")

    @summary.setter
    def summary(self, summary: "str"):
        """Sets the summary of this FieldPOST.
        The field summary.

        :param summary: The summary of this FieldPOST.
        :type: str
        """
        self._attrs["summary"] = summary

    @property
    def title(self) -> "str":
        """ Gets the title of this FieldPOST.
        The field title.
        """
        return self._attrs.get("title")

    @title.setter
    def title(self, title: "str"):
        """Sets the title of this FieldPOST.
        The field title.

        :param title: The title of this FieldPOST.
        :type: str
        """
        self._attrs["title"] = title

    def to_dict(self):
        return {k: v for (k, v) in self._attrs.items() if v is not None}
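
# The enum-typed setters above accept either an enum member or a raw string;
# raw strings are stored unvalidated. A hedged usage sketch with illustrative
# values:
#
#     field = FieldPOST(name="status_code", datatype=FieldDataType.NUMBER,
#                       fieldtype=FieldType.MEASURE, prevalence=FieldPrevalence.ALL)
#     field.datatype = "STRING"  # a raw string is also accepted
#     assert field.to_dict()["datatype"] == "STRING"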


class FieldDataType(str, Enum):
    DATE = "DATE"
    NUMBER = "NUMBER"
    OBJECT_ID = "OBJECT_ID"
    STRING = "STRING"
    UNKNOWN = "UNKNOWN"

    @staticmethod
    def from_value(value: str):
        if value == "DATE":
            return FieldDataType.DATE
        if value == "NUMBER":
            return FieldDataType.NUMBER
        if value == "OBJECT_ID":
            return FieldDataType.OBJECT_ID
        if value == "STRING":
            return FieldDataType.STRING
        if value == "UNKNOWN":
            return FieldDataType.UNKNOWN


class FieldType(str, Enum):
    DIMENSION = "DIMENSION"
    MEASURE = "MEASURE"
    UNKNOWN = "UNKNOWN"

    @staticmethod
    def from_value(value: str):
        if value == "DIMENSION":
            return FieldType.DIMENSION
        if value == "MEASURE":
            return FieldType.MEASURE
        if value == "UNKNOWN":
            return FieldType.UNKNOWN


class FieldPrevalence(str, Enum):
    ALL = "ALL"
    SOME = "SOME"
    UNKNOWN = "UNKNOWN"

    @staticmethod
    def from_value(value: str):
        if value == "ALL":
            return FieldPrevalence.ALL
        if value == "SOME":
            return FieldPrevalence.SOME
        if value == "UNKNOWN":
            return FieldPrevalence.UNKNOWN
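
# Note that `from_value` falls through and implicitly returns None for a value
# outside the enumeration, rather than raising like the plain Enum constructor:
#
#     assert FieldDataType.from_value("DATE") is FieldDataType.DATE
#     assert FieldDataType.from_value("bogus") is None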


class DatasetPOST(SSCModel):

    @staticmethod
    def _from_dict(model: dict) -> "DatasetPOST":
        instance = DatasetPOST.__new__(DatasetPOST)
        instance._attrs = model
        return instance

    def __init__(self, name: "str", fields: "List[FieldPOST]" = None, id: "str" = None, module: "str" = None, **extra):
        """DatasetPOST"""
        self._attrs = dict()
        if name is not None:
            self._attrs["name"] = name
        if fields is not None:
            self._attrs["fields"] = fields
        if id is not None:
            self._attrs["id"] = id
        if module is not None:
            self._attrs["module"] = module
        for k, v in extra.items():
            self._attrs[k] = v

    @property
    def name(self) -> "str":
        """ Gets the name of this DatasetPOST.
        The dataset name. Dataset names must be unique within each module.
        """
        return self._attrs.get("name")

    @name.setter
    def name(self, name: "str"):
        """Sets the name of this DatasetPOST.
        The dataset name. Dataset names must be unique within each module.

        :param name: The name of this DatasetPOST.
        :type: str
        """
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")
        self._attrs["name"] = name

    @property
    def fields(self) -> "List[FieldPOST]":
        """ Gets the fields of this DatasetPOST.
        The fields to be associated with this dataset.
        """
        return [FieldPOST._from_dict(i) for i in self._attrs.get("fields")]

    @fields.setter
    def fields(self, fields: "List[FieldPOST]"):
        """Sets the fields of this DatasetPOST.
        The fields to be associated with this dataset.

        :param fields: The fields of this DatasetPOST.
        :type: List[FieldPOST]
        """
        self._attrs["fields"] = fields

    @property
    def id(self) -> "str":
        """ Gets the id of this DatasetPOST.
        A unique dataset ID. A random ID is used if not provided.
        """
        return self._attrs.get("id")

    @id.setter
    def id(self, id: "str"):
        """Sets the id of this DatasetPOST.
        A unique dataset ID. A random ID is used if not provided.

        :param id: The id of this DatasetPOST.
        :type: str
        """
        self._attrs["id"] = id

    @property
    def module(self) -> "str":
        """ Gets the module of this DatasetPOST.
        The name of the module to create the new dataset in.
        """
        return self._attrs.get("module")

    @module.setter
    def module(self, module: "str"):
        """Sets the module of this DatasetPOST.
        The name of the module to create the new dataset in.

        :param module: The module of this DatasetPOST.
        :type: str
        """
        self._attrs["module"] = module

    def to_dict(self):
        return {k: v for (k, v) in self._attrs.items() if v is not None}
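
# A hedged sketch of composing a create-dataset payload from the models above;
# the names and module are illustrative, and nested FieldPOST objects are
# presumably serialized via the base client's `dictify` when a request is made:
#
#     payload = DatasetPOST(
#         name="weblogs",
#         module="examples",
#         fields=[FieldPOST(name="status", datatype=FieldDataType.NUMBER)],
#     )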


class TypeEnum(str, Enum):
    INFO = "INFO"
    DEBUG = "DEBUG"
    FATAL = "FATAL"
    ERROR = "ERROR"

    @staticmethod
    def from_value(value: str):
        if value == "INFO":
            return TypeEnum.INFO
        if value == "DEBUG":
            return TypeEnum.DEBUG
        if value == "FATAL":
            return TypeEnum.FATAL
        if value == "ERROR":
            return TypeEnum.ERROR


class Message(SSCModel):

    @staticmethod
    def _from_dict(model: dict) -> "Message":
        instance = Message.__new__(Message)
        instance._attrs = model
        return instance

    def __init__(self, text: "str" = None, type: "str" = None, **extra):
        """Message"""
        self._attrs = dict()
        if text is not None:
            self._attrs["text"] = text
        if type is not None:
            self._attrs["type"] = type
        for k, v in extra.items():
            self._attrs[k] = v

    @property
    def text(self) -> "str":
        """ Gets the text of this Message.
        """
        return self._attrs.get("text")

    @text.setter
    def text(self, text: "str"):
        """Sets the text of this Message.

        :param text: The text of this Message.
        :type: str
        """
        self._attrs["text"] = text

    @property
    def type(self) -> "TypeEnum":
        """ Gets the type of this Message.
        """
        return TypeEnum.from_value(self._attrs.get("type"))

    @type.setter
    def type(self, type: "str"):
        """Sets the type of this Message.

        :param type: The type of this Message.
        :type: str
        """
        if isinstance(type, Enum):
            self._attrs["type"] = type.value
        else:
            self._attrs["type"] = type  # If you supply a string, we presume you know the service will take it.

    def to_dict(self):
        return {k: v for (k, v) in self._attrs.items() if v is not None}


class QueryParameters(SSCModel):

    @staticmethod
    def _from_dict(model: dict) -> "QueryParameters":
        instance = QueryParameters.__new__(QueryParameters)
        instance._attrs = model
        return instance

    def __init__(self, earliest: "str" = '-24h@h', latest: "str" = 'now', relative_time_anchor: "datetime" = None, timezone: "object" = None, **extra):
        """QueryParameters"""
        self._attrs = dict()
        if earliest is not None:
            self._attrs["earliest"] = earliest
        if latest is not None:
            self._attrs["latest"] = latest
        if relative_time_anchor is not None:
            self._attrs["relativeTimeAnchor"] = relative_time_anchor
        if timezone is not None:
            self._attrs["timezone"] = timezone
        for k, v in extra.items():
            self._attrs[k] = v

    @property
    def earliest(self) -> "str":
        """ Gets the earliest of this QueryParameters.
        The earliest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time, or UTC in seconds using the ISO-8601 (%FT%T.%Q) format. For example, 2019-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.
        """
        return self._attrs.get("earliest")

    @earliest.setter
    def earliest(self, earliest: "str"):
        """Sets the earliest of this QueryParameters.
        The earliest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time, or UTC in seconds using the ISO-8601 (%FT%T.%Q) format. For example, 2019-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.

        :param earliest: The earliest of this QueryParameters.
        :type: str
        """
        self._attrs["earliest"] = earliest

    @property
    def latest(self) -> "str":
        """ Gets the latest of this QueryParameters.
        The latest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time, or UTC in seconds using the ISO-8601 (%FT%T.%Q) format. For example, 2019-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.
        """
        return self._attrs.get("latest")

    @latest.setter
    def latest(self, latest: "str"):
        """Sets the latest of this QueryParameters.
        The latest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time, or UTC in seconds using the ISO-8601 (%FT%T.%Q) format. For example, 2019-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.

        :param latest: The latest of this QueryParameters.
        :type: str
        """
        self._attrs["latest"] = latest

    @property
    def relative_time_anchor(self) -> "datetime":
        """ Gets the relative_time_anchor of this QueryParameters.
        Relative values for the 'earliest' and 'latest' parameters snap to the unit that you specify. For example, if 'earliest' is set to -d@d, the unit is day. If 'relativeTimeAnchor' is set to '2020-10-05T13:15:30Z' then 'resolvedEarliest' is snapped to '2020-10-05T00:00:00Z', which is the day. Hours, minutes, and seconds are dropped. If no 'relativeTimeAnchor' is specified, the default value is set to the time the search job was created.
        """
        return self._attrs.get("relativeTimeAnchor")

    @relative_time_anchor.setter
    def relative_time_anchor(self, relative_time_anchor: "datetime"):
        """Sets the relative_time_anchor of this QueryParameters.
        Relative values for the 'earliest' and 'latest' parameters snap to the unit that you specify. For example, if 'earliest' is set to -d@d, the unit is day. If 'relativeTimeAnchor' is set to '2020-10-05T13:15:30Z' then 'resolvedEarliest' is snapped to '2020-10-05T00:00:00Z', which is the day. Hours, minutes, and seconds are dropped. If no 'relativeTimeAnchor' is specified, the default value is set to the time the search job was created.

        :param relative_time_anchor: The relative_time_anchor of this QueryParameters.
        :type: datetime
        """
        self._attrs["relativeTimeAnchor"] = relative_time_anchor

    @property
    def timezone(self) -> "object":
        """ Gets the timezone of this QueryParameters.
        The timezone that relative time specifiers are based off of. Timezone only applies to relative time literals for 'earliest' and 'latest'. If UNIX time or UTC format is used for 'earliest' and 'latest', this field is ignored. For the list of supported timezone formats, see https://docs.splunk.com/Documentation/Splunk/latest/Data/Applytimezoneoffsetstotimestamps#zoneinfo_.28TZ.29_database (type: string, default: \"GMT\").
        """
        return self._attrs.get("timezone")

    @timezone.setter
    def timezone(self, timezone: "object"):
        """Sets the timezone of this QueryParameters.
        The timezone that relative time specifiers are based off of. Timezone only applies to relative time literals for 'earliest' and 'latest'. If UNIX time or UTC format is used for 'earliest' and 'latest', this field is ignored. For the list of supported timezone formats, see https://docs.splunk.com/Documentation/Splunk/latest/Data/Applytimezoneoffsetstotimestamps#zoneinfo_.28TZ.29_database (type: string, default: \"GMT\").

        :param timezone: The timezone of this QueryParameters.
        :type: object
        """
        self._attrs["timezone"] = timezone

    def to_dict(self):
        return {k: v for (k, v) in self._attrs.items() if v is not None}
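
# A hedged usage sketch based on the time semantics documented above: relative
# specifiers snap to the unit given ('-24h@h' means 24 hours ago, snapped to
# the hour), and absolute times are ISO-8601 in GMT. Values are illustrative:
#
#     params = QueryParameters(earliest="-24h@h", latest="now")
#     assert params.to_dict() == {"earliest": "-24h@h", "latest": "now"}
#     window = QueryParameters(earliest="2019-01-25T13:15:30Z",
#                              latest="2019-01-25T14:15:30Z")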


class SearchStatus(str, Enum):
    RUNNING = "running"
    DONE = "done"
    CANCELED = "canceled"
    FAILED = "failed"

    @staticmethod
    def from_value(value: str):
        if value == "running":
            return SearchStatus.RUNNING
        if value == "done":
            return SearchStatus.DONE
        if value == "canceled":
            return SearchStatus.CANCELED
        if value == "failed":
            return SearchStatus.FAILED
class DeleteSearchJob(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "DeleteSearchJob":
instance = DeleteSearchJob.__new__(DeleteSearchJob)
instance._attrs = model
return instance
def __init__(self, index: "str", module: "str", predicate: "str", allow_side_effects: "bool" = True, collect_event_summary: "bool" = False, collect_field_summary: "bool" = False, collect_time_buckets: "bool" = False, completion_time: "str" = None, dispatch_time: "str" = None, enable_preview: "bool" = False, extract_all_fields: "bool" = False, extract_fields: "str" = '', max_time: "int" = 3600, messages: "List[Message]" = None, name: "str" = None, percent_complete: "int" = 0, preview_available: "str" = 'false', query: "str" = None, query_parameters: "QueryParameters" = None, required_freshness: "int" = 0, resolved_earliest: "str" = None, resolved_latest: "str" = None, results_available: "int" = 0, results_preview_available: "int" = 0, sid: "str" = None, status: "SearchStatus" = None, **extra):
"""DeleteSearchJob"""
self._attrs = dict()
if index is not None:
self._attrs["index"] = index
if module is not None:
self._attrs["module"] = module
if predicate is not None:
self._attrs["predicate"] = predicate
if allow_side_effects is not None:
self._attrs["allowSideEffects"] = allow_side_effects
if collect_event_summary is not None:
self._attrs["collectEventSummary"] = collect_event_summary
if collect_field_summary is not None:
self._attrs["collectFieldSummary"] = collect_field_summary
if collect_time_buckets is not None:
self._attrs["collectTimeBuckets"] = collect_time_buckets
if completion_time is not None:
self._attrs["completionTime"] = completion_time
if dispatch_time is not None:
self._attrs["dispatchTime"] = dispatch_time
if enable_preview is not None:
self._attrs["enablePreview"] = enable_preview
if extract_all_fields is not None:
self._attrs["extractAllFields"] = extract_all_fields
if extract_fields is not None:
self._attrs["extractFields"] = extract_fields
if max_time is not None:
self._attrs["maxTime"] = max_time
if messages is not None:
self._attrs["messages"] = messages
if name is not None:
self._attrs["name"] = name
if percent_complete is not None:
self._attrs["percentComplete"] = percent_complete
if preview_available is not None:
self._attrs["previewAvailable"] = preview_available
if query is not None:
self._attrs["query"] = query
if query_parameters is not None:
self._attrs["queryParameters"] = query_parameters.to_dict()
if required_freshness is not None:
self._attrs["requiredFreshness"] = required_freshness
if resolved_earliest is not None:
self._attrs["resolvedEarliest"] = resolved_earliest
if resolved_latest is not None:
self._attrs["resolvedLatest"] = resolved_latest
if results_available is not None:
self._attrs["resultsAvailable"] = results_available
if results_preview_available is not None:
self._attrs["resultsPreviewAvailable"] = results_preview_available
if sid is not None:
self._attrs["sid"] = sid
if status is not None:
self._attrs["status"] = status
for k, v in extra.items():
self._attrs[k] = v
@property
def index(self) -> "str":
""" Gets the index of this DeleteSearchJob.
The index to delete events from.
"""
return self._attrs.get("index")
@index.setter
def index(self, index: "str"):
"""Sets the index of this DeleteSearchJob.
The index to delete events from.
:param index: The index of this DeleteSearchJob.
:type: str
"""
if index is None:
raise ValueError("Invalid value for `index`, must not be `None`")
self._attrs["index"] = index
@property
def module(self) -> "str":
""" Gets the module of this DeleteSearchJob.
The module to run the delete search job in. The default module is used if module field is empty.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this DeleteSearchJob.
The module to run the delete search job in. The default module is used if module field is empty.
:param module: The module of this DeleteSearchJob.
:type: str
"""
if module is None:
raise ValueError("Invalid value for `module`, must not be `None`")
self._attrs["module"] = module
@property
def predicate(self) -> "str":
""" Gets the predicate of this DeleteSearchJob.
The predicate expression that identifies the events to delete from the index. This expression must return true or false. To delete all events from the index, specify \"true\" instead of an expression.
"""
return self._attrs.get("predicate")
@predicate.setter
def predicate(self, predicate: "str"):
"""Sets the predicate of this DeleteSearchJob.
The predicate expression that identifies the events to delete from the index. This expression must return true or false. To delete all events from the index, specify \"true\" instead of an expression.
:param predicate: The predicate of this DeleteSearchJob.
:type: str
"""
if predicate is None:
raise ValueError("Invalid value for `predicate`, must not be `None`")
self._attrs["predicate"] = predicate
@property
def allow_side_effects(self) -> "bool":
""" Gets the allow_side_effects of this DeleteSearchJob.
Specifies that the delete search job will contain side effects, with possible security risks.
"""
return self._attrs.get("allowSideEffects")
@allow_side_effects.setter
def allow_side_effects(self, allow_side_effects: "bool"):
"""Sets the allow_side_effects of this DeleteSearchJob.
Specifies that the delete search job will contain side effects, with possible security risks.
:param allow_side_effects: The allow_side_effects of this DeleteSearchJob.
:type: bool
"""
self._attrs["allowSideEffects"] = allow_side_effects
@property
def collect_event_summary(self) -> "bool":
""" Gets the collect_event_summary of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
"""
return self._attrs.get("collectEventSummary")
@collect_event_summary.setter
def collect_event_summary(self, collect_event_summary: "bool"):
"""Sets the collect_event_summary of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
:param collect_event_summary: The collect_event_summary of this DeleteSearchJob.
:type: bool
"""
self._attrs["collectEventSummary"] = collect_event_summary
@property
def collect_field_summary(self) -> "bool":
""" Gets the collect_field_summary of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
"""
return self._attrs.get("collectFieldSummary")
@collect_field_summary.setter
def collect_field_summary(self, collect_field_summary: "bool"):
"""Sets the collect_field_summary of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
:param collect_field_summary: The collect_field_summary of this DeleteSearchJob.
:type: bool
"""
self._attrs["collectFieldSummary"] = collect_field_summary
@property
def collect_time_buckets(self) -> "bool":
""" Gets the collect_time_buckets of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
"""
return self._attrs.get("collectTimeBuckets")
@collect_time_buckets.setter
def collect_time_buckets(self, collect_time_buckets: "bool"):
"""Sets the collect_time_buckets of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
:param collect_time_buckets: The collect_time_buckets of this DeleteSearchJob.
:type: bool
"""
self._attrs["collectTimeBuckets"] = collect_time_buckets
@property
def completion_time(self) -> "str":
""" Gets the completion_time of this DeleteSearchJob.
The time, in GMT, that the search job is finished. Empty if the search job has not completed.
"""
return self._attrs.get("completionTime")
@completion_time.setter
def completion_time(self, completion_time: "str"):
"""Sets the completion_time of this DeleteSearchJob.
The time, in GMT, that the search job is finished. Empty if the search job has not completed.
:param completion_time: The completion_time of this DeleteSearchJob.
:type: str
"""
self._attrs["completionTime"] = completion_time
@property
def dispatch_time(self) -> "str":
""" Gets the dispatch_time of this DeleteSearchJob.
The time, in GMT, that the search job is dispatched.
"""
return self._attrs.get("dispatchTime")
@dispatch_time.setter
def dispatch_time(self, dispatch_time: "str"):
"""Sets the dispatch_time of this DeleteSearchJob.
The time, in GMT, that the search job is dispatched.
:param dispatch_time: The dispatch_time of this DeleteSearchJob.
:type: str
"""
self._attrs["dispatchTime"] = dispatch_time
@property
def enable_preview(self) -> "bool":
""" Gets the enable_preview of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
"""
return self._attrs.get("enablePreview")
@enable_preview.setter
def enable_preview(self, enable_preview: "bool"):
"""Sets the enable_preview of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
:param enable_preview: The enable_preview of this DeleteSearchJob.
:type: bool
"""
self._attrs["enablePreview"] = enable_preview
@property
def extract_all_fields(self) -> "bool":
""" Gets the extract_all_fields of this DeleteSearchJob.
Specifies whether the Search service should extract all of the available fields in the data, including fields not mentioned in the SPL for the search job. Set to 'false' for better search peformance. The 'extractAllFields' parameter is deprecated as of version v3alpha1. Although this parameter continues to function, it might be removed in a future version. Use the 'extractFields' parameter instead.
"""
return self._attrs.get("extractAllFields")
@extract_all_fields.setter
def extract_all_fields(self, extract_all_fields: "bool"):
"""Sets the extract_all_fields of this DeleteSearchJob.
Specifies whether the Search service should extract all of the available fields in the data, including fields not mentioned in the SPL for the search job. Set to 'false' for better search peformance. The 'extractAllFields' parameter is deprecated as of version v3alpha1. Although this parameter continues to function, it might be removed in a future version. Use the 'extractFields' parameter instead.
:param extract_all_fields: The extract_all_fields of this DeleteSearchJob.
:type: bool
"""
self._attrs["extractAllFields"] = extract_all_fields
@property
def extract_fields(self) -> "str":
""" Gets the extract_fields of this DeleteSearchJob.
Specifies how the Search service should extract fields. Valid values include 'all', 'none', or 'indexed'. 'all' will extract all fields, 'indexed' will extract only indexed fields, and 'none' will extract only the default fields. This parameter overwrites the value of the 'extractAllFields' parameter. Set to 'none' for better search performance.
"""
return self._attrs.get("extractFields")
@extract_fields.setter
def extract_fields(self, extract_fields: "str"):
"""Sets the extract_fields of this DeleteSearchJob.
Specifies how the Search service should extract fields. Valid values include 'all', 'none', or 'indexed'. 'all' will extract all fields, 'indexed' will extract only indexed fields, and 'none' will extract only the default fields. This parameter overwrites the value of the 'extractAllFields' parameter. Set to 'none' for better search performance.
:param extract_fields: The extract_fields of this DeleteSearchJob.
:type: str
"""
self._attrs["extractFields"] = extract_fields
@property
def max_time(self) -> "int":
""" Gets the max_time of this DeleteSearchJob.
The amount of time, in seconds, to run the delete search job before finalizing the search. The maximum value is 3600 seconds (1 hour).
"""
return self._attrs.get("maxTime")
@max_time.setter
def max_time(self, max_time: "int"):
"""Sets the max_time of this DeleteSearchJob.
The amount of time, in seconds, to run the delete search job before finalizing the search. The maximum value is 3600 seconds (1 hour).
:param max_time: The max_time of this DeleteSearchJob.
:type: int
"""
self._attrs["maxTime"] = max_time
@property
def messages(self) -> "List[Message]":
""" Gets the messages of this DeleteSearchJob.
"""
return [Message._from_dict(i) for i in self._attrs.get("messages")]
@messages.setter
def messages(self, messages: "List[Message]"):
"""Sets the messages of this DeleteSearchJob.
:param messages: The messages of this DeleteSearchJob.
:type: List[Message]
"""
self._attrs["messages"] = messages
@property
def name(self) -> "str":
""" Gets the name of this DeleteSearchJob.
The name of the created search job.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this DeleteSearchJob.
The name of the created search job.
:param name: The name of this DeleteSearchJob.
:type: str
"""
self._attrs["name"] = name
@property
def percent_complete(self) -> "int":
""" Gets the percent_complete of this DeleteSearchJob.
An estimate of the percent of time remaining before the delete search job completes.
"""
return self._attrs.get("percentComplete")
@percent_complete.setter
def percent_complete(self, percent_complete: "int"):
"""Sets the percent_complete of this DeleteSearchJob.
An estimate of the percent of time remaining before the delete search job completes.
:param percent_complete: The percent_complete of this DeleteSearchJob.
:type: int
"""
self._attrs["percentComplete"] = percent_complete
@property
def preview_available(self) -> "str":
""" Gets the preview_available of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
"""
return self._attrs.get("previewAvailable")
@preview_available.setter
def preview_available(self, preview_available: "str"):
"""Sets the preview_available of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to false.
:param preview_available: The preview_available of this DeleteSearchJob.
:type: str
"""
self._attrs["previewAvailable"] = preview_available
@property
def query(self) -> "str":
""" Gets the query of this DeleteSearchJob.
The SPL search string that is generated based on index, module and predicate that are specified.
"""
return self._attrs.get("query")
@query.setter
def query(self, query: "str"):
"""Sets the query of this DeleteSearchJob.
The SPL search string that is generated based on index, module and predicate that are specified.
:param query: The query of this DeleteSearchJob.
:type: str
"""
self._attrs["query"] = query
@property
def query_parameters(self) -> "QueryParameters":
""" Gets the query_parameters of this DeleteSearchJob.
Represents parameters on the search job such as 'earliest' and 'latest'.
"""
return QueryParameters._from_dict(self._attrs["queryParameters"])
@query_parameters.setter
def query_parameters(self, query_parameters: "QueryParameters"):
"""Sets the query_parameters of this DeleteSearchJob.
Represents parameters on the search job such as 'earliest' and 'latest'.
:param query_parameters: The query_parameters of this DeleteSearchJob.
:type: QueryParameters
"""
self._attrs["queryParameters"] = query_parameters.to_dict()
@property
def required_freshness(self) -> "int":
""" Gets the required_freshness of this DeleteSearchJob.
This field does not apply to delete search jobs and is set to 0.
"""
return self._attrs.get("requiredFreshness")
@required_freshness.setter
def required_freshness(self, required_freshness: "int"):
"""Sets the required_freshness of this DeleteSearchJob.
This field does not apply to delete search jobs and is set to 0.
:param required_freshness: The required_freshness of this DeleteSearchJob.
:type: int
"""
self._attrs["requiredFreshness"] = required_freshness
@property
def resolved_earliest(self) -> "str":
""" Gets the resolved_earliest of this DeleteSearchJob.
The earliest time speciifed as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'earliest' queryParameters.
"""
return self._attrs.get("resolvedEarliest")
@resolved_earliest.setter
def resolved_earliest(self, resolved_earliest: "str"):
"""Sets the resolved_earliest of this DeleteSearchJob.
The earliest time speciifed as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'earliest' queryParameters.
:param resolved_earliest: The resolved_earliest of this DeleteSearchJob.
:type: str
"""
self._attrs["resolvedEarliest"] = resolved_earliest
@property
def resolved_latest(self) -> "str":
""" Gets the resolved_latest of this DeleteSearchJob.
The latest time specified as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'earliest' queryParameters.
"""
return self._attrs.get("resolvedLatest")
@resolved_latest.setter
def resolved_latest(self, resolved_latest: "str"):
"""Sets the resolved_latest of this DeleteSearchJob.
The latest time specified as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'earliest' queryParameters.
:param resolved_latest: The resolved_latest of this DeleteSearchJob.
:type: str
"""
self._attrs["resolvedLatest"] = resolved_latest
@property
def results_available(self) -> "int":
""" Gets the results_available of this DeleteSearchJob.
The number of results produced so far by the delete search job that are going to be deleted.
"""
return self._attrs.get("resultsAvailable")
@results_available.setter
def results_available(self, results_available: "int"):
"""Sets the results_available of this DeleteSearchJob.
The number of results produced so far by the delete search job that are going to be deleted.
:param results_available: The results_available of this DeleteSearchJob.
:type: int
"""
self._attrs["resultsAvailable"] = results_available
@property
def results_preview_available(self) -> "int":
""" Gets the results_preview_available of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to 0.
"""
return self._attrs.get("resultsPreviewAvailable")
@results_preview_available.setter
def results_preview_available(self, results_preview_available: "int"):
"""Sets the results_preview_available of this DeleteSearchJob.
This field does not apply to delete search jobs and is defaulted to 0.
:param results_preview_available: The results_preview_available of this DeleteSearchJob.
:type: int
"""
self._attrs["resultsPreviewAvailable"] = results_preview_available
@property
def sid(self) -> "str":
""" Gets the sid of this DeleteSearchJob.
The ID assigned to the delete search job.
"""
return self._attrs.get("sid")
@sid.setter
def sid(self, sid: "str"):
"""Sets the sid of this DeleteSearchJob.
The ID assigned to the delete search job.
:param sid: The sid of this DeleteSearchJob.
:type: str
"""
self._attrs["sid"] = sid
@property
def status(self) -> "SearchStatus":
""" Gets the status of this DeleteSearchJob.
"""
return SearchStatus.from_value(self._attrs.get("status"))
@status.setter
def status(self, status: "SearchStatus"):
"""Sets the status of this DeleteSearchJob.
:param status: The status of this DeleteSearchJob.
:type: SearchStatus
"""
if isinstance(status, Enum):
self._attrs["status"] = status.value
else:
self._attrs["status"] = status # If you supply a string, we presume you know the service will take it.
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
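# --- Illustrative usage sketch (not part of the generated model code) ---
# A minimal example of the DeleteSearchJob accessors above: the `status`
# setter stores non-Enum values as-is, the getter rehydrates them through
# SearchStatus.from_value, and to_dict() omits attrs left at None. The
# search ID and status value here are hypothetical.
def _example_delete_search_job_status():
    job = DeleteSearchJob.__new__(DeleteSearchJob)  # mirrors the _from_dict pattern
    job._attrs = {"sid": "1604434726.1234"}         # hypothetical search ID
    job.status = "done"                             # plain strings pass through unchanged
    rehydrated = job.status                         # SearchStatus.from_value("done")
    return job.to_dict(), rehydrated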
class FederatedConnection(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "FederatedConnection":
instance = FederatedConnection.__new__(FederatedConnection)
instance._attrs = model
return instance
def __init__(self, created: "str" = None, createdby: "str" = None, hostnameip: "str" = None, modified: "str" = None, modifiedby: "str" = None, name: "str" = None, port: "float" = None, serviceaccountuser: "str" = None, **extra):
"""FederatedConnection"""
self._attrs = dict()
if created is not None:
self._attrs["created"] = created
if createdby is not None:
self._attrs["createdby"] = createdby
if hostnameip is not None:
self._attrs["hostnameip"] = hostnameip
if modified is not None:
self._attrs["modified"] = modified
if modifiedby is not None:
self._attrs["modifiedby"] = modifiedby
if name is not None:
self._attrs["name"] = name
if port is not None:
self._attrs["port"] = port
if serviceaccountuser is not None:
self._attrs["serviceaccountuser"] = serviceaccountuser
for k, v in extra.items():
self._attrs[k] = v
@property
def created(self) -> "str":
""" Gets the created of this FederatedConnection.
The timestamp when the federated connection was created.
"""
return self._attrs.get("created")
@created.setter
def created(self, created: "str"):
"""Sets the created of this FederatedConnection.
The timestamp when the federated connection was created.
:param created: The created of this FederatedConnection.
:type: str
"""
self._attrs["created"] = created
@property
def createdby(self) -> "str":
""" Gets the createdby of this FederatedConnection.
The user who created the federated connection.
"""
return self._attrs.get("createdby")
@createdby.setter
def createdby(self, createdby: "str"):
"""Sets the createdby of this FederatedConnection.
The user who created the federated connection.
:param createdby: The createdby of this FederatedConnection.
:type: str
"""
self._attrs["createdby"] = createdby
@property
def hostnameip(self) -> "str":
""" Gets the hostnameip of this FederatedConnection.
The remote hostname to connect to.
"""
return self._attrs.get("hostnameip")
@hostnameip.setter
def hostnameip(self, hostnameip: "str"):
"""Sets the hostnameip of this FederatedConnection.
The remote hostname to connect to.
:param hostnameip: The hostnameip of this FederatedConnection.
:type: str
"""
self._attrs["hostnameip"] = hostnameip
@property
def modified(self) -> "str":
""" Gets the modified of this FederatedConnection.
The timestamp when the federated connection was modified.
"""
return self._attrs.get("modified")
@modified.setter
def modified(self, modified: "str"):
"""Sets the modified of this FederatedConnection.
The timestamp when the federated connection was modified.
:param modified: The modified of this FederatedConnection.
:type: str
"""
self._attrs["modified"] = modified
@property
def modifiedby(self) -> "str":
""" Gets the modifiedby of this FederatedConnection.
The user who last modified the federated connection.
"""
return self._attrs.get("modifiedby")
@modifiedby.setter
def modifiedby(self, modifiedby: "str"):
"""Sets the modifiedby of this FederatedConnection.
The user who last modified the federated connection.
:param modifiedby: The modifiedby of this FederatedConnection.
:type: str
"""
self._attrs["modifiedby"] = modifiedby
@property
def name(self) -> "str":
""" Gets the name of this FederatedConnection.
The name of the federated connection.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this FederatedConnection.
The name of the federated connection.
:param name: The name of this FederatedConnection.
:type: str
"""
self._attrs["name"] = name
@property
def port(self) -> "float":
""" Gets the port of this FederatedConnection.
The remote port number.
"""
return self._attrs.get("port")
@port.setter
def port(self, port: "float"):
"""Sets the port of this FederatedConnection.
The remote port number.
:param port: The port of this FederatedConnection.
:type: float
"""
self._attrs["port"] = port
@property
def serviceaccountuser(self) -> "str":
""" Gets the serviceaccountuser of this FederatedConnection.
The username on the service account.
"""
return self._attrs.get("serviceaccountuser")
@serviceaccountuser.setter
def serviceaccountuser(self, serviceaccountuser: "str"):
"""Sets the serviceaccountuser of this FederatedConnection.
The username on the service account.
:param serviceaccountuser: The serviceaccountuser of this FederatedConnection.
:type: str
"""
self._attrs["serviceaccountuser"] = serviceaccountuser
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
class FederatedConnectionInput(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "FederatedConnectionInput":
instance = FederatedConnectionInput.__new__(FederatedConnectionInput)
instance._attrs = model
return instance
def __init__(self, hostnameip: "str" = None, name: "str" = None, port: "float" = None, serviceaccountpassword: "str" = None, serviceaccountuser: "str" = None, **extra):
"""FederatedConnectionInput"""
self._attrs = dict()
if hostnameip is not None:
self._attrs["hostnameip"] = hostnameip
if name is not None:
self._attrs["name"] = name
if port is not None:
self._attrs["port"] = port
if serviceaccountpassword is not None:
self._attrs["serviceaccountpassword"] = serviceaccountpassword
if serviceaccountuser is not None:
self._attrs["serviceaccountuser"] = serviceaccountuser
for k, v in extra.items():
self._attrs[k] = v
@property
def hostnameip(self) -> "str":
""" Gets the hostnameip of this FederatedConnectionInput.
The remote hostname to connect to.
"""
return self._attrs.get("hostnameip")
@hostnameip.setter
def hostnameip(self, hostnameip: "str"):
"""Sets the hostnameip of this FederatedConnectionInput.
The remote hostname to connect to.
:param hostnameip: The hostnameip of this FederatedConnectionInput.
:type: str
"""
self._attrs["hostnameip"] = hostnameip
@property
def name(self) -> "str":
""" Gets the name of this FederatedConnectionInput.
The name of the federated connection.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this FederatedConnectionInput.
The name of the federated connection.
:param name: The name of this FederatedConnectionInput.
:type: str
"""
self._attrs["name"] = name
@property
def port(self) -> "float":
""" Gets the port of this FederatedConnectionInput.
The remote port number.
"""
return self._attrs.get("port")
@port.setter
def port(self, port: "float"):
"""Sets the port of this FederatedConnectionInput.
The remote port number.
:param port: The port of this FederatedConnectionInput.
:type: float
"""
self._attrs["port"] = port
@property
def serviceaccountpassword(self) -> "str":
""" Gets the serviceaccountpassword of this FederatedConnectionInput.
The password of the service account.
"""
return self._attrs.get("serviceaccountpassword")
@serviceaccountpassword.setter
def serviceaccountpassword(self, serviceaccountpassword: "str"):
"""Sets the serviceaccountpassword of this FederatedConnectionInput.
The password of the service account.
:param serviceaccountpassword: The serviceaccountpassword of this FederatedConnectionInput.
:type: str
"""
self._attrs["serviceaccountpassword"] = serviceaccountpassword
@property
def serviceaccountuser(self) -> "str":
""" Gets the serviceaccountuser of this FederatedConnectionInput.
The username on the service account.
"""
return self._attrs.get("serviceaccountuser")
@serviceaccountuser.setter
def serviceaccountuser(self, serviceaccountuser: "str"):
"""Sets the serviceaccountuser of this FederatedConnectionInput.
The username on the service account.
:param serviceaccountuser: The serviceaccountuser of this FederatedConnectionInput.
:type: str
"""
self._attrs["serviceaccountuser"] = serviceaccountuser
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
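# --- Illustrative usage sketch (not part of the generated model code) ---
# A minimal round trip through FederatedConnectionInput: construct it with
# keyword arguments, serialize with to_dict(), and rehydrate the payload via
# _from_dict(). The hostname, port, and account values are hypothetical.
def _example_federated_connection_input():
    conn = FederatedConnectionInput(
        name="remote-prod",                  # hypothetical connection name
        hostnameip="remote.example.com",     # remote hostname to connect to
        port=8089,                           # remote port number
        serviceaccountuser="svc-federated",  # service account username
    )
    payload = conn.to_dict()                 # contains only the attrs that were set
    restored = FederatedConnectionInput._from_dict(payload)
    return restored.name, restored.port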
class FederatedDataset(Dataset):
@staticmethod
def _from_dict(model: dict) -> "FederatedDataset":
instance = FederatedDataset.__new__(FederatedDataset)
instance._attrs = model
return instance
def __init__(self, created: "str", createdby: "str", id: "str", modified: "str", modifiedby: "str", name: "str", owner: "str", resourcename: "str", appclientidcreatedby: "str" = None, appclientidmodifiedby: "str" = None, description: "str" = None, federated_connection: "str" = None, federated_dataset: "str" = None, federated_dataset_kind: "str" = None, namespace: "str" = None, summary: "str" = None, title: "str" = None, **extra):
"""FederatedDataset"""
self._attrs = dict()
if created is not None:
self._attrs["created"] = created
if createdby is not None:
self._attrs["createdby"] = createdby
if id is not None:
self._attrs["id"] = id
if modified is not None:
self._attrs["modified"] = modified
if modifiedby is not None:
self._attrs["modifiedby"] = modifiedby
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
if resourcename is not None:
self._attrs["resourcename"] = resourcename
if appclientidcreatedby is not None:
self._attrs["appclientidcreatedby"] = appclientidcreatedby
if appclientidmodifiedby is not None:
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
if description is not None:
self._attrs["description"] = description
if federated_connection is not None:
self._attrs["federatedConnection"] = federated_connection
if federated_dataset is not None:
self._attrs["federatedDataset"] = federated_dataset
if federated_dataset_kind is not None:
self._attrs["federatedDatasetKind"] = federated_dataset_kind
self._attrs["kind"] = "federated"
if namespace is not None:
self._attrs["namespace"] = namespace
if summary is not None:
self._attrs["summary"] = summary
if title is not None:
self._attrs["title"] = title
for k, v in extra.items():
self._attrs[k] = v
@property
def created(self) -> "str":
""" Gets the created of this FederatedDataset.
The date and time the object was created.
"""
return self._attrs.get("created")
@created.setter
def created(self, created: "str"):
"""Sets the created of this FederatedDataset.
The date and time the object was created.
:param created: The created of this FederatedDataset.
:type: str
"""
if created is None:
raise ValueError("Invalid value for `created`, must not be `None`")
self._attrs["created"] = created
@property
def createdby(self) -> "str":
""" Gets the createdby of this FederatedDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
"""
return self._attrs.get("createdby")
@createdby.setter
def createdby(self, createdby: "str"):
"""Sets the createdby of this FederatedDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
:param createdby: The createdby of this FederatedDataset.
:type: str
"""
if createdby is None:
raise ValueError("Invalid value for `createdby`, must not be `None`")
self._attrs["createdby"] = createdby
@property
def id(self) -> "str":
""" Gets the id of this FederatedDataset.
A unique dataset ID.
"""
return self._attrs.get("id")
@id.setter
def id(self, id: "str"):
"""Sets the id of this FederatedDataset.
A unique dataset ID.
:param id: The id of this FederatedDataset.
:type: str
"""
if id is None:
raise ValueError("Invalid value for `id`, must not be `None`")
self._attrs["id"] = id
@property
def modified(self) -> "str":
""" Gets the modified of this FederatedDataset.
The date and time the object was modified.
"""
return self._attrs.get("modified")
@modified.setter
def modified(self, modified: "str"):
"""Sets the modified of this FederatedDataset.
The date and time the object was modified.
:param modified: The modified of this FederatedDataset.
:type: str
"""
if modified is None:
raise ValueError("Invalid value for `modified`, must not be `None`")
self._attrs["modified"] = modified
@property
def modifiedby(self) -> "str":
""" Gets the modifiedby of this FederatedDataset.
The name of the user who most recently modified the object.
"""
return self._attrs.get("modifiedby")
@modifiedby.setter
def modifiedby(self, modifiedby: "str"):
"""Sets the modifiedby of this FederatedDataset.
The name of the user who most recently modified the object.
:param modifiedby: The modifiedby of this FederatedDataset.
:type: str
"""
if modifiedby is None:
raise ValueError("Invalid value for `modifiedby`, must not be `None`")
self._attrs["modifiedby"] = modifiedby
@property
def name(self) -> "str":
""" Gets the name of this FederatedDataset.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this FederatedDataset.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this FederatedDataset.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this FederatedDataset.
The name of the object's owner.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this FederatedDataset.
The name of the object's owner.
:param owner: The owner of this FederatedDataset.
:type: str
"""
if owner is None:
raise ValueError("Invalid value for `owner`, must not be `None`")
self._attrs["owner"] = owner
@property
def resourcename(self) -> "str":
""" Gets the resourcename of this FederatedDataset.
The dataset name qualified by the module name.
"""
return self._attrs.get("resourcename")
@resourcename.setter
def resourcename(self, resourcename: "str"):
"""Sets the resourcename of this FederatedDataset.
The dataset name qualified by the module name.
:param resourcename: The resourcename of this FederatedDataset.
:type: str
"""
if resourcename is None:
raise ValueError("Invalid value for `resourcename`, must not be `None`")
self._attrs["resourcename"] = resourcename
@property
def appclientidcreatedby(self) -> "str":
""" Gets the appclientidcreatedby of this FederatedDataset.
AppClientId of the app that created the dataset.
"""
return self._attrs.get("appclientidcreatedby")
@appclientidcreatedby.setter
def appclientidcreatedby(self, appclientidcreatedby: "str"):
"""Sets the appclientidcreatedby of this FederatedDataset.
AppClientId of the app that created the dataset.
:param appclientidcreatedby: The appclientidcreatedby of this FederatedDataset.
:type: str
"""
self._attrs["appclientidcreatedby"] = appclientidcreatedby
@property
def appclientidmodifiedby(self) -> "str":
""" Gets the appclientidmodifiedby of this FederatedDataset.
AppClientId of the app that last modified the dataset.
"""
return self._attrs.get("appclientidmodifiedby")
@appclientidmodifiedby.setter
def appclientidmodifiedby(self, appclientidmodifiedby: "str"):
"""Sets the appclientidmodifiedby of this FederatedDataset.
AppClientId of the app that last modified the dataset.
:param appclientidmodifiedby: The appclientidmodifiedby of this FederatedDataset.
:type: str
"""
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
@property
def description(self) -> "str":
""" Gets the description of this FederatedDataset.
Detailed description of the dataset.
"""
return self._attrs.get("description")
@description.setter
def description(self, description: "str"):
"""Sets the description of this FederatedDataset.
Detailed description of the dataset.
:param description: The description of this FederatedDataset.
:type: str
"""
self._attrs["description"] = description
@property
def federated_connection(self) -> "str":
""" Gets the federated_connection of this FederatedDataset.
Connection information used to connect to the remote federated connection.
"""
return self._attrs.get("federatedConnection")
@federated_connection.setter
def federated_connection(self, federated_connection: "str"):
"""Sets the federated_connection of this FederatedDataset.
Connection information used to connect to the remote federated connection.
:param federated_connection: The federated_connection of this FederatedDataset.
:type: str
"""
self._attrs["federatedConnection"] = federated_connection
@property
def federated_dataset(self) -> "str":
""" Gets the federated_dataset of this FederatedDataset.
Dataset information in the remote instance.
"""
return self._attrs.get("federatedDataset")
@federated_dataset.setter
def federated_dataset(self, federated_dataset: "str"):
"""Sets the federated_dataset of this FederatedDataset.
Dataset information in the remote instance.
:param federated_dataset: The federated_dataset of this FederatedDataset.
:type: str
"""
self._attrs["federatedDataset"] = federated_dataset
@property
def federated_dataset_kind(self) -> "str":
""" Gets the federated_dataset_kind of this FederatedDataset.
Dataset kind information in the remote instance.
"""
return self._attrs.get("federatedDatasetKind")
@federated_dataset_kind.setter
def federated_dataset_kind(self, federated_dataset_kind: "str"):
"""Sets the federated_dataset_kind of this FederatedDataset.
Dataset kind information in the remote instance.
:param federated_dataset_kind: The federated_dataset_kind of this FederatedDataset.
:type: str
"""
self._attrs["federatedDatasetKind"] = federated_dataset_kind
@property
def kind(self) -> str:
return "federated"
@property
def namespace(self) -> "str":
""" Gets the namespace of this FederatedDataset.
The name of the namespace that contains the dataset.
"""
return self._attrs.get("namespace")
@namespace.setter
def namespace(self, namespace: "str"):
"""Sets the namespace of this FederatedDataset.
The name of the namespace that contains the dataset.
:param namespace: The namespace of this FederatedDataset.
:type: str
"""
self._attrs["namespace"] = namespace
@property
def summary(self) -> "str":
""" Gets the summary of this FederatedDataset.
Summary of the dataset's purpose.
"""
return self._attrs.get("summary")
@summary.setter
def summary(self, summary: "str"):
"""Sets the summary of this FederatedDataset.
Summary of the dataset's purpose.
:param summary: The summary of this FederatedDataset.
:type: str
"""
self._attrs["summary"] = summary
@property
def title(self) -> "str":
""" Gets the title of this FederatedDataset.
The title of the dataset. Does not have to be unique.
"""
return self._attrs.get("title")
@title.setter
def title(self, title: "str"):
"""Sets the title of this FederatedDataset.
The title of the dataset. Does not have to be unique.
:param title: The title of this FederatedDataset.
:type: str
"""
self._attrs["title"] = title
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
Dataset.from_dict_handlers["federated"] = FederatedDataset._from_dict
class FederatedDatasetKind(str, Enum):
FEDERATED = "federated"
@staticmethod
def from_value(value: str):
if value == "federated":
return FederatedDatasetKind.FEDERATED
class FederatedDatasetPATCH(DatasetPATCH):
@staticmethod
def _from_dict(model: dict) -> "FederatedDatasetPATCH":
instance = FederatedDatasetPATCH.__new__(FederatedDatasetPATCH)
instance._attrs = model
return instance
def __init__(self, federated_connection: "str" = None, federated_dataset: "str" = None, federated_dataset_kind: "str" = None, kind: "FederatedDatasetKind" = None, module: "str" = None, name: "str" = None, owner: "str" = None, **extra):
"""FederatedDatasetPATCH"""
self._attrs = dict()
if federated_connection is not None:
self._attrs["federatedConnection"] = federated_connection
if federated_dataset is not None:
self._attrs["federatedDataset"] = federated_dataset
if federated_dataset_kind is not None:
self._attrs["federatedDatasetKind"] = federated_dataset_kind
if kind is not None:
self._attrs["kind"] = kind
if module is not None:
self._attrs["module"] = module
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
for k, v in extra.items():
self._attrs[k] = v
@property
def federated_connection(self) -> "str":
""" Gets the federated_connection of this FederatedDatasetPATCH.
Connection information used to connect to the remote federated connection.
"""
return self._attrs.get("federatedConnection")
@federated_connection.setter
def federated_connection(self, federated_connection: "str"):
"""Sets the federated_connection of this FederatedDatasetPATCH.
Connection information used to connect to the remote federated connection.
:param federated_connection: The federated_connection of this FederatedDatasetPATCH.
:type: str
"""
self._attrs["federatedConnection"] = federated_connection
@property
def federated_dataset(self) -> "str":
""" Gets the federated_dataset of this FederatedDatasetPATCH.
Dataset information in the remote instance.
"""
return self._attrs.get("federatedDataset")
@federated_dataset.setter
def federated_dataset(self, federated_dataset: "str"):
"""Sets the federated_dataset of this FederatedDatasetPATCH.
Dataset information in the remote instance.
:param federated_dataset: The federated_dataset of this FederatedDatasetPATCH.
:type: str
"""
self._attrs["federatedDataset"] = federated_dataset
@property
def federated_dataset_kind(self) -> "str":
""" Gets the federated_dataset_kind of this FederatedDatasetPATCH.
Dataset kind information in the remote instance.
"""
return self._attrs.get("federatedDatasetKind")
@federated_dataset_kind.setter
def federated_dataset_kind(self, federated_dataset_kind: "str"):
"""Sets the federated_dataset_kind of this FederatedDatasetPATCH.
Dataset kind information in the remote instance.
:param federated_dataset_kind: The federated_dataset_kind of this FederatedDatasetPATCH.
:type: str
"""
self._attrs["federatedDatasetKind"] = federated_dataset_kind
@property
def kind(self) -> "FederatedDatasetKind":
""" Gets the kind of this FederatedDatasetPATCH.
"""
return FederatedDatasetKind.from_value(self._attrs.get("kind"))
@kind.setter
def kind(self, kind: "FederatedDatasetKind"):
"""Sets the kind of this FederatedDatasetPATCH.
:param kind: The kind of this FederatedDatasetPATCH.
:type: FederatedDatasetKind
"""
if isinstance(kind, Enum):
self._attrs["kind"] = kind.value
else:
self._attrs["kind"] = kind # If you supply a string, we presume you know the service will take it.
@property
def module(self) -> "str":
""" Gets the module of this FederatedDatasetPATCH.
The name of the module to reassign the dataset into.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this FederatedDatasetPATCH.
The name of the module to reassign the dataset into.
:param module: The module of this FederatedDatasetPATCH.
:type: str
"""
self._attrs["module"] = module
@property
def name(self) -> "str":
""" Gets the name of this FederatedDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this FederatedDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this FederatedDatasetPATCH.
:type: str
"""
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this FederatedDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this FederatedDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
:param owner: The owner of this FederatedDatasetPATCH.
:type: str
"""
self._attrs["owner"] = owner
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
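# --- Illustrative usage sketch (not part of the generated model code) ---
# A minimal example of assembling a sparse PATCH body. Assigning the
# FederatedDatasetKind enum through the `kind` setter stores its string
# value, and to_dict() emits only the fields being changed. The names are
# hypothetical.
def _example_federated_dataset_patch():
    patch = FederatedDatasetPATCH(name="remote_logs_v2")  # hypothetical new name
    patch.kind = FederatedDatasetKind.FEDERATED           # stored as "federated"
    patch.federated_connection = "remote-prod"            # hypothetical connection
    return patch.to_dict()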
class SingleFieldSummary(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "SingleFieldSummary":
instance = SingleFieldSummary.__new__(SingleFieldSummary)
instance._attrs = model
return instance
def __init__(self, count: "int" = None, distinct_count: "int" = None, is_exact: "bool" = None, max: "str" = None, mean: "float" = None, min: "str" = None, modes: "List[SingleValueMode]" = None, numeric_count: "int" = None, relevant: "bool" = None, stddev: "float" = None, **extra):
"""SingleFieldSummary"""
self._attrs = dict()
if count is not None:
self._attrs["count"] = count
if distinct_count is not None:
self._attrs["distinctCount"] = distinct_count
if is_exact is not None:
self._attrs["isExact"] = is_exact
if max is not None:
self._attrs["max"] = max
if mean is not None:
self._attrs["mean"] = mean
if min is not None:
self._attrs["min"] = min
if modes is not None:
self._attrs["modes"] = modes
if numeric_count is not None:
self._attrs["numericCount"] = numeric_count
if relevant is not None:
self._attrs["relevant"] = relevant
if stddev is not None:
self._attrs["stddev"] = stddev
for k, v in extra.items():
self._attrs[k] = v
@property
def count(self) -> "int":
""" Gets the count of this SingleFieldSummary.
The total number of events that contain the field.
"""
return self._attrs.get("count")
@count.setter
def count(self, count: "int"):
"""Sets the count of this SingleFieldSummary.
The total number of events that contain the field.
:param count: The count of this SingleFieldSummary.
:type: int
"""
self._attrs["count"] = count
@property
def distinct_count(self) -> "int":
""" Gets the distinct_count of this SingleFieldSummary.
The total number of unique values in the field.
"""
return self._attrs.get("distinctCount")
@distinct_count.setter
def distinct_count(self, distinct_count: "int"):
"""Sets the distinct_count of this SingleFieldSummary.
The total number of unique values in the field.
:param distinct_count: The distinct_count of this SingleFieldSummary.
:type: int
"""
self._attrs["distinctCount"] = distinct_count
@property
def is_exact(self) -> "bool":
""" Gets the is_exact of this SingleFieldSummary.
Specifies if the 'distinctCount' is accurate. The 'isExact' property is FALSE when the 'distinctCount' exceeds the maximum count and an exact count is not available.
"""
return self._attrs.get("isExact")
@is_exact.setter
def is_exact(self, is_exact: "bool"):
"""Sets the is_exact of this SingleFieldSummary.
Specifies if the 'distinctCount' is accurate. The 'isExact' property is FALSE when the 'distinctCount' exceeds the maximum count and an exact count is not available.
:param is_exact: The is_exact of this SingleFieldSummary.
:type: bool
"""
self._attrs["isExact"] = is_exact
@property
def max(self) -> "str":
""" Gets the max of this SingleFieldSummary.
The maximum of the numeric values in the field.
"""
return self._attrs.get("max")
@max.setter
def max(self, max: "str"):
"""Sets the max of this SingleFieldSummary.
The maximum of the numeric values in the field.
:param max: The max of this SingleFieldSummary.
:type: str
"""
self._attrs["max"] = max
@property
def mean(self) -> "float":
""" Gets the mean of this SingleFieldSummary.
The mean (average) for the numeric values in the field.
"""
return self._attrs.get("mean")
@mean.setter
def mean(self, mean: "float"):
"""Sets the mean of this SingleFieldSummary.
The mean (average) for the numeric values in the field.
:param mean: The mean of this SingleFieldSummary.
:type: float
"""
self._attrs["mean"] = mean
@property
def min(self) -> "str":
""" Gets the min of this SingleFieldSummary.
The minimum of the numeric values in the field.
"""
return self._attrs.get("min")
@min.setter
def min(self, min: "str"):
"""Sets the min of this SingleFieldSummary.
The minimum of the numeric values in the field.
:param min: The min of this SingleFieldSummary.
:type: str
"""
self._attrs["min"] = min
@property
def modes(self) -> "List[SingleValueMode]":
""" Gets the modes of this SingleFieldSummary.
An array of the values in the field.
"""
return [SingleValueMode._from_dict(i) for i in self._attrs.get("modes")]
@modes.setter
def modes(self, modes: "List[SingleValueMode]"):
"""Sets the modes of this SingleFieldSummary.
An array of the values in the field.
:param modes: The modes of this SingleFieldSummary.
:type: List[SingleValueMode]
"""
self._attrs["modes"] = modes
@property
def numeric_count(self) -> "int":
""" Gets the numeric_count of this SingleFieldSummary.
The count of the numeric values in the field.
"""
return self._attrs.get("numericCount")
@numeric_count.setter
def numeric_count(self, numeric_count: "int"):
"""Sets the numeric_count of this SingleFieldSummary.
The count of the numeric values in the field.
:param numeric_count: The numeric_count of this SingleFieldSummary.
:type: int
"""
self._attrs["numericCount"] = numeric_count
@property
def relevant(self) -> "bool":
""" Gets the relevant of this SingleFieldSummary.
Specifies if the field was added or changed by the search.
"""
return self._attrs.get("relevant")
@relevant.setter
def relevant(self, relevant: "bool"):
"""Sets the relevant of this SingleFieldSummary.
Specifies if the field was added or changed by the search.
:param relevant: The relevant of this SingleFieldSummary.
:type: bool
"""
self._attrs["relevant"] = relevant
@property
def stddev(self) -> "float":
""" Gets the stddev of this SingleFieldSummary.
The standard deviation for the numeric values in the field.
"""
return self._attrs.get("stddev")
@stddev.setter
def stddev(self, stddev: "float"):
"""Sets the stddev of this SingleFieldSummary.
The standard deviation for the numeric values in the field.
:param stddev: The stddev of this SingleFieldSummary.
:type: float
"""
self._attrs["stddev"] = stddev
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
class SingleValueMode(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "SingleValueMode":
instance = SingleValueMode.__new__(SingleValueMode)
instance._attrs = model
return instance
def __init__(self, count: "int" = None, is_exact: "bool" = None, value: "str" = None, **extra):
"""SingleValueMode"""
self._attrs = dict()
if count is not None:
self._attrs["count"] = count
if is_exact is not None:
self._attrs["isExact"] = is_exact
if value is not None:
self._attrs["value"] = value
for k, v in extra.items():
self._attrs[k] = v
@property
def count(self) -> "int":
""" Gets the count of this SingleValueMode.
The number of occurrences of the value in a field.
"""
return self._attrs.get("count")
@count.setter
def count(self, count: "int"):
"""Sets the count of this SingleValueMode.
The number of occurrences of the value in a field.
:param count: The count of this SingleValueMode.
:type: int
"""
self._attrs["count"] = count
@property
def is_exact(self) -> "bool":
""" Gets the is_exact of this SingleValueMode.
Specifies if the count is accurate. The 'isExact' property is FALSE when the 'count' exceeds the maximum count and an exact count is not available.
"""
return self._attrs.get("isExact")
@is_exact.setter
def is_exact(self, is_exact: "bool"):
"""Sets the is_exact of this SingleValueMode.
Specifies if the count is accurate. The 'isExact' property is FALSE when the 'count' exceeds the maximum count and an exact count is not available.
:param is_exact: The is_exact of this SingleValueMode.
:type: bool
"""
self._attrs["isExact"] = is_exact
@property
def value(self) -> "str":
""" Gets the value of this SingleValueMode.
The value in the field.
"""
return self._attrs.get("value")
@value.setter
def value(self, value: "str"):
"""Sets the value of this SingleValueMode.
The value in the field.
:param value: The value of this SingleValueMode.
:type: str
"""
self._attrs["value"] = value
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
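# --- Illustrative usage sketch (not part of the generated model code) ---
# A minimal example of the nested rehydration above: SingleFieldSummary
# stores `modes` as raw dicts and its getter wraps each one in a
# SingleValueMode on access. The counts and values are hypothetical.
def _example_field_summary_modes():
    summary = SingleFieldSummary._from_dict({
        "count": 120,
        "distinctCount": 3,
        "modes": [
            {"count": 80, "isExact": True, "value": "GET"},   # hypothetical mode
            {"count": 40, "isExact": True, "value": "POST"},  # hypothetical mode
        ],
    })
    top = summary.modes[0]   # each entry comes back as a SingleValueMode
    return top.value, top.count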
class FieldsSummary(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "FieldsSummary":
instance = FieldsSummary.__new__(FieldsSummary)
instance._attrs = model
return instance
def __init__(self, duration: "float" = None, earliest_time: "str" = None, event_count: "int" = None, fields: "Dict[str, SingleFieldSummary]" = None, latest_time: "str" = None, **extra):
"""FieldsSummary"""
self._attrs = dict()
if duration is not None:
self._attrs["duration"] = duration
if earliest_time is not None:
self._attrs["earliestTime"] = earliest_time
if event_count is not None:
self._attrs["eventCount"] = event_count
if fields is not None:
self._attrs["fields"] = fields
if latest_time is not None:
self._attrs["latestTime"] = latest_time
for k, v in extra.items():
self._attrs[k] = v
@property
def duration(self) -> "float":
""" Gets the duration of this FieldsSummary.
The amount of time, in seconds, that a time bucket spans from the earliest to the latest time.
"""
return self._attrs.get("duration")
@duration.setter
def duration(self, duration: "float"):
"""Sets the duration of this FieldsSummary.
The amount of time, in seconds, that a time bucket spans from the earliest to the latest time.
:param duration: The duration of this FieldsSummary.
:type: float
"""
self._attrs["duration"] = duration
@property
def earliest_time(self) -> "str":
""" Gets the earliest_time of this FieldsSummary.
If specified, the earliest timestamp in UTC format of the events to process.
"""
return self._attrs.get("earliestTime")
@earliest_time.setter
def earliest_time(self, earliest_time: "str"):
"""Sets the earliest_time of this FieldsSummary.
If specified, the earliest timestamp in UTC format of the events to process.
:param earliest_time: The earliest_time of this FieldsSummary.
:type: str
"""
self._attrs["earliestTime"] = earliest_time
@property
def event_count(self) -> "int":
""" Gets the event_count of this FieldsSummary.
The total number of events for all fields returned in the time range ('earliestTime' and 'latestTime') specified.
"""
return self._attrs.get("eventCount")
@event_count.setter
def event_count(self, event_count: "int"):
"""Sets the event_count of this FieldsSummary.
The total number of events for all fields returned in the time range ('earliestTime' and 'latestTime') specified.
:param event_count: The event_count of this FieldsSummary.
:type: int
"""
self._attrs["eventCount"] = event_count
@property
def fields(self) -> "Dict[str, SingleFieldSummary]":
""" Gets the fields of this FieldsSummary.
A map of the fields in the time range specified.
"""
return self._attrs.get("fields")
@fields.setter
def fields(self, fields: "Dict[str, SingleFieldSummary]"):
"""Sets the fields of this FieldsSummary.
A map of the fields in the time range specified.
:param fields: The fields of this FieldsSummary.
:type: Dict[str, SingleFieldSummary]
"""
self._attrs["fields"] = fields
@property
def latest_time(self) -> "str":
""" Gets the latest_time of this FieldsSummary.
If specified, the latest timestamp in UTC format of the events to process.
"""
return self._attrs.get("latestTime")
@latest_time.setter
def latest_time(self, latest_time: "str"):
"""Sets the latest_time of this FieldsSummary.
If specified, the latest timestamp in UTC format of the events to process.
:param latest_time: The latest_time of this FieldsSummary.
:type: str
"""
self._attrs["latestTime"] = latest_time
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
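# --- Illustrative usage sketch (not part of the generated model code) ---
# A minimal example of reading a FieldsSummary. Unlike `modes` above, the
# `fields` getter returns the mapping exactly as stored, so each entry can
# be rehydrated explicitly with SingleFieldSummary._from_dict. The payload
# values are hypothetical.
def _example_fields_summary():
    summary = FieldsSummary._from_dict({
        "eventCount": 500,
        "earliestTime": "2021-01-01T00:00:00Z",  # hypothetical UTC bounds
        "latestTime": "2021-01-02T00:00:00Z",
        "fields": {"status": {"count": 500, "distinctCount": 5}},
    })
    status_field = SingleFieldSummary._from_dict(summary.fields["status"])
    return summary.event_count, status_field.distinct_count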
class IndexDataset(Dataset):
@staticmethod
def _from_dict(model: dict) -> "IndexDataset":
instance = IndexDataset.__new__(IndexDataset)
instance._attrs = model
return instance
def __init__(self, created: "str", createdby: "str", id: "str", modified: "str", modifiedby: "str", name: "str", owner: "str", resourcename: "str", appclientidcreatedby: "str" = None, appclientidmodifiedby: "str" = None, description: "str" = None, disabled: "bool" = None, earliest_event_time: "str" = None, earliest_ingest_time: "str" = None, frozen_time_period_in_secs: "int" = None, latest_event_time: "str" = None, latest_ingest_time: "str" = None, latest_metadata_update_time: "str" = None, namespace: "str" = None, summary: "str" = None, title: "str" = None, total_event_count: "int" = None, total_size: "int" = None, **extra):
"""IndexDataset"""
self._attrs = dict()
if created is not None:
self._attrs["created"] = created
if createdby is not None:
self._attrs["createdby"] = createdby
if id is not None:
self._attrs["id"] = id
if modified is not None:
self._attrs["modified"] = modified
if modifiedby is not None:
self._attrs["modifiedby"] = modifiedby
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
if resourcename is not None:
self._attrs["resourcename"] = resourcename
if appclientidcreatedby is not None:
self._attrs["appclientidcreatedby"] = appclientidcreatedby
if appclientidmodifiedby is not None:
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
if description is not None:
self._attrs["description"] = description
if disabled is not None:
self._attrs["disabled"] = disabled
if earliest_event_time is not None:
self._attrs["earliestEventTime"] = earliest_event_time
if earliest_ingest_time is not None:
self._attrs["earliestIngestTime"] = earliest_ingest_time
if frozen_time_period_in_secs is not None:
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
self._attrs["kind"] = "index"
if latest_event_time is not None:
self._attrs["latestEventTime"] = latest_event_time
if latest_ingest_time is not None:
self._attrs["latestIngestTime"] = latest_ingest_time
if latest_metadata_update_time is not None:
self._attrs["latestMetadataUpdateTime"] = latest_metadata_update_time
if namespace is not None:
self._attrs["namespace"] = namespace
if summary is not None:
self._attrs["summary"] = summary
if title is not None:
self._attrs["title"] = title
if total_event_count is not None:
self._attrs["totalEventCount"] = total_event_count
if total_size is not None:
self._attrs["totalSize"] = total_size
for k, v in extra.items():
self._attrs[k] = v
@property
def created(self) -> "str":
""" Gets the created of this IndexDataset.
The date and time the object was created.
"""
return self._attrs.get("created")
@created.setter
def created(self, created: "str"):
"""Sets the created of this IndexDataset.
The date and time the object was created.
:param created: The created of this IndexDataset.
:type: str
"""
if created is None:
raise ValueError("Invalid value for `created`, must not be `None`")
self._attrs["created"] = created
@property
def createdby(self) -> "str":
""" Gets the createdby of this IndexDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
"""
return self._attrs.get("createdby")
@createdby.setter
def createdby(self, createdby: "str"):
"""Sets the createdby of this IndexDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
:param createdby: The createdby of this IndexDataset.
:type: str
"""
if createdby is None:
raise ValueError("Invalid value for `createdby`, must not be `None`")
self._attrs["createdby"] = createdby
@property
def id(self) -> "str":
""" Gets the id of this IndexDataset.
A unique dataset ID.
"""
return self._attrs.get("id")
@id.setter
def id(self, id: "str"):
"""Sets the id of this IndexDataset.
A unique dataset ID.
:param id: The id of this IndexDataset.
:type: str
"""
if id is None:
raise ValueError("Invalid value for `id`, must not be `None`")
self._attrs["id"] = id
@property
def modified(self) -> "str":
""" Gets the modified of this IndexDataset.
The date and time the object was modified.
"""
return self._attrs.get("modified")
@modified.setter
def modified(self, modified: "str"):
"""Sets the modified of this IndexDataset.
The date and time the object was modified.
:param modified: The modified of this IndexDataset.
:type: str
"""
if modified is None:
raise ValueError("Invalid value for `modified`, must not be `None`")
self._attrs["modified"] = modified
@property
def modifiedby(self) -> "str":
""" Gets the modifiedby of this IndexDataset.
The name of the user who most recently modified the object.
"""
return self._attrs.get("modifiedby")
@modifiedby.setter
def modifiedby(self, modifiedby: "str"):
"""Sets the modifiedby of this IndexDataset.
The name of the user who most recently modified the object.
:param modifiedby: The modifiedby of this IndexDataset.
:type: str
"""
if modifiedby is None:
raise ValueError("Invalid value for `modifiedby`, must not be `None`")
self._attrs["modifiedby"] = modifiedby
@property
def name(self) -> "str":
""" Gets the name of this IndexDataset.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this IndexDataset.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this IndexDataset.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this IndexDataset.
The name of the object's owner.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this IndexDataset.
The name of the object's owner.
:param owner: The owner of this IndexDataset.
:type: str
"""
if owner is None:
raise ValueError("Invalid value for `owner`, must not be `None`")
self._attrs["owner"] = owner
@property
def resourcename(self) -> "str":
""" Gets the resourcename of this IndexDataset.
The dataset name qualified by the module name.
"""
return self._attrs.get("resourcename")
@resourcename.setter
def resourcename(self, resourcename: "str"):
"""Sets the resourcename of this IndexDataset.
The dataset name qualified by the module name.
:param resourcename: The resourcename of this IndexDataset.
:type: str
"""
if resourcename is None:
raise ValueError("Invalid value for `resourcename`, must not be `None`")
self._attrs["resourcename"] = resourcename
@property
def appclientidcreatedby(self) -> "str":
""" Gets the appclientidcreatedby of this IndexDataset.
AppClientId of the app that created the dataset.
"""
return self._attrs.get("appclientidcreatedby")
@appclientidcreatedby.setter
def appclientidcreatedby(self, appclientidcreatedby: "str"):
"""Sets the appclientidcreatedby of this IndexDataset.
AppClientId of the app that created the dataset.
:param appclientidcreatedby: The appclientidcreatedby of this IndexDataset.
:type: str
"""
self._attrs["appclientidcreatedby"] = appclientidcreatedby
@property
def appclientidmodifiedby(self) -> "str":
""" Gets the appclientidmodifiedby of this IndexDataset.
AppClientId of the app that last modified the dataset.
"""
return self._attrs.get("appclientidmodifiedby")
@appclientidmodifiedby.setter
def appclientidmodifiedby(self, appclientidmodifiedby: "str"):
"""Sets the appclientidmodifiedby of this IndexDataset.
AppClientId of the app that last modified the dataset.
:param appclientidmodifiedby: The appclientidmodifiedby of this IndexDataset.
:type: str
"""
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
@property
def description(self) -> "str":
""" Gets the description of this IndexDataset.
Detailed description of the dataset.
"""
return self._attrs.get("description")
@description.setter
def description(self, description: "str"):
"""Sets the description of this IndexDataset.
Detailed description of the dataset.
:param description: The description of this IndexDataset.
:type: str
"""
self._attrs["description"] = description
@property
def disabled(self) -> "bool":
""" Gets the disabled of this IndexDataset.
Specifies whether or not the Splunk index is disabled.
"""
return self._attrs.get("disabled")
@disabled.setter
def disabled(self, disabled: "bool"):
"""Sets the disabled of this IndexDataset.
Specifies whether or not the Splunk index is disabled.
:param disabled: The disabled of this IndexDataset.
:type: bool
"""
self._attrs["disabled"] = disabled
@property
def earliest_event_time(self) -> "str":
""" Gets the earliest_event_time of this IndexDataset.
The timestamp, in seconds, of the earliest event. The timestamp is in UNIX time.
"""
return self._attrs.get("earliestEventTime")
@earliest_event_time.setter
def earliest_event_time(self, earliest_event_time: "str"):
"""Sets the earliest_event_time of this IndexDataset.
The timestamp, in seconds, of the earliest event. The timestamp is in UNIX time.
:param earliest_event_time: The earliest_event_time of this IndexDataset.
:type: str
"""
self._attrs["earliestEventTime"] = earliest_event_time
@property
def earliest_ingest_time(self) -> "str":
""" Gets the earliest_ingest_time of this IndexDataset.
The earliest index time for any of the events in this index.
"""
return self._attrs.get("earliestIngestTime")
@earliest_ingest_time.setter
def earliest_ingest_time(self, earliest_ingest_time: "str"):
"""Sets the earliest_ingest_time of this IndexDataset.
The earliest index time for any of the events in this index.
:param earliest_ingest_time: The earliest_ingest_time of this IndexDataset.
:type: str
"""
self._attrs["earliestIngestTime"] = earliest_ingest_time
@property
def frozen_time_period_in_secs(self) -> "int":
""" Gets the frozen_time_period_in_secs of this IndexDataset.
The frozenTimePeriodInSecs to use for the index.
"""
return self._attrs.get("frozenTimePeriodInSecs")
@frozen_time_period_in_secs.setter
def frozen_time_period_in_secs(self, frozen_time_period_in_secs: "int"):
"""Sets the frozen_time_period_in_secs of this IndexDataset.
The frozenTimePeriodInSecs to use for the index.
:param frozen_time_period_in_secs: The frozen_time_period_in_secs of this IndexDataset.
:type: int
"""
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
@property
def kind(self) -> str:
return "index"
@property
def latest_event_time(self) -> "str":
""" Gets the latest_event_time of this IndexDataset.
The timestamp, in seconds, of the latest event. The timestamp is in UNIX time.
"""
return self._attrs.get("latestEventTime")
@latest_event_time.setter
def latest_event_time(self, latest_event_time: "str"):
"""Sets the latest_event_time of this IndexDataset.
The timestamp, in seconds, of the latest event. The timestamp is in UNIX time.
:param latest_event_time: The latest_event_time of this IndexDataset.
:type: str
"""
self._attrs["latestEventTime"] = latest_event_time
@property
def latest_ingest_time(self) -> "str":
""" Gets the latest_ingest_time of this IndexDataset.
The latest index time for any of the events in this index.
"""
return self._attrs.get("latestIngestTime")
@latest_ingest_time.setter
def latest_ingest_time(self, latest_ingest_time: "str"):
"""Sets the latest_ingest_time of this IndexDataset.
The latest index time for any of the events in this index.
:param latest_ingest_time: The latest_ingest_time of this IndexDataset.
:type: str
"""
self._attrs["latestIngestTime"] = latest_ingest_time
@property
def latest_metadata_update_time(self) -> "str":
""" Gets the latest_metadata_update_time of this IndexDataset.
The latest time that the index metadata was refreshed.
"""
return self._attrs.get("latestMetadataUpdateTime")
@latest_metadata_update_time.setter
def latest_metadata_update_time(self, latest_metadata_update_time: "str"):
"""Sets the latest_metadata_update_time of this IndexDataset.
The latest time that the index metadata was refreshed.
:param latest_metadata_update_time: The latest_metadata_update_time of this IndexDataset.
:type: str
"""
self._attrs["latestMetadataUpdateTime"] = latest_metadata_update_time
@property
def namespace(self) -> "str":
""" Gets the namespace of this IndexDataset.
The name of the namespace that contains the dataset.
"""
return self._attrs.get("namespace")
@namespace.setter
def namespace(self, namespace: "str"):
"""Sets the namespace of this IndexDataset.
The name of the namespace that contains the dataset.
:param namespace: The namespace of this IndexDataset.
:type: str
"""
self._attrs["namespace"] = namespace
@property
def summary(self) -> "str":
""" Gets the summary of this IndexDataset.
Summary of the dataset's purpose.
"""
return self._attrs.get("summary")
@summary.setter
def summary(self, summary: "str"):
"""Sets the summary of this IndexDataset.
Summary of the dataset's purpose.
:param summary: The summary of this IndexDataset.
:type: str
"""
self._attrs["summary"] = summary
@property
def title(self) -> "str":
""" Gets the title of this IndexDataset.
The title of the dataset. Does not have to be unique.
"""
return self._attrs.get("title")
@title.setter
def title(self, title: "str"):
"""Sets the title of this IndexDataset.
The title of the dataset. Does not have to be unique.
:param title: The title of this IndexDataset.
:type: str
"""
self._attrs["title"] = title
@property
def total_event_count(self) -> "int":
""" Gets the total_event_count of this IndexDataset.
The number of events in the index.
"""
return self._attrs.get("totalEventCount")
@total_event_count.setter
def total_event_count(self, total_event_count: "int"):
"""Sets the total_event_count of this IndexDataset.
The number of events in the index.
:param total_event_count: The total_event_count of this IndexDataset.
:type: int
"""
self._attrs["totalEventCount"] = total_event_count
@property
def total_size(self) -> "int":
""" Gets the total_size of this IndexDataset.
The raw size, in bytes, of the uncompressed data in the indexers.
"""
return self._attrs.get("totalSize")
@total_size.setter
def total_size(self, total_size: "int"):
"""Sets the total_size of this IndexDataset.
The raw size, in bytes, of the uncompressed data in the indexers.
:param total_size: The total_size of this IndexDataset.
:type: int
"""
self._attrs["totalSize"] = total_size
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
Dataset.from_dict_handlers["index"] = IndexDataset._from_dict
class IndexDatasetKind(str, Enum):
INDEX = "index"
@staticmethod
def from_value(value: str):
if value == "index":
return IndexDatasetKind.INDEX
class IndexDatasetPATCH(DatasetPATCH):
@staticmethod
def _from_dict(model: dict) -> "IndexDatasetPATCH":
instance = IndexDatasetPATCH.__new__(IndexDatasetPATCH)
instance._attrs = model
return instance
def __init__(self, disabled: "bool" = None, frozen_time_period_in_secs: "int" = None, kind: "IndexDatasetKind" = None, module: "str" = None, name: "str" = None, owner: "str" = None, **extra):
"""IndexDatasetPATCH"""
self._attrs = dict()
if disabled is not None:
self._attrs["disabled"] = disabled
if frozen_time_period_in_secs is not None:
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
if kind is not None:
self._attrs["kind"] = kind
if module is not None:
self._attrs["module"] = module
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
for k, v in extra.items():
self._attrs[k] = v
@property
def disabled(self) -> "bool":
""" Gets the disabled of this IndexDatasetPATCH.
Specifies whether or not the Splunk index is disabled.
"""
return self._attrs.get("disabled")
@disabled.setter
def disabled(self, disabled: "bool"):
"""Sets the disabled of this IndexDatasetPATCH.
Specifies whether or not the Splunk index is disabled.
:param disabled: The disabled of this IndexDatasetPATCH.
:type: bool
"""
self._attrs["disabled"] = disabled
@property
def frozen_time_period_in_secs(self) -> "int":
""" Gets the frozen_time_period_in_secs of this IndexDatasetPATCH.
The frozenTimePeriodInSecs to use for the index.
"""
return self._attrs.get("frozenTimePeriodInSecs")
@frozen_time_period_in_secs.setter
def frozen_time_period_in_secs(self, frozen_time_period_in_secs: "int"):
"""Sets the frozen_time_period_in_secs of this IndexDatasetPATCH.
The frozenTimePeriodInSecs to use for the index.
:param frozen_time_period_in_secs: The frozen_time_period_in_secs of this IndexDatasetPATCH.
:type: int
"""
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
@property
def kind(self) -> "IndexDatasetKind":
""" Gets the kind of this IndexDatasetPATCH.
"""
return IndexDatasetKind.from_value(self._attrs.get("kind"))
@kind.setter
def kind(self, kind: "IndexDatasetKind"):
"""Sets the kind of this IndexDatasetPATCH.
:param kind: The kind of this IndexDatasetPATCH.
:type: IndexDatasetKind
"""
if isinstance(kind, Enum):
self._attrs["kind"] = kind.value
else:
self._attrs["kind"] = kind # If you supply a string, we presume you know the service will take it.
@property
def module(self) -> "str":
""" Gets the module of this IndexDatasetPATCH.
The name of the module to reassign the dataset into.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this IndexDatasetPATCH.
The name of the module to reassign the dataset into.
:param module: The module of this IndexDatasetPATCH.
:type: str
"""
self._attrs["module"] = module
@property
def name(self) -> "str":
""" Gets the name of this IndexDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this IndexDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this IndexDatasetPATCH.
:type: str
"""
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this IndexDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this IndexDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
:param owner: The owner of this IndexDatasetPATCH.
:type: str
"""
self._attrs["owner"] = owner
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
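# --- Illustrative usage sketch (not part of the generated model code) ---
# A minimal example of an index PATCH body: only the disabled flag and
# retention are set, so to_dict() yields a sparse update. The 30-day
# retention value is hypothetical.
def _example_index_dataset_patch():
    patch = IndexDatasetPATCH(
        disabled=False,
        frozen_time_period_in_secs=30 * 24 * 3600,  # hypothetical 30-day retention
    )
    return patch.to_dict()  # {"disabled": False, "frozenTimePeriodInSecs": 2592000}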
class KVCollectionDataset(Dataset):
@staticmethod
def _from_dict(model: dict) -> "KVCollectionDataset":
instance = KVCollectionDataset.__new__(KVCollectionDataset)
instance._attrs = model
return instance
def __init__(self, created: "str", createdby: "str", id: "str", modified: "str", modifiedby: "str", name: "str", owner: "str", resourcename: "str", appclientidcreatedby: "str" = None, appclientidmodifiedby: "str" = None, description: "str" = None, namespace: "str" = None, summary: "str" = None, title: "str" = None, **extra):
"""KVCollectionDataset"""
self._attrs = dict()
if created is not None:
self._attrs["created"] = created
if createdby is not None:
self._attrs["createdby"] = createdby
if id is not None:
self._attrs["id"] = id
if modified is not None:
self._attrs["modified"] = modified
if modifiedby is not None:
self._attrs["modifiedby"] = modifiedby
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
if resourcename is not None:
self._attrs["resourcename"] = resourcename
if appclientidcreatedby is not None:
self._attrs["appclientidcreatedby"] = appclientidcreatedby
if appclientidmodifiedby is not None:
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
if description is not None:
self._attrs["description"] = description
self._attrs["kind"] = "kvcollection"
if namespace is not None:
self._attrs["namespace"] = namespace
if summary is not None:
self._attrs["summary"] = summary
if title is not None:
self._attrs["title"] = title
for k, v in extra.items():
self._attrs[k] = v
@property
def created(self) -> "str":
""" Gets the created of this KVCollectionDataset.
The date and time the object was created.
"""
return self._attrs.get("created")
@created.setter
def created(self, created: "str"):
"""Sets the created of this KVCollectionDataset.
The date and time the object was created.
:param created: The created of this KVCollectionDataset.
:type: str
"""
if created is None:
raise ValueError("Invalid value for `created`, must not be `None`")
self._attrs["created"] = created
@property
def createdby(self) -> "str":
""" Gets the createdby of this KVCollectionDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
"""
return self._attrs.get("createdby")
@createdby.setter
def createdby(self, createdby: "str"):
"""Sets the createdby of this KVCollectionDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
:param createdby: The createdby of this KVCollectionDataset.
:type: str
"""
if createdby is None:
raise ValueError("Invalid value for `createdby`, must not be `None`")
self._attrs["createdby"] = createdby
@property
def id(self) -> "str":
""" Gets the id of this KVCollectionDataset.
A unique dataset ID.
"""
return self._attrs.get("id")
@id.setter
def id(self, id: "str"):
"""Sets the id of this KVCollectionDataset.
A unique dataset ID.
:param id: The id of this KVCollectionDataset.
:type: str
"""
if id is None:
raise ValueError("Invalid value for `id`, must not be `None`")
self._attrs["id"] = id
@property
def modified(self) -> "str":
""" Gets the modified of this KVCollectionDataset.
The date and time the object was modified.
"""
return self._attrs.get("modified")
@modified.setter
def modified(self, modified: "str"):
"""Sets the modified of this KVCollectionDataset.
The date and time the object was modified.
:param modified: The modified of this KVCollectionDataset.
:type: str
"""
if modified is None:
raise ValueError("Invalid value for `modified`, must not be `None`")
self._attrs["modified"] = modified
@property
def modifiedby(self) -> "str":
""" Gets the modifiedby of this KVCollectionDataset.
The name of the user who most recently modified the object.
"""
return self._attrs.get("modifiedby")
@modifiedby.setter
def modifiedby(self, modifiedby: "str"):
"""Sets the modifiedby of this KVCollectionDataset.
The name of the user who most recently modified the object.
:param modifiedby: The modifiedby of this KVCollectionDataset.
:type: str
"""
if modifiedby is None:
raise ValueError("Invalid value for `modifiedby`, must not be `None`")
self._attrs["modifiedby"] = modifiedby
@property
def name(self) -> "str":
""" Gets the name of this KVCollectionDataset.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this KVCollectionDataset.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this KVCollectionDataset.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this KVCollectionDataset.
The name of the object's owner.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this KVCollectionDataset.
The name of the object's owner.
:param owner: The owner of this KVCollectionDataset.
:type: str
"""
if owner is None:
raise ValueError("Invalid value for `owner`, must not be `None`")
self._attrs["owner"] = owner
@property
def resourcename(self) -> "str":
""" Gets the resourcename of this KVCollectionDataset.
The dataset name qualified by the module name.
"""
return self._attrs.get("resourcename")
@resourcename.setter
def resourcename(self, resourcename: "str"):
"""Sets the resourcename of this KVCollectionDataset.
The dataset name qualified by the module name.
:param resourcename: The resourcename of this KVCollectionDataset.
:type: str
"""
if resourcename is None:
raise ValueError("Invalid value for `resourcename`, must not be `None`")
self._attrs["resourcename"] = resourcename
@property
def appclientidcreatedby(self) -> "str":
""" Gets the appclientidcreatedby of this KVCollectionDataset.
The AppClientId of the app that created the dataset.
"""
return self._attrs.get("appclientidcreatedby")
@appclientidcreatedby.setter
def appclientidcreatedby(self, appclientidcreatedby: "str"):
"""Sets the appclientidcreatedby of this KVCollectionDataset.
The AppClientId of the app that created the dataset.
:param appclientidcreatedby: The appclientidcreatedby of this KVCollectionDataset.
:type: str
"""
self._attrs["appclientidcreatedby"] = appclientidcreatedby
@property
def appclientidmodifiedby(self) -> "str":
""" Gets the appclientidmodifiedby of this KVCollectionDataset.
The AppClientId of the app that most recently modified the dataset.
"""
return self._attrs.get("appclientidmodifiedby")
@appclientidmodifiedby.setter
def appclientidmodifiedby(self, appclientidmodifiedby: "str"):
"""Sets the appclientidmodifiedby of this KVCollectionDataset.
The AppClientId of the app that most recently modified the dataset.
:param appclientidmodifiedby: The appclientidmodifiedby of this KVCollectionDataset.
:type: str
"""
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
@property
def description(self) -> "str":
""" Gets the description of this KVCollectionDataset.
Detailed description of the dataset.
"""
return self._attrs.get("description")
@description.setter
def description(self, description: "str"):
"""Sets the description of this KVCollectionDataset.
Detailed description of the dataset.
:param description: The description of this KVCollectionDataset.
:type: str
"""
self._attrs["description"] = description
@property
def kind(self) -> str:
return "kvcollection"
@property
def namespace(self) -> "str":
""" Gets the namespace of this KVCollectionDataset.
The name of the namespace that contains the dataset.
"""
return self._attrs.get("namespace")
@namespace.setter
def namespace(self, namespace: "str"):
"""Sets the namespace of this KVCollectionDataset.
The name of the namespace that contains the dataset.
:param namespace: The namespace of this KVCollectionDataset.
:type: str
"""
self._attrs["namespace"] = namespace
@property
def summary(self) -> "str":
""" Gets the summary of this KVCollectionDataset.
Summary of the dataset's purpose.
"""
return self._attrs.get("summary")
@summary.setter
def summary(self, summary: "str"):
"""Sets the summary of this KVCollectionDataset.
Summary of the dataset's purpose.
:param summary: The summary of this KVCollectionDataset.
:type: str
"""
self._attrs["summary"] = summary
@property
def title(self) -> "str":
""" Gets the title of this KVCollectionDataset.
The title of the dataset. Does not have to be unique.
"""
return self._attrs.get("title")
@title.setter
def title(self, title: "str"):
"""Sets the title of this KVCollectionDataset.
The title of the dataset. Does not have to be unique.
:param title: The title of this KVCollectionDataset.
:type: str
"""
self._attrs["title"] = title
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
Dataset.from_dict_handlers["kvcollection"] = KVCollectionDataset._from_dict
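# Usage sketch (illustrative, not part of the generated surface): round-tripping
# a payload through the kind-based dispatch. This assumes Dataset._from_dict
# consults Dataset.from_dict_handlers on the payload's "kind" key, as the
# registration above suggests; the payload values are made up.
def _example_kvcollection_roundtrip():
    payload = {"kind": "kvcollection", "id": "ds-0001", "name": "mycollection"}
    dataset = Dataset._from_dict(payload)  # expected to dispatch to KVCollectionDataset._from_dict
    assert dataset.kind == "kvcollection"
    # to_dict() drops None values, so the output mirrors the input payload.
    return dataset.to_dict()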
class KVCollectionDatasetKind(str, Enum):
KVCOLLECTION = "kvcollection"
@staticmethod
def from_value(value: str):
if value == "kvcollection":
return KVCollectionDatasetKind.KVCOLLECTION
class KVCollectionDatasetPATCH(DatasetPATCH):
@staticmethod
def _from_dict(model: dict) -> "KVCollectionDatasetPATCH":
instance = KVCollectionDatasetPATCH.__new__(KVCollectionDatasetPATCH)
instance._attrs = model
return instance
def __init__(self, kind: "KVCollectionDatasetKind" = None, module: "str" = None, name: "str" = None, owner: "str" = None, **extra):
"""KVCollectionDatasetPATCH"""
self._attrs = dict()
if kind is not None:
self._attrs["kind"] = kind
if module is not None:
self._attrs["module"] = module
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
for k, v in extra.items():
self._attrs[k] = v
@property
def kind(self) -> "KVCollectionDatasetKind":
""" Gets the kind of this KVCollectionDatasetPATCH.
"""
return KVCollectionDatasetKind.from_value(self._attrs.get("kind"))
@kind.setter
def kind(self, kind: "KVCollectionDatasetKind"):
"""Sets the kind of this KVCollectionDatasetPATCH.
:param kind: The kind of this KVCollectionDatasetPATCH.
:type: KVCollectionDatasetKind
"""
if isinstance(kind, Enum):
self._attrs["kind"] = kind.value
else:
self._attrs["kind"] = kind # If you supply a string, we presume you know the service will take it.
@property
def module(self) -> "str":
""" Gets the module of this KVCollectionDatasetPATCH.
The name of the module to reassign the dataset to.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this KVCollectionDatasetPATCH.
The name of the module to reassign the dataset to.
:param module: The module of this KVCollectionDatasetPATCH.
:type: str
"""
self._attrs["module"] = module
@property
def name(self) -> "str":
""" Gets the name of this KVCollectionDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this KVCollectionDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this KVCollectionDatasetPATCH.
:type: str
"""
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this KVCollectionDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this KVCollectionDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
:param owner: The owner of this KVCollectionDatasetPATCH.
:type: str
"""
self._attrs["owner"] = owner
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
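# Usage sketch (illustrative): PATCH models serialize sparsely, so only the
# attributes you set appear in the request body. The field values here are
# hypothetical.
def _example_kvcollection_patch_body():
    patch = KVCollectionDatasetPATCH(name="renamedcollection", module="newmodule")
    # Unset attributes never enter _attrs, so to_dict() yields exactly
    # {"module": "newmodule", "name": "renamedcollection"}.
    return patch.to_dict()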
class ListDatasets(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "ListDatasets":
instance = ListDatasets.__new__(ListDatasets)
instance._attrs = model
return instance
def __init__(self, results: "List[Dataset]" = None, **extra):
"""ListDatasets"""
self._attrs = dict()
if results is not None:
self._attrs["results"] = results
for k, v in extra.items():
self._attrs[k] = v
@property
def results(self) -> "List[Dataset]":
""" Gets the results of this ListDatasets.
List of all datasets
"""
return [Dataset._from_dict(i) for i in self._attrs.get("results")]
@results.setter
def results(self, results: "List[Dataset]"):
"""Sets the results of this ListDatasets.
List of all datasets
:param results: The results of this ListDatasets.
:type: List[Dataset]
"""
self._attrs["results"] = results
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
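# Usage sketch (illustrative): ListDatasets keeps raw dicts in _attrs, and the
# results property re-wraps every element on each access via Dataset._from_dict
# (again assuming kind-based dispatch). The payload is hypothetical.
def _example_list_datasets():
    listing = ListDatasets._from_dict(
        {"results": [{"kind": "kvcollection", "id": "ds-0001", "name": "mycollection"}]}
    )
    return [ds.name for ds in listing.results]  # each element is rehydrated into a Dataset subclass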
class Module(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "Module":
instance = Module.__new__(Module)
instance._attrs = model
return instance
def __init__(self, definition: "str", name: "str", created_at: "str" = None, created_by: "str" = None, namespace: "str" = None, **extra):
"""Module"""
self._attrs = dict()
if definition is not None:
self._attrs["definition"] = definition
if name is not None:
self._attrs["name"] = name
if created_at is not None:
self._attrs["createdAt"] = created_at
if created_by is not None:
self._attrs["createdBy"] = created_by
if namespace is not None:
self._attrs["namespace"] = namespace
for k, v in extra.items():
self._attrs[k] = v
@property
def definition(self) -> "str":
""" Gets the definition of this Module.
The definition of the module
"""
return self._attrs.get("definition")
@definition.setter
def definition(self, definition: "str"):
"""Sets the definition of this Module.
The definition of the module
:param definition: The definition of this Module.
:type: str
"""
if definition is None:
raise ValueError("Invalid value for `definition`, must not be `None`")
self._attrs["definition"] = definition
@property
def name(self) -> "str":
""" Gets the name of this Module.
The name of the module
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this Module.
The name of the module
:param name: The name of this Module.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._attrs["name"] = name
@property
def created_at(self) -> "str":
""" Gets the created_at of this Module.
The timestamp when the module was created
"""
return self._attrs.get("createdAt")
@created_at.setter
def created_at(self, created_at: "str"):
"""Sets the created_at of this Module.
The timestamp when the module was created
:param created_at: The created_at of this Module.
:type: str
"""
self._attrs["createdAt"] = created_at
@property
def created_by(self) -> "str":
""" Gets the created_by of this Module.
The user who created the module
"""
return self._attrs.get("createdBy")
@created_by.setter
def created_by(self, created_by: "str"):
"""Sets the created_by of this Module.
The user who created the module
:param created_by: The created_by of this Module.
:type: str
"""
self._attrs["createdBy"] = created_by
@property
def namespace(self) -> "str":
""" Gets the namespace of this Module.
The namespace of the module
"""
return self._attrs.get("namespace")
@namespace.setter
def namespace(self, namespace: "str"):
"""Sets the namespace of this Module.
The namespace of the module
:param namespace: The namespace of this Module.
:type: str
"""
self._attrs["namespace"] = namespace
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
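# Usage sketch (illustrative): Module maps the snake_case constructor arguments
# created_at/created_by onto the service's camelCase keys createdAt/createdBy,
# so to_dict() emits the wire format directly. Values are hypothetical.
def _example_module_wire_format():
    mod = Module(definition="search index=main", name="mymodule", created_at="2021-01-01T00:00:00Z")
    body = mod.to_dict()
    assert body["createdAt"] == "2021-01-01T00:00:00Z"  # snake_case argument, camelCase key
    return body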
class ListModules(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "ListModules":
instance = ListModules.__new__(ListModules)
instance._attrs = model
return instance
def __init__(self, results: "List[Module]" = None, **extra):
"""ListModules"""
self._attrs = dict()
if results is not None:
self._attrs["results"] = results
for k, v in extra.items():
self._attrs[k] = v
@property
def results(self) -> "List[Module]":
""" Gets the results of this ListModules.
List of all modules
"""
return [Module._from_dict(i) for i in self._attrs.get("results")]
@results.setter
def results(self, results: "List[Module]"):
"""Sets the results of this ListModules.
List of all modules
:param results: The results of this ListModules.
:type: List[Module]
"""
self._attrs["results"] = results
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
class ListPreviewResultsResponseFields(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "ListPreviewResultsResponseFields":
instance = ListPreviewResultsResponseFields.__new__(ListPreviewResultsResponseFields)
instance._attrs = model
return instance
def __init__(self, name: "str", data_source: "str" = None, groupby_rank: "str" = None, split_field: "str" = None, split_value: "str" = None, splitby_special: "str" = None, type_special: "str" = None, **extra):
"""ListPreviewResultsResponseFields"""
self._attrs = dict()
if name is not None:
self._attrs["name"] = name
if data_source is not None:
self._attrs["dataSource"] = data_source
if groupby_rank is not None:
self._attrs["groupbyRank"] = groupby_rank
if split_field is not None:
self._attrs["splitField"] = split_field
if split_value is not None:
self._attrs["splitValue"] = split_value
if splitby_special is not None:
self._attrs["splitbySpecial"] = splitby_special
if type_special is not None:
self._attrs["typeSpecial"] = type_special
for k, v in extra.items():
self._attrs[k] = v
@property
def name(self) -> "str":
""" Gets the name of this ListPreviewResultsResponseFields.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this ListPreviewResultsResponseFields.
:param name: The name of this ListPreviewResultsResponseFields.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._attrs["name"] = name
@property
def data_source(self) -> "str":
""" Gets the data_source of this ListPreviewResultsResponseFields.
"""
return self._attrs.get("dataSource")
@data_source.setter
def data_source(self, data_source: "str"):
"""Sets the data_source of this ListPreviewResultsResponseFields.
:param data_source: The data_source of this ListPreviewResultsResponseFields.
:type: str
"""
self._attrs["dataSource"] = data_source
@property
def groupby_rank(self) -> "str":
""" Gets the groupby_rank of this ListPreviewResultsResponseFields.
"""
return self._attrs.get("groupbyRank")
@groupby_rank.setter
def groupby_rank(self, groupby_rank: "str"):
"""Sets the groupby_rank of this ListPreviewResultsResponseFields.
:param groupby_rank: The groupby_rank of this ListPreviewResultsResponseFields.
:type: str
"""
self._attrs["groupbyRank"] = groupby_rank
@property
def split_field(self) -> "str":
""" Gets the split_field of this ListPreviewResultsResponseFields.
"""
return self._attrs.get("splitField")
@split_field.setter
def split_field(self, split_field: "str"):
"""Sets the split_field of this ListPreviewResultsResponseFields.
:param split_field: The split_field of this ListPreviewResultsResponseFields.
:type: str
"""
self._attrs["splitField"] = split_field
@property
def split_value(self) -> "str":
""" Gets the split_value of this ListPreviewResultsResponseFields.
"""
return self._attrs.get("splitValue")
@split_value.setter
def split_value(self, split_value: "str"):
"""Sets the split_value of this ListPreviewResultsResponseFields.
:param split_value: The split_value of this ListPreviewResultsResponseFields.
:type: str
"""
self._attrs["splitValue"] = split_value
@property
def splitby_special(self) -> "str":
""" Gets the splitby_special of this ListPreviewResultsResponseFields.
"""
return self._attrs.get("splitbySpecial")
@splitby_special.setter
def splitby_special(self, splitby_special: "str"):
"""Sets the splitby_special of this ListPreviewResultsResponseFields.
:param splitby_special: The splitby_special of this ListPreviewResultsResponseFields.
:type: str
"""
self._attrs["splitbySpecial"] = splitby_special
@property
def type_special(self) -> "str":
""" Gets the type_special of this ListPreviewResultsResponseFields.
"""
return self._attrs.get("typeSpecial")
@type_special.setter
def type_special(self, type_special: "str"):
"""Sets the type_special of this ListPreviewResultsResponseFields.
:param type_special: The type_special of this ListPreviewResultsResponseFields.
:type: str
"""
self._attrs["typeSpecial"] = type_special
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
class ListPreviewResultsResponse(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "ListPreviewResultsResponse":
instance = ListPreviewResultsResponse.__new__(ListPreviewResultsResponse)
instance._attrs = model
return instance
def __init__(self, is_preview_stable: "bool", results: "List[object]", fields: "List[ListPreviewResultsResponseFields]" = None, messages: "List[Message]" = None, next_link: "str" = None, wait: "str" = None, **extra):
"""ListPreviewResultsResponse"""
self._attrs = dict()
if is_preview_stable is not None:
self._attrs["isPreviewStable"] = is_preview_stable
if results is not None:
self._attrs["results"] = results
if fields is not None:
self._attrs["fields"] = fields
if messages is not None:
self._attrs["messages"] = messages
if next_link is not None:
self._attrs["nextLink"] = next_link
if wait is not None:
self._attrs["wait"] = wait
for k, v in extra.items():
self._attrs[k] = v
@property
def is_preview_stable(self) -> "bool":
""" Gets the is_preview_stable of this ListPreviewResultsResponse.
"""
return self._attrs.get("isPreviewStable")
@is_preview_stable.setter
def is_preview_stable(self, is_preview_stable: "bool"):
"""Sets the is_preview_stable of this ListPreviewResultsResponse.
:param is_preview_stable: The is_preview_stable of this ListPreviewResultsResponse.
:type: bool
"""
if is_preview_stable is None:
raise ValueError("Invalid value for `is_preview_stable`, must not be `None`")
self._attrs["isPreviewStable"] = is_preview_stable
@property
def results(self) -> "List[object]":
""" Gets the results of this ListPreviewResultsResponse.
"""
return self._attrs.get("results")
@results.setter
def results(self, results: "List[object]"):
"""Sets the results of this ListPreviewResultsResponse.
:param results: The results of this ListPreviewResultsResponse.
:type: List[object]
"""
if results is None:
raise ValueError("Invalid value for `results`, must not be `None`")
self._attrs["results"] = results
@property
def fields(self) -> "List[ListPreviewResultsResponseFields]":
""" Gets the fields of this ListPreviewResultsResponse.
"""
return [ListPreviewResultsResponseFields._from_dict(i) for i in self._attrs.get("fields")]
@fields.setter
def fields(self, fields: "List[ListPreviewResultsResponseFields]"):
"""Sets the fields of this ListPreviewResultsResponse.
:param fields: The fields of this ListPreviewResultsResponse.
:type: List[ListPreviewResultsResponseFields]
"""
self._attrs["fields"] = fields
@property
def messages(self) -> "List[Message]":
""" Gets the messages of this ListPreviewResultsResponse.
"""
return [Message._from_dict(i) for i in self._attrs.get("messages")]
@messages.setter
def messages(self, messages: "List[Message]"):
"""Sets the messages of this ListPreviewResultsResponse.
:param messages: The messages of this ListPreviewResultsResponse.
:type: List[Message]
"""
self._attrs["messages"] = messages
@property
def next_link(self) -> "str":
""" Gets the next_link of this ListPreviewResultsResponse.
"""
return self._attrs.get("nextLink")
@next_link.setter
def next_link(self, next_link: "str"):
"""Sets the next_link of this ListPreviewResultsResponse.
:param next_link: The next_link of this ListPreviewResultsResponse.
:type: str
"""
self._attrs["nextLink"] = next_link
@property
def wait(self) -> "str":
""" Gets the wait of this ListPreviewResultsResponse.
"""
return self._attrs.get("wait")
@wait.setter
def wait(self, wait: "str"):
"""Sets the wait of this ListPreviewResultsResponse.
:param wait: The wait of this ListPreviewResultsResponse.
:type: str
"""
self._attrs["wait"] = wait
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
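# Usage sketch (illustrative): the fields property re-wraps the stored dicts
# into ListPreviewResultsResponseFields models on every access (messages behaves
# the same way via the Message model defined earlier). Payload values are
# hypothetical; note the getter iterates the stored list directly, so the key
# must be present.
def _example_preview_response_fields():
    resp = ListPreviewResultsResponse._from_dict({
        "isPreviewStable": True,
        "results": [{"count": 42}],
        "fields": [{"name": "count", "dataSource": "stats"}],
    })
    return [f.name for f in resp.fields]  # -> ["count"]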
class ListSearchResultsResponse(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "ListSearchResultsResponse":
instance = ListSearchResultsResponse.__new__(ListSearchResultsResponse)
instance._attrs = model
return instance
def __init__(self, results: "List[object]", fields: "List[ListPreviewResultsResponseFields]" = None, messages: "List[Message]" = None, next_link: "str" = None, wait: "str" = None, **extra):
"""ListSearchResultsResponse"""
self._attrs = dict()
if results is not None:
self._attrs["results"] = results
if fields is not None:
self._attrs["fields"] = fields
if messages is not None:
self._attrs["messages"] = messages
if next_link is not None:
self._attrs["nextLink"] = next_link
if wait is not None:
self._attrs["wait"] = wait
for k, v in extra.items():
self._attrs[k] = v
@property
def results(self) -> "List[object]":
""" Gets the results of this ListSearchResultsResponse.
"""
return self._attrs.get("results")
@results.setter
def results(self, results: "List[object]"):
"""Sets the results of this ListSearchResultsResponse.
:param results: The results of this ListSearchResultsResponse.
:type: List[object]
"""
if results is None:
raise ValueError("Invalid value for `results`, must not be `None`")
self._attrs["results"] = results
@property
def fields(self) -> "List[ListPreviewResultsResponseFields]":
""" Gets the fields of this ListSearchResultsResponse.
"""
return [ListPreviewResultsResponseFields._from_dict(i) for i in self._attrs.get("fields")]
@fields.setter
def fields(self, fields: "List[ListPreviewResultsResponseFields]"):
"""Sets the fields of this ListSearchResultsResponse.
:param fields: The fields of this ListSearchResultsResponse.
:type: List[ListPreviewResultsResponseFields]
"""
self._attrs["fields"] = fields
@property
def messages(self) -> "List[Message]":
""" Gets the messages of this ListSearchResultsResponse.
"""
return [Message._from_dict(i) for i in self._attrs.get("messages")]
@messages.setter
def messages(self, messages: "List[Message]"):
"""Sets the messages of this ListSearchResultsResponse.
:param messages: The messages of this ListSearchResultsResponse.
:type: List[Message]
"""
self._attrs["messages"] = messages
@property
def next_link(self) -> "str":
""" Gets the next_link of this ListSearchResultsResponse.
"""
return self._attrs.get("nextLink")
@next_link.setter
def next_link(self, next_link: "str"):
"""Sets the next_link of this ListSearchResultsResponse.
:param next_link: The next_link of this ListSearchResultsResponse.
:type: str
"""
self._attrs["nextLink"] = next_link
@property
def wait(self) -> "str":
""" Gets the wait of this ListSearchResultsResponse.
"""
return self._attrs.get("wait")
@wait.setter
def wait(self, wait: "str"):
"""Sets the wait of this ListSearchResultsResponse.
:param wait: The wait of this ListSearchResultsResponse.
:type: str
"""
self._attrs["wait"] = wait
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
class LookupDatasetExternalKind(str, Enum):
KVCOLLECTION = "kvcollection"
@staticmethod
def from_value(value: str):
if value == "kvcollection":
return LookupDatasetExternalKind.KVCOLLECTION
class LookupDataset(Dataset):
@staticmethod
def _from_dict(model: dict) -> "LookupDataset":
instance = LookupDataset.__new__(LookupDataset)
instance._attrs = model
return instance
def __init__(self, created: "str", createdby: "str", id: "str", modified: "str", modifiedby: "str", name: "str", owner: "str", resourcename: "str", appclientidcreatedby: "str" = None, appclientidmodifiedby: "str" = None, case_sensitive_match: "bool" = True, description: "str" = None, external_kind: "LookupDatasetExternalKind" = None, external_name: "str" = None, filter: "str" = None, namespace: "str" = None, summary: "str" = None, title: "str" = None, **extra):
"""LookupDataset"""
self._attrs = dict()
if created is not None:
self._attrs["created"] = created
if createdby is not None:
self._attrs["createdby"] = createdby
if id is not None:
self._attrs["id"] = id
if modified is not None:
self._attrs["modified"] = modified
if modifiedby is not None:
self._attrs["modifiedby"] = modifiedby
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
if resourcename is not None:
self._attrs["resourcename"] = resourcename
if appclientidcreatedby is not None:
self._attrs["appclientidcreatedby"] = appclientidcreatedby
if appclientidmodifiedby is not None:
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
if case_sensitive_match is not None:
self._attrs["caseSensitiveMatch"] = case_sensitive_match
if description is not None:
self._attrs["description"] = description
if external_kind is not None:
self._attrs["externalKind"] = external_kind
if external_name is not None:
self._attrs["externalName"] = external_name
if filter is not None:
self._attrs["filter"] = filter
self._attrs["kind"] = "lookup"
if namespace is not None:
self._attrs["namespace"] = namespace
if summary is not None:
self._attrs["summary"] = summary
if title is not None:
self._attrs["title"] = title
for k, v in extra.items():
self._attrs[k] = v
@property
def created(self) -> "str":
""" Gets the created of this LookupDataset.
The date and time the object was created.
"""
return self._attrs.get("created")
@created.setter
def created(self, created: "str"):
"""Sets the created of this LookupDataset.
The date and time the object was created.
:param created: The created of this LookupDataset.
:type: str
"""
if created is None:
raise ValueError("Invalid value for `created`, must not be `None`")
self._attrs["created"] = created
@property
def createdby(self) -> "str":
""" Gets the createdby of this LookupDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
"""
return self._attrs.get("createdby")
@createdby.setter
def createdby(self, createdby: "str"):
"""Sets the createdby of this LookupDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
:param createdby: The createdby of this LookupDataset.
:type: str
"""
if createdby is None:
raise ValueError("Invalid value for `createdby`, must not be `None`")
self._attrs["createdby"] = createdby
@property
def id(self) -> "str":
""" Gets the id of this LookupDataset.
A unique dataset ID.
"""
return self._attrs.get("id")
@id.setter
def id(self, id: "str"):
"""Sets the id of this LookupDataset.
A unique dataset ID.
:param id: The id of this LookupDataset.
:type: str
"""
if id is None:
raise ValueError("Invalid value for `id`, must not be `None`")
self._attrs["id"] = id
@property
def modified(self) -> "str":
""" Gets the modified of this LookupDataset.
The date and time the object was modified.
"""
return self._attrs.get("modified")
@modified.setter
def modified(self, modified: "str"):
"""Sets the modified of this LookupDataset.
The date and time the object was modified.
:param modified: The modified of this LookupDataset.
:type: str
"""
if modified is None:
raise ValueError("Invalid value for `modified`, must not be `None`")
self._attrs["modified"] = modified
@property
def modifiedby(self) -> "str":
""" Gets the modifiedby of this LookupDataset.
The name of the user who most recently modified the object.
"""
return self._attrs.get("modifiedby")
@modifiedby.setter
def modifiedby(self, modifiedby: "str"):
"""Sets the modifiedby of this LookupDataset.
The name of the user who most recently modified the object.
:param modifiedby: The modifiedby of this LookupDataset.
:type: str
"""
if modifiedby is None:
raise ValueError("Invalid value for `modifiedby`, must not be `None`")
self._attrs["modifiedby"] = modifiedby
@property
def name(self) -> "str":
""" Gets the name of this LookupDataset.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this LookupDataset.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this LookupDataset.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this LookupDataset.
The name of the object's owner.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this LookupDataset.
The name of the object's owner.
:param owner: The owner of this LookupDataset.
:type: str
"""
if owner is None:
raise ValueError("Invalid value for `owner`, must not be `None`")
self._attrs["owner"] = owner
@property
def resourcename(self) -> "str":
""" Gets the resourcename of this LookupDataset.
The dataset name qualified by the module name.
"""
return self._attrs.get("resourcename")
@resourcename.setter
def resourcename(self, resourcename: "str"):
"""Sets the resourcename of this LookupDataset.
The dataset name qualified by the module name.
:param resourcename: The resourcename of this LookupDataset.
:type: str
"""
if resourcename is None:
raise ValueError("Invalid value for `resourcename`, must not be `None`")
self._attrs["resourcename"] = resourcename
@property
def appclientidcreatedby(self) -> "str":
""" Gets the appclientidcreatedby of this LookupDataset.
The AppClientId of the app that created the dataset.
"""
return self._attrs.get("appclientidcreatedby")
@appclientidcreatedby.setter
def appclientidcreatedby(self, appclientidcreatedby: "str"):
"""Sets the appclientidcreatedby of this LookupDataset.
The AppClientId of the app that created the dataset.
:param appclientidcreatedby: The appclientidcreatedby of this LookupDataset.
:type: str
"""
self._attrs["appclientidcreatedby"] = appclientidcreatedby
@property
def appclientidmodifiedby(self) -> "str":
""" Gets the appclientidmodifiedby of this LookupDataset.
The AppClientId of the app that most recently modified the dataset.
"""
return self._attrs.get("appclientidmodifiedby")
@appclientidmodifiedby.setter
def appclientidmodifiedby(self, appclientidmodifiedby: "str"):
"""Sets the appclientidmodifiedby of this LookupDataset.
The AppClientId of the app that most recently modified the dataset.
:param appclientidmodifiedby: The appclientidmodifiedby of this LookupDataset.
:type: str
"""
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
@property
def case_sensitive_match(self) -> "bool":
""" Gets the case_sensitive_match of this LookupDataset.
Match case-sensitively against the lookup.
"""
return self._attrs.get("caseSensitiveMatch")
@case_sensitive_match.setter
def case_sensitive_match(self, case_sensitive_match: "bool"):
"""Sets the case_sensitive_match of this LookupDataset.
Match case-sensitively against the lookup.
:param case_sensitive_match: The case_sensitive_match of this LookupDataset.
:type: bool
"""
self._attrs["caseSensitiveMatch"] = case_sensitive_match
@property
def description(self) -> "str":
""" Gets the description of this LookupDataset.
Detailed description of the dataset.
"""
return self._attrs.get("description")
@description.setter
def description(self, description: "str"):
"""Sets the description of this LookupDataset.
Detailed description of the dataset.
:param description: The description of this LookupDataset.
:type: str
"""
self._attrs["description"] = description
@property
def external_kind(self) -> "LookupDatasetExternalKind":
""" Gets the external_kind of this LookupDataset.
"""
return LookupDatasetExternalKind.from_value(self._attrs.get("externalKind"))
@external_kind.setter
def external_kind(self, external_kind: "LookupDatasetExternalKind"):
"""Sets the external_kind of this LookupDataset.
:param external_kind: The external_kind of this LookupDataset.
:type: LookupDatasetExternalKind
"""
if isinstance(external_kind, Enum):
self._attrs["externalKind"] = external_kind.value
else:
self._attrs["externalKind"] = external_kind # If you supply a string, we presume you know the service will take it.
@property
def external_name(self) -> "str":
""" Gets the external_name of this LookupDataset.
The name of the external lookup.
"""
return self._attrs.get("externalName")
@external_name.setter
def external_name(self, external_name: "str"):
"""Sets the external_name of this LookupDataset.
The name of the external lookup.
:param external_name: The external_name of this LookupDataset.
:type: str
"""
self._attrs["externalName"] = external_name
@property
def filter(self) -> "str":
""" Gets the filter of this LookupDataset.
A query that filters results out of the lookup before those results are returned.
"""
return self._attrs.get("filter")
@filter.setter
def filter(self, filter: "str"):
"""Sets the filter of this LookupDataset.
A query that filters results out of the lookup before those results are returned.
:param filter: The filter of this LookupDataset.
:type: str
"""
self._attrs["filter"] = filter
@property
def kind(self) -> str:
return "lookup"
@property
def namespace(self) -> "str":
""" Gets the namespace of this LookupDataset.
The name of the namespace that contains the dataset.
"""
return self._attrs.get("namespace")
@namespace.setter
def namespace(self, namespace: "str"):
"""Sets the namespace of this LookupDataset.
The name of the namespace that contains the dataset.
:param namespace: The namespace of this LookupDataset.
:type: str
"""
self._attrs["namespace"] = namespace
@property
def summary(self) -> "str":
""" Gets the summary of this LookupDataset.
Summary of the dataset's purpose.
"""
return self._attrs.get("summary")
@summary.setter
def summary(self, summary: "str"):
"""Sets the summary of this LookupDataset.
Summary of the dataset's purpose.
:param summary: The summary of this LookupDataset.
:type: str
"""
self._attrs["summary"] = summary
@property
def title(self) -> "str":
""" Gets the title of this LookupDataset.
The title of the dataset. Does not have to be unique.
"""
return self._attrs.get("title")
@title.setter
def title(self, title: "str"):
"""Sets the title of this LookupDataset.
The title of the dataset. Does not have to be unique.
:param title: The title of this LookupDataset.
:type: str
"""
self._attrs["title"] = title
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
Dataset.from_dict_handlers["lookup"] = LookupDataset._from_dict
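# Usage sketch (illustrative): the external_kind setter accepts either the
# LookupDatasetExternalKind enum or a raw string and stores the plain string
# value either way; the getter converts back through from_value. Values are
# hypothetical.
def _example_lookup_external_kind():
    lookup = LookupDataset._from_dict({"kind": "lookup", "name": "mylookup"})
    lookup.external_kind = LookupDatasetExternalKind.KVCOLLECTION
    assert lookup._attrs["externalKind"] == "kvcollection"  # stored as the string value
    lookup.external_kind = "kvcollection"  # raw strings are passed through unchanged
    return lookup.external_kind  # -> LookupDatasetExternalKind.KVCOLLECTION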
class LookupDatasetKind(str, Enum):
LOOKUP = "lookup"
@staticmethod
def from_value(value: str):
if value == "lookup":
return LookupDatasetKind.LOOKUP
class LookupDatasetPATCH(DatasetPATCH):
@staticmethod
def _from_dict(model: dict) -> "LookupDatasetPATCH":
instance = LookupDatasetPATCH.__new__(LookupDatasetPATCH)
instance._attrs = model
return instance
def __init__(self, case_sensitive_match: "bool" = True, external_kind: "LookupDatasetExternalKind" = None, external_name: "str" = None, filter: "str" = None, kind: "LookupDatasetKind" = None, module: "str" = None, name: "str" = None, owner: "str" = None, **extra):
"""LookupDatasetPATCH"""
self._attrs = dict()
if case_sensitive_match is not None:
self._attrs["caseSensitiveMatch"] = case_sensitive_match
if external_kind is not None:
self._attrs["externalKind"] = external_kind
if external_name is not None:
self._attrs["externalName"] = external_name
if filter is not None:
self._attrs["filter"] = filter
if kind is not None:
self._attrs["kind"] = kind
if module is not None:
self._attrs["module"] = module
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
for k, v in extra.items():
self._attrs[k] = v
@property
def case_sensitive_match(self) -> "bool":
""" Gets the case_sensitive_match of this LookupDatasetPATCH.
Match case-sensitively against the lookup.
"""
return self._attrs.get("caseSensitiveMatch")
@case_sensitive_match.setter
def case_sensitive_match(self, case_sensitive_match: "bool"):
"""Sets the case_sensitive_match of this LookupDatasetPATCH.
Match case-sensitively against the lookup.
:param case_sensitive_match: The case_sensitive_match of this LookupDatasetPATCH.
:type: bool
"""
self._attrs["caseSensitiveMatch"] = case_sensitive_match
@property
def external_kind(self) -> "LookupDatasetExternalKind":
""" Gets the external_kind of this LookupDatasetPATCH.
"""
return LookupDatasetExternalKind.from_value(self._attrs.get("externalKind"))
@external_kind.setter
def external_kind(self, external_kind: "LookupDatasetExternalKind"):
"""Sets the external_kind of this LookupDatasetPATCH.
:param external_kind: The external_kind of this LookupDatasetPATCH.
:type: LookupDatasetExternalKind
"""
if isinstance(external_kind, Enum):
self._attrs["externalKind"] = external_kind.value
else:
self._attrs["externalKind"] = external_kind # If you supply a string, we presume you know the service will take it.
@property
def external_name(self) -> "str":
""" Gets the external_name of this LookupDatasetPATCH.
The name of the external lookup.
"""
return self._attrs.get("externalName")
@external_name.setter
def external_name(self, external_name: "str"):
"""Sets the external_name of this LookupDatasetPATCH.
The name of the external lookup.
:param external_name: The external_name of this LookupDatasetPATCH.
:type: str
"""
self._attrs["externalName"] = external_name
@property
def filter(self) -> "str":
""" Gets the filter of this LookupDatasetPATCH.
A query that filters results out of the lookup before those results are returned.
"""
return self._attrs.get("filter")
@filter.setter
def filter(self, filter: "str"):
"""Sets the filter of this LookupDatasetPATCH.
A query that filters results out of the lookup before those results are returned.
:param filter: The filter of this LookupDatasetPATCH.
:type: str
"""
self._attrs["filter"] = filter
@property
def kind(self) -> "LookupDatasetKind":
""" Gets the kind of this LookupDatasetPATCH.
"""
return LookupDatasetKind.from_value(self._attrs.get("kind"))
@kind.setter
def kind(self, kind: "LookupDatasetKind"):
"""Sets the kind of this LookupDatasetPATCH.
:param kind: The kind of this LookupDatasetPATCH.
:type: LookupDatasetKind
"""
if isinstance(kind, Enum):
self._attrs["kind"] = kind.value
else:
self._attrs["kind"] = kind # If you supply a string, we presume you know the service will take it.
@property
def module(self) -> "str":
""" Gets the module of this LookupDatasetPATCH.
The name of the module to reassign the dataset to.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this LookupDatasetPATCH.
The name of the module to reassign the dataset to.
:param module: The module of this LookupDatasetPATCH.
:type: str
"""
self._attrs["module"] = module
@property
def name(self) -> "str":
""" Gets the name of this LookupDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this LookupDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this LookupDatasetPATCH.
:type: str
"""
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this LookupDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this LookupDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
:param owner: The owner of this LookupDatasetPATCH.
:type: str
"""
self._attrs["owner"] = owner
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
class MetricDataset(Dataset):
@staticmethod
def _from_dict(model: dict) -> "MetricDataset":
instance = MetricDataset.__new__(MetricDataset)
instance._attrs = model
return instance
def __init__(self, created: "str", createdby: "str", id: "str", modified: "str", modifiedby: "str", name: "str", owner: "str", resourcename: "str", appclientidcreatedby: "str" = None, appclientidmodifiedby: "str" = None, description: "str" = None, disabled: "bool" = None, earliest_event_time: "str" = None, earliest_ingest_time: "str" = None, frozen_time_period_in_secs: "int" = None, latest_event_time: "str" = None, latest_ingest_time: "str" = None, latest_metadata_update_time: "str" = None, namespace: "str" = None, summary: "str" = None, title: "str" = None, total_event_count: "int" = None, total_size: "int" = None, **extra):
"""MetricDataset"""
self._attrs = dict()
if created is not None:
self._attrs["created"] = created
if createdby is not None:
self._attrs["createdby"] = createdby
if id is not None:
self._attrs["id"] = id
if modified is not None:
self._attrs["modified"] = modified
if modifiedby is not None:
self._attrs["modifiedby"] = modifiedby
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
if resourcename is not None:
self._attrs["resourcename"] = resourcename
if appclientidcreatedby is not None:
self._attrs["appclientidcreatedby"] = appclientidcreatedby
if appclientidmodifiedby is not None:
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
if description is not None:
self._attrs["description"] = description
if disabled is not None:
self._attrs["disabled"] = disabled
if earliest_event_time is not None:
self._attrs["earliestEventTime"] = earliest_event_time
if earliest_ingest_time is not None:
self._attrs["earliestIngestTime"] = earliest_ingest_time
if frozen_time_period_in_secs is not None:
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
self._attrs["kind"] = "metric"
if latest_event_time is not None:
self._attrs["latestEventTime"] = latest_event_time
if latest_ingest_time is not None:
self._attrs["latestIngestTime"] = latest_ingest_time
if latest_metadata_update_time is not None:
self._attrs["latestMetadataUpdateTime"] = latest_metadata_update_time
if namespace is not None:
self._attrs["namespace"] = namespace
if summary is not None:
self._attrs["summary"] = summary
if title is not None:
self._attrs["title"] = title
if total_event_count is not None:
self._attrs["totalEventCount"] = total_event_count
if total_size is not None:
self._attrs["totalSize"] = total_size
for k, v in extra.items():
self._attrs[k] = v
@property
def created(self) -> "str":
""" Gets the created of this MetricDataset.
The date and time the object was created.
"""
return self._attrs.get("created")
@created.setter
def created(self, created: "str"):
"""Sets the created of this MetricDataset.
The date and time the object was created.
:param created: The created of this MetricDataset.
:type: str
"""
if created is None:
raise ValueError("Invalid value for `created`, must not be `None`")
self._attrs["created"] = created
@property
def createdby(self) -> "str":
""" Gets the createdby of this MetricDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
"""
return self._attrs.get("createdby")
@createdby.setter
def createdby(self, createdby: "str"):
"""Sets the createdby of this MetricDataset.
The name of the user who created the object. This value is obtained from the bearer token and may not be changed.
:param createdby: The createdby of this MetricDataset.
:type: str
"""
if createdby is None:
raise ValueError("Invalid value for `createdby`, must not be `None`")
self._attrs["createdby"] = createdby
@property
def id(self) -> "str":
""" Gets the id of this MetricDataset.
A unique dataset ID.
"""
return self._attrs.get("id")
@id.setter
def id(self, id: "str"):
"""Sets the id of this MetricDataset.
A unique dataset ID.
:param id: The id of this MetricDataset.
:type: str
"""
if id is None:
raise ValueError("Invalid value for `id`, must not be `None`")
self._attrs["id"] = id
@property
def modified(self) -> "str":
""" Gets the modified of this MetricDataset.
The date and time the object was modified.
"""
return self._attrs.get("modified")
@modified.setter
def modified(self, modified: "str"):
"""Sets the modified of this MetricDataset.
The date and time the object was modified.
:param modified: The modified of this MetricDataset.
:type: str
"""
if modified is None:
raise ValueError("Invalid value for `modified`, must not be `None`")
self._attrs["modified"] = modified
@property
def modifiedby(self) -> "str":
""" Gets the modifiedby of this MetricDataset.
The name of the user who most recently modified the object.
"""
return self._attrs.get("modifiedby")
@modifiedby.setter
def modifiedby(self, modifiedby: "str"):
"""Sets the modifiedby of this MetricDataset.
The name of the user who most recently modified the object.
:param modifiedby: The modifiedby of this MetricDataset.
:type: str
"""
if modifiedby is None:
raise ValueError("Invalid value for `modifiedby`, must not be `None`")
self._attrs["modifiedby"] = modifiedby
@property
def name(self) -> "str":
""" Gets the name of this MetricDataset.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this MetricDataset.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this MetricDataset.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this MetricDataset.
The name of the object's owner.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this MetricDataset.
The name of the object's owner.
:param owner: The owner of this MetricDataset.
:type: str
"""
if owner is None:
raise ValueError("Invalid value for `owner`, must not be `None`")
self._attrs["owner"] = owner
@property
def resourcename(self) -> "str":
""" Gets the resourcename of this MetricDataset.
The dataset name qualified by the module name.
"""
return self._attrs.get("resourcename")
@resourcename.setter
def resourcename(self, resourcename: "str"):
"""Sets the resourcename of this MetricDataset.
The dataset name qualified by the module name.
:param resourcename: The resourcename of this MetricDataset.
:type: str
"""
if resourcename is None:
raise ValueError("Invalid value for `resourcename`, must not be `None`")
self._attrs["resourcename"] = resourcename
@property
def appclientidcreatedby(self) -> "str":
""" Gets the appclientidcreatedby of this MetricDataset.
The AppClientId of the app that created the dataset.
"""
return self._attrs.get("appclientidcreatedby")
@appclientidcreatedby.setter
def appclientidcreatedby(self, appclientidcreatedby: "str"):
"""Sets the appclientidcreatedby of this MetricDataset.
The AppClientId of the app that created the dataset.
:param appclientidcreatedby: The appclientidcreatedby of this MetricDataset.
:type: str
"""
self._attrs["appclientidcreatedby"] = appclientidcreatedby
@property
def appclientidmodifiedby(self) -> "str":
""" Gets the appclientidmodifiedby of this MetricDataset.
The AppClientId of the app that most recently modified the dataset.
"""
return self._attrs.get("appclientidmodifiedby")
@appclientidmodifiedby.setter
def appclientidmodifiedby(self, appclientidmodifiedby: "str"):
"""Sets the appclientidmodifiedby of this MetricDataset.
The AppClientId of the app that most recently modified the dataset.
:param appclientidmodifiedby: The appclientidmodifiedby of this MetricDataset.
:type: str
"""
self._attrs["appclientidmodifiedby"] = appclientidmodifiedby
@property
def description(self) -> "str":
""" Gets the description of this MetricDataset.
Detailed description of the dataset.
"""
return self._attrs.get("description")
@description.setter
def description(self, description: "str"):
"""Sets the description of this MetricDataset.
Detailed description of the dataset.
:param description: The description of this MetricDataset.
:type: str
"""
self._attrs["description"] = description
@property
def disabled(self) -> "bool":
""" Gets the disabled of this MetricDataset.
Specifies whether or not the Splunk index is disabled.
"""
return self._attrs.get("disabled")
@disabled.setter
def disabled(self, disabled: "bool"):
"""Sets the disabled of this MetricDataset.
Specifies whether or not the Splunk index is disabled.
:param disabled: The disabled of this MetricDataset.
:type: bool
"""
self._attrs["disabled"] = disabled
@property
def earliest_event_time(self) -> "str":
""" Gets the earliest_event_time of this MetricDataset.
The timestamp, in seconds, of the earliest measure. The timestamp is in UNIX time.
"""
return self._attrs.get("earliestEventTime")
@earliest_event_time.setter
def earliest_event_time(self, earliest_event_time: "str"):
"""Sets the earliest_event_time of this MetricDataset.
The timestamp, in seconds, of the earliest measure. The timestamp is in UNIX time.
:param earliest_event_time: The earliest_event_time of this MetricDataset.
:type: str
"""
self._attrs["earliestEventTime"] = earliest_event_time
@property
def earliest_ingest_time(self) -> "str":
""" Gets the earliest_ingest_time of this MetricDataset.
The earliest index time for any of the measures in this index.
"""
return self._attrs.get("earliestIngestTime")
@earliest_ingest_time.setter
def earliest_ingest_time(self, earliest_ingest_time: "str"):
"""Sets the earliest_ingest_time of this MetricDataset.
The earliest index time for any of the measures in this index.
:param earliest_ingest_time: The earliest_ingest_time of this MetricDataset.
:type: str
"""
self._attrs["earliestIngestTime"] = earliest_ingest_time
@property
def frozen_time_period_in_secs(self) -> "int":
""" Gets the frozen_time_period_in_secs of this MetricDataset.
The frozenTimePeriodInSecs value to use for the index.
"""
return self._attrs.get("frozenTimePeriodInSecs")
@frozen_time_period_in_secs.setter
def frozen_time_period_in_secs(self, frozen_time_period_in_secs: "int"):
"""Sets the frozen_time_period_in_secs of this MetricDataset.
The frozenTimePeriodInSecs value to use for the index.
:param frozen_time_period_in_secs: The frozen_time_period_in_secs of this MetricDataset.
:type: int
"""
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
@property
def kind(self) -> str:
return "metric"
@property
def latest_event_time(self) -> "str":
""" Gets the latest_event_time of this MetricDataset.
The timestamp, in seconds, of the latest measure. The timestamp is in UNIX time.
"""
return self._attrs.get("latestEventTime")
@latest_event_time.setter
def latest_event_time(self, latest_event_time: "str"):
"""Sets the latest_event_time of this MetricDataset.
The timestamp, in seconds, of the latest measure. The timestamp is in UNIX time.
:param latest_event_time: The latest_event_time of this MetricDataset.
:type: str
"""
self._attrs["latestEventTime"] = latest_event_time
@property
def latest_ingest_time(self) -> "str":
""" Gets the latest_ingest_time of this MetricDataset.
The latest index time for any of the measures in this index.
"""
return self._attrs.get("latestIngestTime")
@latest_ingest_time.setter
def latest_ingest_time(self, latest_ingest_time: "str"):
"""Sets the latest_ingest_time of this MetricDataset.
The latest index time for any of the measures in this index.
:param latest_ingest_time: The latest_ingest_time of this MetricDataset.
:type: str
"""
self._attrs["latestIngestTime"] = latest_ingest_time
@property
def latest_metadata_update_time(self) -> "str":
""" Gets the latest_metadata_update_time of this MetricDataset.
The latest time that the metric index metadata was refreshed.
"""
return self._attrs.get("latestMetadataUpdateTime")
@latest_metadata_update_time.setter
def latest_metadata_update_time(self, latest_metadata_update_time: "str"):
"""Sets the latest_metadata_update_time of this MetricDataset.
The latest time that the metric index metadata was refreshed.
:param latest_metadata_update_time: The latest_metadata_update_time of this MetricDataset.
:type: str
"""
self._attrs["latestMetadataUpdateTime"] = latest_metadata_update_time
@property
def namespace(self) -> "str":
""" Gets the namespace of this MetricDataset.
The name of the namespace that contains the dataset.
"""
return self._attrs.get("namespace")
@namespace.setter
def namespace(self, namespace: "str"):
"""Sets the namespace of this MetricDataset.
The name of the namespace that contains the dataset.
:param namespace: The namespace of this MetricDataset.
:type: str
"""
self._attrs["namespace"] = namespace
@property
def summary(self) -> "str":
""" Gets the summary of this MetricDataset.
Summary of the dataset's purpose.
"""
return self._attrs.get("summary")
@summary.setter
def summary(self, summary: "str"):
"""Sets the summary of this MetricDataset.
Summary of the dataset's purpose.
:param summary: The summary of this MetricDataset.
:type: str
"""
self._attrs["summary"] = summary
@property
def title(self) -> "str":
""" Gets the title of this MetricDataset.
The title of the dataset. Does not have to be unique.
"""
return self._attrs.get("title")
@title.setter
def title(self, title: "str"):
"""Sets the title of this MetricDataset.
The title of the dataset. Does not have to be unique.
:param title: The title of this MetricDataset.
:type: str
"""
self._attrs["title"] = title
@property
def total_event_count(self) -> "int":
""" Gets the total_event_count of this MetricDataset.
The number of measures in the metric index.
"""
return self._attrs.get("totalEventCount")
@total_event_count.setter
def total_event_count(self, total_event_count: "int"):
"""Sets the total_event_count of this MetricDataset.
The number of measures in the metric index.
:param total_event_count: The total_event_count of this MetricDataset.
:type: int
"""
self._attrs["totalEventCount"] = total_event_count
@property
def total_size(self) -> "int":
""" Gets the total_size of this MetricDataset.
For metrics indexes, the totalSize is set to 0.
"""
return self._attrs.get("totalSize")
@total_size.setter
def total_size(self, total_size: "int"):
"""Sets the total_size of this MetricDataset.
For metrics indexes, the totalSize is set to 0.
:param total_size: The total_size of this MetricDataset.
:type: int
"""
self._attrs["totalSize"] = total_size
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
Dataset.from_dict_handlers["metric"] = MetricDataset._from_dict
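# Usage sketch (illustrative): MetricDataset carries index statistics (counts
# and timestamps) alongside the common dataset fields. Assuming the kind-based
# dispatch registered above, a raw payload rehydrates into a MetricDataset;
# values are hypothetical.
def _example_metric_dataset_stats():
    metric = Dataset._from_dict({
        "kind": "metric",
        "name": "mymetrics",
        "totalEventCount": 1024,
        "totalSize": 0,  # always 0 for metrics indexes, per the docstring above
    })
    return metric.total_event_count  # -> 1024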
class MetricDatasetKind(str, Enum):
METRIC = "metric"
@staticmethod
def from_value(value: str):
if value == "metric":
return MetricDatasetKind.METRIC
class MetricDatasetPATCH(DatasetPATCH):
@staticmethod
def _from_dict(model: dict) -> "MetricDatasetPATCH":
instance = MetricDatasetPATCH.__new__(MetricDatasetPATCH)
instance._attrs = model
return instance
def __init__(self, disabled: "bool" = None, frozen_time_period_in_secs: "int" = None, kind: "MetricDatasetKind" = None, module: "str" = None, name: "str" = None, owner: "str" = None, **extra):
"""MetricDatasetPATCH"""
self._attrs = dict()
if disabled is not None:
self._attrs["disabled"] = disabled
if frozen_time_period_in_secs is not None:
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
if kind is not None:
self._attrs["kind"] = kind
if module is not None:
self._attrs["module"] = module
if name is not None:
self._attrs["name"] = name
if owner is not None:
self._attrs["owner"] = owner
for k, v in extra.items():
self._attrs[k] = v
@property
def disabled(self) -> "bool":
""" Gets the disabled of this MetricDatasetPATCH.
Specifies whether or not the Splunk index is disabled.
"""
return self._attrs.get("disabled")
@disabled.setter
def disabled(self, disabled: "bool"):
"""Sets the disabled of this MetricDatasetPATCH.
Specifies whether or not the Splunk index is disabled.
:param disabled: The disabled of this MetricDatasetPATCH.
:type: bool
"""
self._attrs["disabled"] = disabled
@property
def frozen_time_period_in_secs(self) -> "int":
""" Gets the frozen_time_period_in_secs of this MetricDatasetPATCH.
The frozenTimePeriodInSecs value to use for the index.
"""
return self._attrs.get("frozenTimePeriodInSecs")
@frozen_time_period_in_secs.setter
def frozen_time_period_in_secs(self, frozen_time_period_in_secs: "int"):
"""Sets the frozen_time_period_in_secs of this MetricDatasetPATCH.
The frozenTimePeriodInSecs value to use for the index.
:param frozen_time_period_in_secs: The frozen_time_period_in_secs of this MetricDatasetPATCH.
:type: int
"""
self._attrs["frozenTimePeriodInSecs"] = frozen_time_period_in_secs
@property
def kind(self) -> "MetricDatasetKind":
""" Gets the kind of this MetricDatasetPATCH.
"""
return MetricDatasetKind.from_value(self._attrs.get("kind"))
@kind.setter
def kind(self, kind: "MetricDatasetKind"):
"""Sets the kind of this MetricDatasetPATCH.
:param kind: The kind of this MetricDatasetPATCH.
:type: MetricDatasetKind
"""
if isinstance(kind, Enum):
self._attrs["kind"] = kind.value
else:
self._attrs["kind"] = kind # If you supply a string, we presume you know the service will take it.
@property
def module(self) -> "str":
""" Gets the module of this MetricDatasetPATCH.
        The name of the module to reassign the dataset into.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this MetricDatasetPATCH.
        The name of the module to reassign the dataset into.
:param module: The module of this MetricDatasetPATCH.
:type: str
"""
self._attrs["module"] = module
@property
def name(self) -> "str":
""" Gets the name of this MetricDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this MetricDatasetPATCH.
The dataset name. Dataset names must be unique within each module.
:param name: The name of this MetricDatasetPATCH.
:type: str
"""
self._attrs["name"] = name
@property
def owner(self) -> "str":
""" Gets the owner of this MetricDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
"""
return self._attrs.get("owner")
@owner.setter
def owner(self, owner: "str"):
"""Sets the owner of this MetricDatasetPATCH.
The name of the dataset owner. This value is obtained from the bearer token.
:param owner: The owner of this MetricDatasetPATCH.
:type: str
"""
self._attrs["owner"] = owner
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
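# --- Usage sketch (illustrative, not part of the generated client). Only the
# fields that are explicitly set survive to_dict(), so a PATCH body stays
# small; MetricDatasetKind subclasses str, so the enum value is JSON-friendly.
def _example_metric_dataset_patch():
    patch = MetricDatasetPATCH(name="my_metrics",
                               frozen_time_period_in_secs=86400,
                               kind=MetricDatasetKind.METRIC)
    # e.g. {'name': 'my_metrics', 'frozenTimePeriodInSecs': 86400, 'kind': 'metric'}
    return patch.to_dict()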
class SearchJob(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "SearchJob":
instance = SearchJob.__new__(SearchJob)
instance._attrs = model
return instance
def __init__(self, query: "str", allow_side_effects: "bool" = False, collect_event_summary: "bool" = False, collect_field_summary: "bool" = False, collect_time_buckets: "bool" = False, completion_time: "str" = None, dispatch_time: "str" = None, enable_preview: "bool" = False, extract_all_fields: "bool" = False, extract_fields: "str" = '', max_time: "int" = 3600, messages: "List[Message]" = None, module: "str" = '', name: "str" = None, parent: "str" = None, percent_complete: "int" = 0, preview_available: "str" = 'false', query_parameters: "QueryParameters" = None, required_freshness: "int" = 0, resolved_earliest: "str" = None, resolved_latest: "str" = None, results_available: "int" = 0, results_preview_available: "int" = 0, sid: "str" = None, status: "SearchStatus" = None, **extra):
"""SearchJob"""
self._attrs = dict()
if query is not None:
self._attrs["query"] = query
if allow_side_effects is not None:
self._attrs["allowSideEffects"] = allow_side_effects
if collect_event_summary is not None:
self._attrs["collectEventSummary"] = collect_event_summary
if collect_field_summary is not None:
self._attrs["collectFieldSummary"] = collect_field_summary
if collect_time_buckets is not None:
self._attrs["collectTimeBuckets"] = collect_time_buckets
if completion_time is not None:
self._attrs["completionTime"] = completion_time
if dispatch_time is not None:
self._attrs["dispatchTime"] = dispatch_time
if enable_preview is not None:
self._attrs["enablePreview"] = enable_preview
if extract_all_fields is not None:
self._attrs["extractAllFields"] = extract_all_fields
if extract_fields is not None:
self._attrs["extractFields"] = extract_fields
if max_time is not None:
self._attrs["maxTime"] = max_time
if messages is not None:
self._attrs["messages"] = messages
if module is not None:
self._attrs["module"] = module
if name is not None:
self._attrs["name"] = name
if parent is not None:
self._attrs["parent"] = parent
if percent_complete is not None:
self._attrs["percentComplete"] = percent_complete
if preview_available is not None:
self._attrs["previewAvailable"] = preview_available
if query_parameters is not None:
self._attrs["queryParameters"] = query_parameters.to_dict()
if required_freshness is not None:
self._attrs["requiredFreshness"] = required_freshness
if resolved_earliest is not None:
self._attrs["resolvedEarliest"] = resolved_earliest
if resolved_latest is not None:
self._attrs["resolvedLatest"] = resolved_latest
if results_available is not None:
self._attrs["resultsAvailable"] = results_available
if results_preview_available is not None:
self._attrs["resultsPreviewAvailable"] = results_preview_available
if sid is not None:
self._attrs["sid"] = sid
if status is not None:
self._attrs["status"] = status
for k, v in extra.items():
self._attrs[k] = v
@property
def query(self) -> "str":
""" Gets the query of this SearchJob.
The SPL search string.
"""
return self._attrs.get("query")
@query.setter
def query(self, query: "str"):
"""Sets the query of this SearchJob.
The SPL search string.
:param query: The query of this SearchJob.
:type: str
"""
if query is None:
raise ValueError("Invalid value for `query`, must not be `None`")
self._attrs["query"] = query
@property
def allow_side_effects(self) -> "bool":
""" Gets the allow_side_effects of this SearchJob.
Specifies whether a search that contains commands with side effects (with possible security risks) is allowed to run.
"""
return self._attrs.get("allowSideEffects")
@allow_side_effects.setter
def allow_side_effects(self, allow_side_effects: "bool"):
"""Sets the allow_side_effects of this SearchJob.
Specifies whether a search that contains commands with side effects (with possible security risks) is allowed to run.
:param allow_side_effects: The allow_side_effects of this SearchJob.
:type: bool
"""
self._attrs["allowSideEffects"] = allow_side_effects
@property
def collect_event_summary(self) -> "bool":
""" Gets the collect_event_summary of this SearchJob.
Specifies whether a search is allowed to collect events summary information during the run time.
"""
return self._attrs.get("collectEventSummary")
@collect_event_summary.setter
def collect_event_summary(self, collect_event_summary: "bool"):
"""Sets the collect_event_summary of this SearchJob.
Specifies whether a search is allowed to collect events summary information during the run time.
:param collect_event_summary: The collect_event_summary of this SearchJob.
:type: bool
"""
self._attrs["collectEventSummary"] = collect_event_summary
@property
def collect_field_summary(self) -> "bool":
""" Gets the collect_field_summary of this SearchJob.
Specifies whether a search is allowed to collect fields summary information during the run time.
"""
return self._attrs.get("collectFieldSummary")
@collect_field_summary.setter
def collect_field_summary(self, collect_field_summary: "bool"):
"""Sets the collect_field_summary of this SearchJob.
Specifies whether a search is allowed to collect fields summary information during the run time.
:param collect_field_summary: The collect_field_summary of this SearchJob.
:type: bool
"""
self._attrs["collectFieldSummary"] = collect_field_summary
@property
def collect_time_buckets(self) -> "bool":
""" Gets the collect_time_buckets of this SearchJob.
Specifies whether a search is allowed to collect timeline buckets summary information during the run time.
"""
return self._attrs.get("collectTimeBuckets")
@collect_time_buckets.setter
def collect_time_buckets(self, collect_time_buckets: "bool"):
"""Sets the collect_time_buckets of this SearchJob.
Specifies whether a search is allowed to collect timeline buckets summary information during the run time.
:param collect_time_buckets: The collect_time_buckets of this SearchJob.
:type: bool
"""
self._attrs["collectTimeBuckets"] = collect_time_buckets
@property
def completion_time(self) -> "str":
""" Gets the completion_time of this SearchJob.
The time, in GMT, that the search job is finished. Empty if the search job has not completed.
"""
return self._attrs.get("completionTime")
@completion_time.setter
def completion_time(self, completion_time: "str"):
"""Sets the completion_time of this SearchJob.
The time, in GMT, that the search job is finished. Empty if the search job has not completed.
:param completion_time: The completion_time of this SearchJob.
:type: str
"""
self._attrs["completionTime"] = completion_time
@property
def dispatch_time(self) -> "str":
""" Gets the dispatch_time of this SearchJob.
The time, in GMT, that the search job is dispatched.
"""
return self._attrs.get("dispatchTime")
@dispatch_time.setter
def dispatch_time(self, dispatch_time: "str"):
"""Sets the dispatch_time of this SearchJob.
The time, in GMT, that the search job is dispatched.
:param dispatch_time: The dispatch_time of this SearchJob.
:type: str
"""
self._attrs["dispatchTime"] = dispatch_time
@property
def enable_preview(self) -> "bool":
""" Gets the enable_preview of this SearchJob.
Specifies whether a search is allowed to collect preview results during the run time.
"""
return self._attrs.get("enablePreview")
@enable_preview.setter
def enable_preview(self, enable_preview: "bool"):
"""Sets the enable_preview of this SearchJob.
Specifies whether a search is allowed to collect preview results during the run time.
:param enable_preview: The enable_preview of this SearchJob.
:type: bool
"""
self._attrs["enablePreview"] = enable_preview
@property
def extract_all_fields(self) -> "bool":
""" Gets the extract_all_fields of this SearchJob.
Specifies whether the Search service should extract all of the available fields in the data, including fields not mentioned in the SPL for the search job. Set to 'false' for better search performance. The 'extractAllFields' parameter is deprecated as of version v3alpha1. Although this parameter continues to function, it might be removed in a future version. Use the 'extractFields' parameter instead.
"""
return self._attrs.get("extractAllFields")
@extract_all_fields.setter
def extract_all_fields(self, extract_all_fields: "bool"):
"""Sets the extract_all_fields of this SearchJob.
Specifies whether the Search service should extract all of the available fields in the data, including fields not mentioned in the SPL for the search job. Set to 'false' for better search performance. The 'extractAllFields' parameter is deprecated as of version v3alpha1. Although this parameter continues to function, it might be removed in a future version. Use the 'extractFields' parameter instead.
:param extract_all_fields: The extract_all_fields of this SearchJob.
:type: bool
"""
self._attrs["extractAllFields"] = extract_all_fields
@property
def extract_fields(self) -> "str":
""" Gets the extract_fields of this SearchJob.
        Specifies how the Search service should extract fields. Valid values include 'all', 'none', or 'indexed'. 'all' extracts all fields, 'indexed' extracts only indexed fields, and 'none' extracts only the default fields. This parameter overrides the value of the 'extractAllFields' parameter. Set to 'none' for better search performance.
"""
return self._attrs.get("extractFields")
@extract_fields.setter
def extract_fields(self, extract_fields: "str"):
"""Sets the extract_fields of this SearchJob.
        Specifies how the Search service should extract fields. Valid values include 'all', 'none', or 'indexed'. 'all' extracts all fields, 'indexed' extracts only indexed fields, and 'none' extracts only the default fields. This parameter overrides the value of the 'extractAllFields' parameter. Set to 'none' for better search performance.
:param extract_fields: The extract_fields of this SearchJob.
:type: str
"""
self._attrs["extractFields"] = extract_fields
@property
def max_time(self) -> "int":
""" Gets the max_time of this SearchJob.
The number of seconds to run the search before finalizing the search. The default value is 3600 seconds (1 hour). The maximum value is 3600 seconds (1 hour).
"""
return self._attrs.get("maxTime")
@max_time.setter
def max_time(self, max_time: "int"):
"""Sets the max_time of this SearchJob.
The number of seconds to run the search before finalizing the search. The default value is 3600 seconds (1 hour). The maximum value is 3600 seconds (1 hour).
:param max_time: The max_time of this SearchJob.
:type: int
"""
self._attrs["maxTime"] = max_time
@property
def messages(self) -> "List[Message]":
""" Gets the messages of this SearchJob.
"""
return [Message._from_dict(i) for i in self._attrs.get("messages")]
@messages.setter
def messages(self, messages: "List[Message]"):
"""Sets the messages of this SearchJob.
:param messages: The messages of this SearchJob.
:type: List[Message]
"""
self._attrs["messages"] = messages
@property
def module(self) -> "str":
""" Gets the module of this SearchJob.
The module to run the search in. The default module is used if a module is not specified.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this SearchJob.
The module to run the search in. The default module is used if a module is not specified.
:param module: The module of this SearchJob.
:type: str
"""
self._attrs["module"] = module
@property
def name(self) -> "str":
""" Gets the name of this SearchJob.
The name of the created search job.
"""
return self._attrs.get("name")
@name.setter
def name(self, name: "str"):
"""Sets the name of this SearchJob.
The name of the created search job.
:param name: The name of this SearchJob.
:type: str
"""
self._attrs["name"] = name
@property
def parent(self) -> "str":
""" Gets the parent of this SearchJob.
The 'rsid' of an associated recurring-search, if this search job is dispatched by a recurring-search.
"""
return self._attrs.get("parent")
@parent.setter
def parent(self, parent: "str"):
"""Sets the parent of this SearchJob.
The 'rsid' of an associated recurring-search, if this search job is dispatched by a recurring-search.
:param parent: The parent of this SearchJob.
:type: str
"""
self._attrs["parent"] = parent
@property
def percent_complete(self) -> "int":
""" Gets the percent_complete of this SearchJob.
An estimate of the percent of time remaining before the job completes.
"""
return self._attrs.get("percentComplete")
@percent_complete.setter
def percent_complete(self, percent_complete: "int"):
"""Sets the percent_complete of this SearchJob.
An estimate of the percent of time remaining before the job completes.
:param percent_complete: The percent_complete of this SearchJob.
:type: int
"""
self._attrs["percentComplete"] = percent_complete
@property
def preview_available(self) -> "str":
""" Gets the preview_available of this SearchJob.
Specifies if preview results are available for the search job. The valid status values are 'unknown', 'true', and 'false'.
"""
return self._attrs.get("previewAvailable")
@preview_available.setter
def preview_available(self, preview_available: "str"):
"""Sets the preview_available of this SearchJob.
Specifies if preview results are available for the search job. The valid status values are 'unknown', 'true', and 'false'.
:param preview_available: The preview_available of this SearchJob.
:type: str
"""
self._attrs["previewAvailable"] = preview_available
@property
def query_parameters(self) -> "QueryParameters":
""" Gets the query_parameters of this SearchJob.
Represents parameters on the search job such as 'earliest' and 'latest'.
"""
return QueryParameters._from_dict(self._attrs["queryParameters"])
@query_parameters.setter
def query_parameters(self, query_parameters: "QueryParameters"):
"""Sets the query_parameters of this SearchJob.
Represents parameters on the search job such as 'earliest' and 'latest'.
:param query_parameters: The query_parameters of this SearchJob.
:type: QueryParameters
"""
self._attrs["queryParameters"] = query_parameters.to_dict()
@property
def required_freshness(self) -> "int":
""" Gets the required_freshness of this SearchJob.
Specifies a maximum time interval, in seconds, between identical existing searches. The 'requiredFreshness' parameter is used to determine if an existing search with the same query and the same time boundaries can be reused, instead of running the same search again. Freshness is applied to the resolvedEarliest and resolvedLatest parameters. If an existing search has the same exact criteria as this search and the resolvedEarliest and resolvedLatest values are within the freshness interval, the existing search metadata is returned instead of initiating a new search job. By default, the requiredFreshness parameter is set to 0 which means that the platform does not attempt to use an existing search.
"""
return self._attrs.get("requiredFreshness")
@required_freshness.setter
def required_freshness(self, required_freshness: "int"):
"""Sets the required_freshness of this SearchJob.
Specifies a maximum time interval, in seconds, between identical existing searches. The 'requiredFreshness' parameter is used to determine if an existing search with the same query and the same time boundaries can be reused, instead of running the same search again. Freshness is applied to the resolvedEarliest and resolvedLatest parameters. If an existing search has the same exact criteria as this search and the resolvedEarliest and resolvedLatest values are within the freshness interval, the existing search metadata is returned instead of initiating a new search job. By default, the requiredFreshness parameter is set to 0 which means that the platform does not attempt to use an existing search.
:param required_freshness: The required_freshness of this SearchJob.
:type: int
"""
self._attrs["requiredFreshness"] = required_freshness
@property
def resolved_earliest(self) -> "str":
""" Gets the resolved_earliest of this SearchJob.
        The earliest time specified as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'earliest' queryParameters.
"""
return self._attrs.get("resolvedEarliest")
@resolved_earliest.setter
def resolved_earliest(self, resolved_earliest: "str"):
"""Sets the resolved_earliest of this SearchJob.
        The earliest time specified as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'earliest' queryParameters.
:param resolved_earliest: The resolved_earliest of this SearchJob.
:type: str
"""
self._attrs["resolvedEarliest"] = resolved_earliest
@property
def resolved_latest(self) -> "str":
""" Gets the resolved_latest of this SearchJob.
        The latest time specified as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'latest' queryParameters.
"""
return self._attrs.get("resolvedLatest")
@resolved_latest.setter
def resolved_latest(self, resolved_latest: "str"):
"""Sets the resolved_latest of this SearchJob.
        The latest time specified as an absolute value in GMT. The time is computed based on the values you specify for the 'timezone' and 'latest' queryParameters.
:param resolved_latest: The resolved_latest of this SearchJob.
:type: str
"""
self._attrs["resolvedLatest"] = resolved_latest
@property
def results_available(self) -> "int":
""" Gets the results_available of this SearchJob.
The number of results produced so far for the search job.
"""
return self._attrs.get("resultsAvailable")
@results_available.setter
def results_available(self, results_available: "int"):
"""Sets the results_available of this SearchJob.
The number of results produced so far for the search job.
:param results_available: The results_available of this SearchJob.
:type: int
"""
self._attrs["resultsAvailable"] = results_available
@property
def results_preview_available(self) -> "int":
""" Gets the results_preview_available of this SearchJob.
The number of the preview search results for the job with the specified search ID (sid).
"""
return self._attrs.get("resultsPreviewAvailable")
@results_preview_available.setter
def results_preview_available(self, results_preview_available: "int"):
"""Sets the results_preview_available of this SearchJob.
The number of the preview search results for the job with the specified search ID (sid).
:param results_preview_available: The results_preview_available of this SearchJob.
:type: int
"""
self._attrs["resultsPreviewAvailable"] = results_preview_available
@property
def sid(self) -> "str":
""" Gets the sid of this SearchJob.
The ID assigned to the search job.
"""
return self._attrs.get("sid")
@sid.setter
def sid(self, sid: "str"):
"""Sets the sid of this SearchJob.
The ID assigned to the search job.
:param sid: The sid of this SearchJob.
:type: str
"""
self._attrs["sid"] = sid
@property
def status(self) -> "SearchStatus":
""" Gets the status of this SearchJob.
"""
return SearchStatus.from_value(self._attrs.get("status"))
@status.setter
def status(self, status: "SearchStatus"):
"""Sets the status of this SearchJob.
:param status: The status of this SearchJob.
:type: SearchStatus
"""
if isinstance(status, Enum):
self._attrs["status"] = status.value
else:
self._attrs["status"] = status # If you supply a string, we presume you know the service will take it.
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
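# --- Usage sketch (illustrative). 'query' is the only required argument; the
# other values shown here are assumptions for the example. The camelCase keys
# produced by to_dict() match the wire format used throughout this module.
def _example_search_job():
    job = SearchJob(query="| from index:main | head 10",
                    extract_fields="none",  # per the docstring, best performance
                    max_time=600)
    return job.to_dict()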
class SingleStatementQueryParameters(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "SingleStatementQueryParameters":
instance = SingleStatementQueryParameters.__new__(SingleStatementQueryParameters)
instance._attrs = model
return instance
def __init__(self, allow_side_effects: "bool" = False, collect_field_summary: "bool" = False, collect_time_buckets: "bool" = False, earliest: "str" = '-24h@h', enable_preview: "bool" = False, extract_fields: "str" = 'indexed', latest: "str" = 'now', max_time: "int" = 3600, relative_time_anchor: "datetime" = None, sid: "str" = '', timezone: "object" = None, **extra):
"""SingleStatementQueryParameters"""
self._attrs = dict()
if allow_side_effects is not None:
self._attrs["allowSideEffects"] = allow_side_effects
if collect_field_summary is not None:
self._attrs["collectFieldSummary"] = collect_field_summary
if collect_time_buckets is not None:
self._attrs["collectTimeBuckets"] = collect_time_buckets
if earliest is not None:
self._attrs["earliest"] = earliest
if enable_preview is not None:
self._attrs["enablePreview"] = enable_preview
if extract_fields is not None:
self._attrs["extractFields"] = extract_fields
if latest is not None:
self._attrs["latest"] = latest
if max_time is not None:
self._attrs["maxTime"] = max_time
if relative_time_anchor is not None:
self._attrs["relativeTimeAnchor"] = relative_time_anchor
if sid is not None:
self._attrs["sid"] = sid
if timezone is not None:
self._attrs["timezone"] = timezone
for k, v in extra.items():
self._attrs[k] = v
@property
def allow_side_effects(self) -> "bool":
""" Gets the allow_side_effects of this SingleStatementQueryParameters.
        Specifies whether a search that contains commands with side effects (with possible security risks) is allowed to run. Commands with side effects include 'thru' and 'into'.
"""
return self._attrs.get("allowSideEffects")
@allow_side_effects.setter
def allow_side_effects(self, allow_side_effects: "bool"):
"""Sets the allow_side_effects of this SingleStatementQueryParameters.
        Specifies whether a search that contains commands with side effects (with possible security risks) is allowed to run. Commands with side effects include 'thru' and 'into'.
:param allow_side_effects: The allow_side_effects of this SingleStatementQueryParameters.
:type: bool
"""
self._attrs["allowSideEffects"] = allow_side_effects
@property
def collect_field_summary(self) -> "bool":
""" Gets the collect_field_summary of this SingleStatementQueryParameters.
Specifies whether a search is allowed to collect the Fields summary during the run time.
"""
return self._attrs.get("collectFieldSummary")
@collect_field_summary.setter
def collect_field_summary(self, collect_field_summary: "bool"):
"""Sets the collect_field_summary of this SingleStatementQueryParameters.
Specifies whether a search is allowed to collect the Fields summary during the run time.
:param collect_field_summary: The collect_field_summary of this SingleStatementQueryParameters.
:type: bool
"""
self._attrs["collectFieldSummary"] = collect_field_summary
@property
def collect_time_buckets(self) -> "bool":
""" Gets the collect_time_buckets of this SingleStatementQueryParameters.
Specifies whether a search is allowed to collect the Timeline Buckets summary during the run time.
"""
return self._attrs.get("collectTimeBuckets")
@collect_time_buckets.setter
def collect_time_buckets(self, collect_time_buckets: "bool"):
"""Sets the collect_time_buckets of this SingleStatementQueryParameters.
Specifies whether a search is allowed to collect the Timeline Buckets summary during the run time.
:param collect_time_buckets: The collect_time_buckets of this SingleStatementQueryParameters.
:type: bool
"""
self._attrs["collectTimeBuckets"] = collect_time_buckets
@property
def earliest(self) -> "str":
""" Gets the earliest of this SingleStatementQueryParameters.
        The earliest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time or UTC in seconds using the ISO-8601 (%FT%T.%Q) format, for example 2020-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.
"""
return self._attrs.get("earliest")
@earliest.setter
def earliest(self, earliest: "str"):
"""Sets the earliest of this SingleStatementQueryParameters.
        The earliest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time or UTC in seconds using the ISO-8601 (%FT%T.%Q) format, for example 2020-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.
:param earliest: The earliest of this SingleStatementQueryParameters.
:type: str
"""
self._attrs["earliest"] = earliest
@property
def enable_preview(self) -> "bool":
""" Gets the enable_preview of this SingleStatementQueryParameters.
Specifies whether a search is allowed to collect the preview results during the run time.
"""
return self._attrs.get("enablePreview")
@enable_preview.setter
def enable_preview(self, enable_preview: "bool"):
"""Sets the enable_preview of this SingleStatementQueryParameters.
Specifies whether a search is allowed to collect the preview results during the run time.
:param enable_preview: The enable_preview of this SingleStatementQueryParameters.
:type: bool
"""
self._attrs["enablePreview"] = enable_preview
@property
def extract_fields(self) -> "str":
""" Gets the extract_fields of this SingleStatementQueryParameters.
Specifies how the Search service should extract fields. Valid values include 'all', 'none', or 'indexed'. 'all' extracts all fields, 'indexed' extracts only indexed fields, and 'none' extracts only the default fields.
"""
return self._attrs.get("extractFields")
@extract_fields.setter
def extract_fields(self, extract_fields: "str"):
"""Sets the extract_fields of this SingleStatementQueryParameters.
Specifies how the Search service should extract fields. Valid values include 'all', 'none', or 'indexed'. 'all' extracts all fields, 'indexed' extracts only indexed fields, and 'none' extracts only the default fields.
:param extract_fields: The extract_fields of this SingleStatementQueryParameters.
:type: str
"""
self._attrs["extractFields"] = extract_fields
@property
def latest(self) -> "str":
""" Gets the latest of this SingleStatementQueryParameters.
        The latest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time or UTC in seconds using the ISO-8601 (%FT%T.%Q) format, for example 2020-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.
"""
return self._attrs.get("latest")
@latest.setter
def latest(self, latest: "str"):
"""Sets the latest of this SingleStatementQueryParameters.
        The latest time, in absolute or relative format, to retrieve events. When specifying an absolute time, specify either UNIX time or UTC in seconds using the ISO-8601 (%FT%T.%Q) format, for example 2020-01-25T13:15:30Z. GMT is the default timezone. You must specify GMT when you specify UTC. Any offset specified is ignored.
:param latest: The latest of this SingleStatementQueryParameters.
:type: str
"""
self._attrs["latest"] = latest
@property
def max_time(self) -> "int":
""" Gets the max_time of this SingleStatementQueryParameters.
The number of seconds to run the search before finalizing the search. The maximum value is 3600 seconds (1 hour).
"""
return self._attrs.get("maxTime")
@max_time.setter
def max_time(self, max_time: "int"):
"""Sets the max_time of this SingleStatementQueryParameters.
The number of seconds to run the search before finalizing the search. The maximum value is 3600 seconds (1 hour).
:param max_time: The max_time of this SingleStatementQueryParameters.
:type: int
"""
self._attrs["maxTime"] = max_time
@property
def relative_time_anchor(self) -> "datetime":
""" Gets the relative_time_anchor of this SingleStatementQueryParameters.
        Relative values for the 'earliest' and 'latest' parameters snap to the unit that you specify. For example, if 'earliest' is set to -d@d, the unit is day. If the 'relativeTimeAnchor' is set to '2020-10-05T13:15:30Z', then 'resolvedEarliest' is snapped to '2020-10-05T00:00:00Z', which is the day. Hours, minutes, and seconds are dropped. If no 'relativeTimeAnchor' is specified, the default value is set to the time the search job was created.
"""
return self._attrs.get("relativeTimeAnchor")
@relative_time_anchor.setter
def relative_time_anchor(self, relative_time_anchor: "datetime"):
"""Sets the relative_time_anchor of this SingleStatementQueryParameters.
        Relative values for the 'earliest' and 'latest' parameters snap to the unit that you specify. For example, if 'earliest' is set to -d@d, the unit is day. If the 'relativeTimeAnchor' is set to '2020-10-05T13:15:30Z', then 'resolvedEarliest' is snapped to '2020-10-05T00:00:00Z', which is the day. Hours, minutes, and seconds are dropped. If no 'relativeTimeAnchor' is specified, the default value is set to the time the search job was created.
:param relative_time_anchor: The relative_time_anchor of this SingleStatementQueryParameters.
:type: datetime
"""
self._attrs["relativeTimeAnchor"] = relative_time_anchor
@property
def sid(self) -> "str":
""" Gets the sid of this SingleStatementQueryParameters.
Reuse the results from the previous search ID (sid) for the statement. For customized default queryParameters, the sid is ignored.
"""
return self._attrs.get("sid")
@sid.setter
def sid(self, sid: "str"):
"""Sets the sid of this SingleStatementQueryParameters.
Reuse the results from the previous search ID (sid) for the statement. For customized default queryParameters, the sid is ignored.
:param sid: The sid of this SingleStatementQueryParameters.
:type: str
"""
self._attrs["sid"] = sid
@property
def timezone(self) -> "object":
""" Gets the timezone of this SingleStatementQueryParameters.
        The timezone that relative time specifiers are based off of. Timezone only applies to relative time literals for 'earliest' and 'latest'. If UNIX time or UTC format is used for 'earliest' and 'latest', this field is ignored. For the list of supported timezone formats, see https://docs.splunk.com/Documentation/Splunk/latest/Data/Applytimezoneoffsetstotimestamps#zoneinfo_.28TZ.29_database. Type: string. Default: \"GMT\".
"""
return self._attrs.get("timezone")
@timezone.setter
def timezone(self, timezone: "object"):
"""Sets the timezone of this SingleStatementQueryParameters.
        The timezone that relative time specifiers are based off of. Timezone only applies to relative time literals for 'earliest' and 'latest'. If UNIX time or UTC format is used for 'earliest' and 'latest', this field is ignored. For the list of supported timezone formats, see https://docs.splunk.com/Documentation/Splunk/latest/Data/Applytimezoneoffsetstotimestamps#zoneinfo_.28TZ.29_database. Type: string. Default: \"GMT\".
:param timezone: The timezone of this SingleStatementQueryParameters.
:type: object
"""
self._attrs["timezone"] = timezone
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
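# --- Usage sketch (illustrative). Time bounds accept relative specifiers
# ('-1h@h', 'now') or absolute UNIX/ISO-8601 times, per the docstrings above.
def _example_single_statement_params():
    params = SingleStatementQueryParameters(earliest="-1h@h", latest="now",
                                            extract_fields="indexed")
    return params.to_dict()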
class SearchModule(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "SearchModule":
instance = SearchModule.__new__(SearchModule)
instance._attrs = model
return instance
def __init__(self, module: "str" = None, namespace: "str" = '', query_parameters: "Dict[str, SingleStatementQueryParameters]" = None, wip_modules: "Dict[str, Module]" = None, **extra):
"""SearchModule"""
self._attrs = dict()
if module is not None:
self._attrs["module"] = module
if namespace is not None:
self._attrs["namespace"] = namespace
if query_parameters is not None:
self._attrs["queryParameters"] = query_parameters
if wip_modules is not None:
self._attrs["wipModules"] = wip_modules
for k, v in extra.items():
self._attrs[k] = v
@property
def module(self) -> "str":
""" Gets the module of this SearchModule.
Multi-statement module with inter-dependencies between statements. Statements are separated by semicolons.
"""
return self._attrs.get("module")
@module.setter
def module(self, module: "str"):
"""Sets the module of this SearchModule.
Multi-statement module with inter-dependencies between statements. Statements are separated by semicolons.
:param module: The module of this SearchModule.
:type: str
"""
self._attrs["module"] = module
@property
def namespace(self) -> "str":
""" Gets the namespace of this SearchModule.
The namespace to run the search in. The default namespace is used if a namespace is not specified.
"""
return self._attrs.get("namespace")
@namespace.setter
def namespace(self, namespace: "str"):
"""Sets the namespace of this SearchModule.
The namespace to run the search in. The default namespace is used if a namespace is not specified.
:param namespace: The namespace of this SearchModule.
:type: str
"""
self._attrs["namespace"] = namespace
@property
def query_parameters(self) -> "Dict[str, SingleStatementQueryParameters]":
""" Gets the query_parameters of this SearchModule.
        The parameters on the search statement, such as 'earliest' and 'latest'. The request can specify a \"defaults\" set of statement queryParameters which overrides the system default queryParameters. Each export statement must have a statement queryParameters entry in the object; it can be empty if there is no override.
"""
return self._attrs.get("queryParameters")
@query_parameters.setter
def query_parameters(self, query_parameters: "Dict[str, SingleStatementQueryParameters]"):
"""Sets the query_parameters of this SearchModule.
        The parameters on the search statement, such as 'earliest' and 'latest'. The request can specify a \"defaults\" set of statement queryParameters which overrides the system default queryParameters. Each export statement must have a statement queryParameters entry in the object; it can be empty if there is no override.
:param query_parameters: The query_parameters of this SearchModule.
:type: Dict[str, SingleStatementQueryParameters]
"""
self._attrs["queryParameters"] = query_parameters
@property
def wip_modules(self) -> "Dict[str, Module]":
""" Gets the wip_modules of this SearchModule.
        WIP (work in progress) modules that are used in the module's search statements but are not yet registered.
"""
return self._attrs.get("wipModules")
@wip_modules.setter
def wip_modules(self, wip_modules: "Dict[str, Module]"):
"""Sets the wip_modules of this SearchModule.
        WIP (work in progress) modules that are used in the module's search statements but are not yet registered.
:param wip_modules: The wip_modules of this SearchModule.
:type: Dict[str, Module]
"""
self._attrs["wipModules"] = wip_modules
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
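# --- Usage sketch (illustrative; the statement name "errors" and the SPL text
# are hypothetical). Note that SearchModule.to_dict() does not recurse into
# nested models, so per-statement parameters are converted to plain dicts here
# before assembly.
def _example_search_module():
    per_stmt = {"errors": SingleStatementQueryParameters(earliest="-4h@h").to_dict()}
    mod = SearchModule(module="$errors = from index:main | where level=\"ERROR\";",
                       query_parameters=per_stmt)
    return mod.to_dict()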
class SingleTimeBucket(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "SingleTimeBucket":
instance = SingleTimeBucket.__new__(SingleTimeBucket)
instance._attrs = model
return instance
def __init__(self, available_count: "int" = None, duration: "float" = None, earliest_time: "float" = None, earliest_time_strf_time: "str" = None, is_finalized: "bool" = None, total_count: "int" = None, **extra):
"""SingleTimeBucket"""
self._attrs = dict()
if available_count is not None:
self._attrs["availableCount"] = available_count
if duration is not None:
self._attrs["duration"] = duration
if earliest_time is not None:
self._attrs["earliestTime"] = earliest_time
if earliest_time_strf_time is not None:
self._attrs["earliestTimeStrfTime"] = earliest_time_strf_time
if is_finalized is not None:
self._attrs["isFinalized"] = is_finalized
if total_count is not None:
self._attrs["totalCount"] = total_count
for k, v in extra.items():
self._attrs[k] = v
@property
def available_count(self) -> "int":
""" Gets the available_count of this SingleTimeBucket.
Count of available events. Not all events in a bucket are retrievable. Typically this count is capped at 10000.
"""
return self._attrs.get("availableCount")
@available_count.setter
def available_count(self, available_count: "int"):
"""Sets the available_count of this SingleTimeBucket.
Count of available events. Not all events in a bucket are retrievable. Typically this count is capped at 10000.
:param available_count: The available_count of this SingleTimeBucket.
:type: int
"""
self._attrs["availableCount"] = available_count
@property
def duration(self) -> "float":
""" Gets the duration of this SingleTimeBucket.
"""
return self._attrs.get("duration")
@duration.setter
def duration(self, duration: "float"):
"""Sets the duration of this SingleTimeBucket.
:param duration: The duration of this SingleTimeBucket.
:type: float
"""
self._attrs["duration"] = duration
@property
def earliest_time(self) -> "float":
""" Gets the earliest_time of this SingleTimeBucket.
The timestamp of the earliest event in the current bucket, in UNIX format. This is the same time as 'earliestTimeStrfTime' in UNIX format.
"""
return self._attrs.get("earliestTime")
@earliest_time.setter
def earliest_time(self, earliest_time: "float"):
"""Sets the earliest_time of this SingleTimeBucket.
The timestamp of the earliest event in the current bucket, in UNIX format. This is the same time as 'earliestTimeStrfTime' in UNIX format.
:param earliest_time: The earliest_time of this SingleTimeBucket.
:type: float
"""
self._attrs["earliestTime"] = earliest_time
@property
def earliest_time_strf_time(self) -> "str":
""" Gets the earliest_time_strf_time of this SingleTimeBucket.
The timestamp of the earliest event in the current bucket, in UTC format with seconds. For example 2019-01-25T13:15:30Z, which follows the ISO-8601 (%FT%T.%Q) format.
"""
return self._attrs.get("earliestTimeStrfTime")
@earliest_time_strf_time.setter
def earliest_time_strf_time(self, earliest_time_strf_time: "str"):
"""Sets the earliest_time_strf_time of this SingleTimeBucket.
The timestamp of the earliest event in the current bucket, in UTC format with seconds. For example 2019-01-25T13:15:30Z, which follows the ISO-8601 (%FT%T.%Q) format.
:param earliest_time_strf_time: The earliest_time_strf_time of this SingleTimeBucket.
:type: str
"""
self._attrs["earliestTimeStrfTime"] = earliest_time_strf_time
@property
def is_finalized(self) -> "bool":
""" Gets the is_finalized of this SingleTimeBucket.
Specifies if all of the events in the current bucket have been finalized.
"""
return self._attrs.get("isFinalized")
@is_finalized.setter
def is_finalized(self, is_finalized: "bool"):
"""Sets the is_finalized of this SingleTimeBucket.
Specifies if all of the events in the current bucket have been finalized.
:param is_finalized: The is_finalized of this SingleTimeBucket.
:type: bool
"""
self._attrs["isFinalized"] = is_finalized
@property
def total_count(self) -> "int":
""" Gets the total_count of this SingleTimeBucket.
The total count of the events in the current bucket.
"""
return self._attrs.get("totalCount")
@total_count.setter
def total_count(self, total_count: "int"):
"""Sets the total_count of this SingleTimeBucket.
The total count of the events in the current bucket.
:param total_count: The total_count of this SingleTimeBucket.
:type: int
"""
self._attrs["totalCount"] = total_count
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
class TimeBucketsSummary(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "TimeBucketsSummary":
instance = TimeBucketsSummary.__new__(TimeBucketsSummary)
instance._attrs = model
return instance
def __init__(self, is_time_cursored: "bool" = None, buckets: "List[SingleTimeBucket]" = None, cursor_time: "float" = None, event_count: "int" = None, **extra):
"""TimeBucketsSummary"""
self._attrs = dict()
if is_time_cursored is not None:
self._attrs["IsTimeCursored"] = is_time_cursored
if buckets is not None:
self._attrs["buckets"] = buckets
if cursor_time is not None:
self._attrs["cursorTime"] = cursor_time
if event_count is not None:
self._attrs["eventCount"] = event_count
for k, v in extra.items():
self._attrs[k] = v
@property
def is_time_cursored(self) -> "bool":
""" Gets the is_time_cursored of this TimeBucketsSummary.
Specifies if the events are returned in time order.
"""
return self._attrs.get("IsTimeCursored")
@is_time_cursored.setter
def is_time_cursored(self, is_time_cursored: "bool"):
"""Sets the is_time_cursored of this TimeBucketsSummary.
Specifies if the events are returned in time order.
:param is_time_cursored: The is_time_cursored of this TimeBucketsSummary.
:type: bool
"""
self._attrs["IsTimeCursored"] = is_time_cursored
@property
def buckets(self) -> "List[SingleTimeBucket]":
""" Gets the buckets of this TimeBucketsSummary.
"""
return [SingleTimeBucket._from_dict(i) for i in self._attrs.get("buckets")]
@buckets.setter
def buckets(self, buckets: "List[SingleTimeBucket]"):
"""Sets the buckets of this TimeBucketsSummary.
:param buckets: The buckets of this TimeBucketsSummary.
:type: List[SingleTimeBucket]
"""
self._attrs["buckets"] = buckets
@property
def cursor_time(self) -> "float":
""" Gets the cursor_time of this TimeBucketsSummary.
Identifies where the cursor is, in processing the events. The 'cursorTime' is a timestamp specified in UNIX time.
"""
return self._attrs.get("cursorTime")
@cursor_time.setter
def cursor_time(self, cursor_time: "float"):
"""Sets the cursor_time of this TimeBucketsSummary.
Identifies where the cursor is, in processing the events. The 'cursorTime' is a timestamp specified in UNIX time.
:param cursor_time: The cursor_time of this TimeBucketsSummary.
:type: float
"""
self._attrs["cursorTime"] = cursor_time
@property
def event_count(self) -> "int":
""" Gets the event_count of this TimeBucketsSummary.
The number of events processed at the 'cursorTime'.
"""
return self._attrs.get("eventCount")
@event_count.setter
def event_count(self, event_count: "int"):
"""Sets the event_count of this TimeBucketsSummary.
The number of events processed at the 'cursorTime'.
:param event_count: The event_count of this TimeBucketsSummary.
:type: int
"""
self._attrs["eventCount"] = event_count
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
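# --- Decoding sketch (the payload shape is assumed from the property
# accessors above, not taken from service documentation).
def _example_time_buckets_summary():
    payload = {"IsTimeCursored": True, "eventCount": 42, "cursorTime": 1.5e9,
               "buckets": [{"availableCount": 10, "totalCount": 12}]}
    summary = TimeBucketsSummary._from_dict(payload)
    return [(b.available_count, b.total_count) for b in summary.buckets]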
class StatusEnum(str, Enum):
CANCELED = "canceled"
FINALIZED = "finalized"
@staticmethod
def from_value(value: str):
if value == "canceled":
return StatusEnum.CANCELED
if value == "finalized":
return StatusEnum.FINALIZED
class UpdateJob(SSCModel):
@staticmethod
def _from_dict(model: dict) -> "UpdateJob":
instance = UpdateJob.__new__(UpdateJob)
instance._attrs = model
return instance
def __init__(self, status: "str", **extra):
"""UpdateJob"""
self._attrs = dict()
if status is not None:
self._attrs["status"] = status
for k, v in extra.items():
self._attrs[k] = v
@property
def status(self) -> "StatusEnum":
""" Gets the status of this UpdateJob.
The status to PATCH to an existing search job. The only status values you can PATCH are 'canceled' and 'finalized'. You can PATCH the 'canceled' status only to a search job that is running.
"""
return StatusEnum.from_value(self._attrs.get("status"))
@status.setter
def status(self, status: "str"):
"""Sets the status of this UpdateJob.
The status to PATCH to an existing search job. The only status values you can PATCH are 'canceled' and 'finalized'. You can PATCH the 'canceled' status only to a search job that is running.
:param status: The status of this UpdateJob.
:type: str
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`")
if isinstance(status, Enum):
self._attrs["status"] = status.value
else:
self._attrs["status"] = status # If you supply a string, we presume you know the service will take it.
def to_dict(self):
return {k: v for (k, v) in self._attrs.items() if v is not None}
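# --- Usage sketch (illustrative). Only 'canceled' and 'finalized' can be
# PATCHed; StatusEnum subclasses str, so the stored value serializes as the
# plain string 'canceled'.
def _example_cancel_job():
    return UpdateJob(status=StatusEnum.CANCELED).to_dict()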
# --- clevrer/utils.py (K-A-R-T/DCL-Release @ 44c6e1234af63daa1ae32302eef5981651a5a0aa, MIT license) ---
import pickle
import json
import sys
import pycocotools.mask as mask
import copy
import pycocotools.mask as cocoMask  # duplicate of the 'mask' alias above; both names kept for downstream use
import numpy as np
import torch
import os
import cv2
import pdb
from collections import defaultdict
from nscl.datasets.definition import gdef
import torch.nn as nn
from PIL import Image
COLORS = ['gray', 'red', 'blue', 'green', 'brown', 'yellow', 'cyan', 'purple']
MATERIALS = ['metal', 'rubber']
SHAPES = ['sphere', 'cylinder', 'cube']
ORDER = ['first', 'second', 'last']
ALL_CONCEPTS= COLORS + MATERIALS + SHAPES + ORDER + ['white']
def keep_only_temporal_concept_learner(trainer, args, configs):
from jactorch.optim import AdamW
# fix model parameters
for name, param in trainer._model.named_parameters():
param.requires_grad = False
for name, param in trainer._model.reasoning.embedding_temporal.named_parameters():
param.requires_grad = True
parameters = trainer._model.reasoning.embedding_temporal.parameters()
#trainable_parameters = filter(lambda x: x.requires_grad, parameters)
optimizer = AdamW([{'params': parameters}], args.lr, weight_decay=configs.train.weight_decay)
trainer._optimizer = optimizer
return trainer
def compute_union_box(bbox1, bbox2):
EPS = 1e-10
union_box = [0, 0, 0, 0]
union_box[0] = min(bbox1[0], bbox2[0])
union_box[1] = min(bbox1[1], bbox2[1])
union_box[2] = max(bbox1[2], bbox2[2])
union_box[3] = max(bbox1[3], bbox2[3])
return union_box
def compute_IoU_v2(bbox1, bbox2):
EPS = 1e-10
bbox1_area = float((bbox1[2] - bbox1[0] + EPS) * (bbox1[3] - bbox1[1] + EPS))
bbox2_area = float((bbox2[2] - bbox2[0] + EPS) * (bbox2[3] - bbox2[1] + EPS))
w = max(0.0, min(bbox1[2], bbox2[2]) - max(bbox1[0], bbox2[0]) + EPS)
h = max(0.0, min(bbox1[3], bbox2[3]) - max(bbox1[1], bbox2[1]) + EPS)
inter = float(w * h)
ovr = inter / (bbox1_area + bbox2_area - inter)
return ovr
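# Worked example for compute_IoU_v2 (boxes are corner-format [x1, y1, x2, y2]):
# boxes [0, 0, 2, 2] and [1, 1, 3, 3] each have area ~4 and overlap in a 1x1
# square, so IoU = 1 / (4 + 4 - 1) ~= 0.143 (the EPS terms keep the ratio
# stable for degenerate, zero-area boxes).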
def compute_LS(traj, gt_traj):
# see http://jvgemert.github.io/pub/jain-tubelets-cvpr2014.pdf
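    # LS ("localization score") here is the mean IoU over frames where at
    # least one box is valid; [0, 0, 1, 1] appears to serve as the "object
    # absent" placeholder, so frames where only one box is the placeholder
    # count toward the denominator but contribute 0 to the sum.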
IoU_list = []
frm_num = 0
for frame_ind, gt_box in enumerate(gt_traj):
box = traj[frame_ind]
if not (box==[0, 0, 1, 1] and gt_box==[0, 0, 1, 1]):
frm_num +=1
if box==[0, 0, 1, 1] or gt_box==[0, 0, 1, 1]:
continue
IoU_list.append(compute_IoU_v2(box, gt_box))
return sum(IoU_list) / frm_num
def visualize_scene_parser(feed_dict, ctx, whatif_id=-1, store_img=False, args=None):
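    # Renders tube boxes and event annotations frame by frame and writes an
    # .avi (plus optional per-frame PNGs and a GIF). Judging from the branches
    # below: whatif_id == -2 means "observed video with in/out events",
    # whatif_id == -1 means "observed video plus predicted future frames", and
    # whatif_id >= 0 means "counterfactual rollout with that object removed".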
vis_size = 5
max_dist = 20
base_folder = 'visualization/'+ args.prefix + '/'+ os.path.basename(args.load).split('.')[0]
filename = str(feed_dict['meta_ann']['scene_index'])
if args.visualize_retrieval_id>=0:
videoname = 'dumps/'+ base_folder + '/'+str(args.visualize_retrieval_id) +'/'+ filename+'_scene.avi'
else:
videoname = 'dumps/'+ base_folder + '/' + filename + '/' + str(int(whatif_id)) +'_scene.avi'
#videoname = filename + '.mp4'
if store_img:
if args.visualize_retrieval_id>=0:
img_folder = 'dumps/'+base_folder +'/'+str(args.visualize_retrieval_id) +'/img'
else:
img_folder = 'dumps/'+base_folder +'/'+filename +'/img'
os.system('mkdir -p ' + img_folder)
background_fn = '../temporal_reasoning-master/background.png'
if not os.path.isfile(background_fn):
background_fn = '../temporal_reasoningv2/background.png'
bg = cv2.imread(background_fn)
H, W, C = bg.shape
bg = cv2.resize(bg, (W, H), interpolation=cv2.INTER_AREA)
fps = 6
fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
#fourcc = cv2.VideoWriter_fourcc('F','M','P','4')
out = cv2.VideoWriter(videoname, fourcc, fps, (W, H))
scene_idx = feed_dict['meta_ann']['scene_index']
sub_idx = int(scene_idx/1000)
sub_img_folder = 'image_'+str(sub_idx).zfill(2)+'000-'+str(sub_idx+1).zfill(2)+'000'
img_full_folder = os.path.join(args.frm_img_path, sub_img_folder)
if whatif_id==-2:
n_frame = len(feed_dict['tube_info']['frm_list'])
obj_num = len(ctx._events_buffer[1][0])
in_list = []
out_list = []
for obj_id in range(obj_num):
if ctx._events_buffer[1][0][obj_id]>args.colli_threshold:
target_frm = ctx._events_buffer[1][1][obj_id]
frm_diff = [ abs(prp_frm-target_frm) for prp_frm in feed_dict['tube_info']['frm_list']]
min_diff = min(frm_diff)
min_index = frm_diff.index(min_diff)
if frm_diff[min_index]<0:
min_index +=1
frm_idx = feed_dict['tube_info']['frm_list'][min_index]
box_prp = feed_dict['tube_info']['box_seq']['tubes'][obj_id][frm_idx]
while box_prp[0]==-1 and box_prp[1]==-1:
min_index +=1
frm_idx = feed_dict['tube_info']['frm_list'][min_index]
box_prp = feed_dict['tube_info']['box_seq']['tubes'][obj_id][frm_idx]
in_list.append((obj_id, min_index))
if ctx._events_buffer[2][0][obj_id]>args.colli_threshold:
target_frm = ctx._events_buffer[2][1][obj_id]
frm_diff = [ abs(prp_frm-target_frm) for prp_frm in feed_dict['tube_info']['frm_list']]
min_diff = min(frm_diff)
min_index = frm_diff.index(min_diff)
if frm_diff[min_index]>0:
min_index -=1
frm_idx = feed_dict['tube_info']['frm_list'][min_index]
box_prp = feed_dict['tube_info']['box_seq']['tubes'][obj_id][frm_idx]
while box_prp[0]==-1 and box_prp[1]==-1:
min_index -=1
frm_idx = feed_dict['tube_info']['frm_list'][min_index]
box_prp = feed_dict['tube_info']['box_seq']['tubes'][obj_id][frm_idx]
out_list.append((obj_id, min_index))
elif whatif_id==-1 and ctx._future_features is not None:
box_dim, obj_num = 4, ctx._future_features[3].shape[0]
box_ftr = ctx._future_features[3].view(obj_num, -1, box_dim)
n_frame = len(feed_dict['tube_info']['frm_list']) + box_ftr.shape[1] - args.n_his -1
elif whatif_id>=0 and ctx._counter_events_colli_set is not None:
box_dim, obj_num = 4, ctx._counterfact_features[3].shape[0]
box_ftr = ctx._counterfact_features[3].view(obj_num, -1, box_dim)
n_frame = min(len(feed_dict['tube_info']['frm_list']), box_ftr.shape[1])
else:
        raise NotImplementedError
padding_patch_list = []
frm_box_list = []
for i in range(n_frame):
box_list = []
if whatif_id==-1 or whatif_id==-2:
if i < len(feed_dict['tube_info']['frm_list']):
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img_ori = cv2.imread(img_full_path)
img = copy.deepcopy(img_ori)
for tube_id in range(len(feed_dict['tube_info']['box_seq']['tubes'])):
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
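                    # boxes are stored as normalized (cx, cy, w, h); convert to
                    # corner form and scale to pixel coordinates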
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
x1, y1, x2, y2 = x*W, y*H, (x+w)*W, (y+h)*H
box_list.append([x1, y1, x2, y2])
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (36,255,12), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if i==len(feed_dict['tube_info']['frm_list'])-1:
padding_patch = img_ori[int(y*H):int(y*H+h*H),int(x*W):int(W*x+w*W)]
hh, ww, c = padding_patch.shape
if hh*ww*c==0:
padding_patch = np.zeros((24, 24, 3), dtype=np.float32)
padding_patch_list.append(padding_patch)
else:
if args.version=='v2' or args.version=='v2_1':
pred_offset = i - len(feed_dict['tube_info']['frm_list'])
else:
pred_offset = i - len(feed_dict['tube_info']['frm_list']) + args.n_his + 1
                frm_id = feed_dict['tube_info']['frm_list'][-1] + (args.frame_offset*pred_offset+1)
if args.version!='v2' and args.version!='v2_1':
img = copy.deepcopy(bg)
else:
img_tensor = feed_dict['img_future'][pred_offset]
mean = np.array([0.485, 0.456, 0.406]).reshape((1, 1, 3))
std = np.array([0.229, 0.224, 0.225]).reshape((1, 1, 3))
img = img_tensor.permute(1, 2, 0).cpu().numpy() * std + mean
img = cv2.resize( img*255, (W, H))
img = img.astype(np.uint8)
for tube_id in range(box_ftr.shape[0]):
tmp_box = box_ftr[tube_id][pred_offset]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
box_list.append([x*W, y*H, (x+w)*W, (y+h)*H])
y2 = y +h
x2 = x +w
if w<=0 or h<=0:
continue
if x>1:
continue
if y>1:
continue
if x2 <=0:
continue
if y2 <=0:
continue
if x<0:
x=0
if y<0:
y=0
if x2>1:
x2=1
if y2>1:
y2=1
if args.version!='v2' and args.version!='v2_1':
patch_resize = cv2.resize(padding_patch_list[tube_id], (max(1, int(x2*W) - int(x*W)), max(1, int(y2*H) - int(y*H))) )
img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (0,0,0), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0,0,0), 2)
else:
if args.version!='v2' and args.version!='v2_1':
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img_rgb = cv2.imread(img_full_path)
img = copy.deepcopy(img_rgb)
else:
img_tensor = feed_dict['img_counterfacts'][whatif_id][i]
mean = np.array([0.485, 0.456, 0.406]).reshape((1, 1, 3))
std = np.array([0.229, 0.224, 0.225]).reshape((1, 1, 3))
img = img_tensor.permute(1, 2, 0).cpu().numpy() * std + mean
img = cv2.resize( img * 255, (W, H))
img = img.astype(np.uint8)
for tube_id in range(box_ftr.shape[0]):
if args.version!='v2' and args.version!='v2_1':
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
x2 = x + w
y2 = y + h
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (36,255,12), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
tmp_box = box_ftr[tube_id, i]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
box_list.append([x*W, y*H, (x+w)*W, (y+h)*H])
y2 = y +h
x2 = x +w
if w<=0 or h<=0:
continue
if x>1:
continue
if y>1:
continue
if x2 <=0:
continue
if y2 <=0:
continue
if x<0:
x=0
if y<0:
y=0
if x2>1:
x2=1
if y2>1:
y2=1
#patch_resize = cv2.resize(img_patch, (max(int(x2*W) - int(x*W), 1), max(int(y2*H) - int(y*H), 1)))
#img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
x_step = args.n_his + 1
if i >=x_step:
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (0,0,0), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0,0,0), 2)
# draw collision events
obj_num = len(feed_dict['tube_info']['box_seq']['tubes'])
#print('%d/%d' %(i, box_ftr.shape[1]))
if (whatif_id==-2):
for in_info in in_list:
#if i==in_info[1]:
offset = i - in_info[1] # for better visualization
#if scene_idx ==10001:
if offset >=0 and offset < vis_size:
box_id = in_info[0]
box = box_list[box_id]
w_dist1 = box[0]
h_dist1 = box[1]
w_dist2 = W - box[2]
h_dist2 = H - box[3]
if min([w_dist1, h_dist1, w_dist2, h_dist2])>max_dist:
continue
img = cv2.rectangle(img, (int(box[0]), int(box[1])), (int(box[2]), int(box[3])), (255, 0, 0), 2)
cv2.putText(img, 'in', (int(box[0]), max(int(box[1])-10, 20)), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (255, 0, 0), 2)
#img = cv2.rectangle(img, (int(box[0]), int(box[1])), (int(box[2]), int(box[3])), (255, 128, 0), 1)
#cv2.putText(img, 'in', (int(box[0]), max(int(box[1])-10, 20)), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (255, 28, 0), 2)
for out_info in out_list:
offset = out_info[1] - i # for better visualization
if offset >= 0 and offset < vis_size:
#if i==out_info[1]:
box_id = out_info[0]
box = box_list[box_id]
w_dist1 = box[0]
h_dist1 = box[1]
w_dist2 = W - box[2]
h_dist2 = H - box[3]
if min([w_dist1, h_dist1, w_dist2, h_dist2])>max_dist:
continue
#img = cv2.rectangle(img, (int(box[0]), int(box[1])), (int(box[2]), int(box[3])), (255, 153, 255), 1)
#cv2.putText(img, 'out', (int(box[0]), max(int(box[1])-10, 20)), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (255, 153, 255), 2)
img = cv2.rectangle(img, (int(box[0]), int(box[1])), (int(box[2]), int(box[3])), (0, 255, 255), 2)
cv2.putText(img, 'out', (int(box[0]), max(int(box[1])-10, 20)), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 255), 2)
for t_id1 in range(obj_num):
for t_id2 in range(obj_num):
if t_id1==whatif_id or t_id2==whatif_id:
continue
if i >=ctx._events_buffer[0][0].shape[2]:
pred_id = i - len(feed_dict['tube_info']['frm_list']) + args.n_his +1
if ctx._event_colli_set[t_id1, t_id2, pred_id]>args.colli_threshold:
#pred_score = ctx._unseen_event_buffer[0][t_id1, t_id2]
#pred_id = ctx._unseen_event_buffer[1][t_id1, t_id2]
#if i==pred_id+len(feed_dict['tube_info']['frm_list']) - args.n_his -1 and \
#pred_score >args.colli_threshold:
box1 = box_list[t_id1]
box2 = box_list[t_id2]
x1_min = min(box1[0], box2[0])
y1_min = min(box1[1], box2[1])
x2_max = max(box1[2], box2[2])
y2_max = max(box1[3], box2[3])
img = cv2.rectangle(img, (int(x1_min), int(y1_min)), (int(x2_max), int(y2_max)), (0,0,255), 2)
cv2.putText(img, 'collision', (int(x1_min), int(y1_min)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 0,255), 2)
elif (whatif_id==-1 and ctx._events_buffer[0][0][t_id1, t_id2, i]>args.colli_threshold) or \
(whatif_id>=0 and ctx._counter_events_colli_set[t_id1, t_id2, i]>args.colli_threshold) or \
(whatif_id==-2 and ctx._events_buffer[0][0][t_id1, t_id2, i]>args.colli_threshold):
print('collision@%d frames'%(i))
box1 = box_list[t_id1]
box2 = box_list[t_id2]
x1_min = min(box1[0], box2[0])
y1_min = min(box1[1], box2[1])
x2_max = max(box1[2], box2[2])
y2_max = max(box1[3], box2[3])
valid_box_flag1 = check_valid_box(box1, W, H)
valid_box_flag2 = check_valid_box(box2, W, H)
if not (valid_box_flag1 and valid_box_flag2):
continue
img = cv2.rectangle(img, (int(x1_min), int(y1_min)), (int(x2_max), int(y2_max)), (0,0,255), 2)
cv2.putText(img, 'collision', (int(x1_min), int(y1_min)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 0,255), 2)
if store_img:
cv2.imwrite(os.path.join( img_folder, '%s_%d_%d.png' % (filename, i, int(whatif_id))), img.astype(np.uint8))
out.write(img)
out.release()
if args.visualize_gif_flag:
if os.path.isfile(videoname+'.gif'):
cmd_str = 'rm %s' % (videoname+'.gif')
os.system( cmd_str)
cmd_str = 'ffmpeg -i %s -t 32 %s' % (videoname, videoname+'.gif')
os.system( cmd_str)
cmd_str = 'rm %s' % (videoname)
os.system( cmd_str)
def check_valid_box(box, W, H):
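# box is (x1, y1, x2, y2) in pixel coordinates; the box is invalid if it is
# degenerate (non-positive width/height) or lies entirely outside the W x H frame.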
x1, y1, x2, y2 = box
w = x2 - x1
h = y2 - y1
valid_flag = True
if w<=0 or h<=0:
valid_flag = False
if x1>W:
valid_flag = False
if y1>H:
valid_flag = False
if x2 <=0:
valid_flag = False
if y2 <=0:
valid_flag = False
return valid_flag
def visualize_prediction(box_ftr, feed_dict, whatif_id=-1, store_img=False, args=None):
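"""
Render the observed frames with ground-truth tube boxes, then render the
predicted frames by pasting each object's last observed patch at its
predicted box, writing everything to an .avi under dumps/.
Shapes are inferred from the call sites: box_ftr appears to be
(obj_num, n_pred_frames, 4) with (cx, cy, w, h) normalized to [0, 1].
"""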
# print('states', states.shape)
# print('actions', actions.shape)
# print(filename)
# print(actions[:, 0, :])
# print(states[:20, 0, :])
base_folder = os.path.basename(args.load).split('.')[0]
filename = str(feed_dict['meta_ann']['scene_index'])
videoname = 'dumps/'+ base_folder + '/' + filename + '_' + str(int(whatif_id)) +'.avi'
#videoname = filename + '.mp4'
if store_img:
img_folder = 'dumps/'+base_folder +'/'+filename
os.system('mkdir -p ' + img_folder)
background_fn = '../temporal_reasoning-master/background.png'
if not os.path.isfile(background_fn):
background_fn = '../temporal_reasoningv2/background.png'
bg = cv2.imread(background_fn)
H, W, C = bg.shape
bg = cv2.resize(bg, (W, H), interpolation=cv2.INTER_AREA)
fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
out = cv2.VideoWriter(videoname, fourcc, 3, (W, H))
scene_idx = feed_dict['meta_ann']['scene_index']
sub_idx = int(scene_idx/1000)
sub_img_folder = 'image_'+str(sub_idx).zfill(2)+'000-'+str(sub_idx+1).zfill(2)+'000'
img_full_folder = os.path.join(args.frm_img_path, sub_img_folder)
if whatif_id == -1:
n_frame = len(feed_dict['tube_info']['frm_list']) + box_ftr.shape[1]
else:
n_frame = min(box_ftr.shape[1], len(feed_dict['tube_info']['frm_list']))
padding_patch_list = []
for i in range(n_frame):
if whatif_id==-1:
if i < len(feed_dict['tube_info']['frm_list']):
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img = cv2.imread(img_full_path)
for tube_id in range(len(feed_dict['tube_info']['box_seq']['tubes'])):
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (36,255,12), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if i==len(feed_dict['tube_info']['frm_list'])-1:
padding_patch = img[int(y*H):int(y*H+h*H), int(x*W):int(x*W+w*W)]
hh, ww, c = padding_patch.shape
if hh*ww*c==0:
padding_patch = np.zeros((24, 24, 3), dtype=np.float32)
padding_patch_list.append(padding_patch)
else:
pred_offset = i - len(feed_dict['tube_info']['frm_list'])
frm_id = feed_dict['tube_info']['frm_list'][-1] + (args.frame_offset*pred_offset+1)
img = copy.deepcopy(bg)
for tube_id in range(box_ftr.shape[0]):
tmp_box = box_ftr[tube_id][pred_offset]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
y2 = y +h
x2 = x +w
if w<=0 or h<=0:
continue
if x>1:
continue
if y>1:
continue
if x2 <=0:
continue
if y2 <=0:
continue
if x<0:
x=0
if y<0:
y=0
if x2>1:
x2=1
if y2>1:
y2=1
patch_resize = cv2.resize(padding_patch_list[tube_id], (max(1, int(x2*W) - int(x*W)), max(1, int(y2*H) - int(y*H))) )
img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if store_img:
cv2.imwrite(os.path.join( img_folder, '%s_%d.png' % (filename, i)), img.astype(np.uint8))
else:
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img_rgb = cv2.imread(img_full_path)
#for tube_id in range(len(feed_dict['tube_info']['box_seq']['tubes'])):
img = copy.deepcopy(bg)
for tube_id in range(box_ftr.shape[0]):
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
img_patch = img_rgb[int(y*H):int(y*H + h*H) , int(x*W): int(x*W + w*W)]
hh, ww, c = img_patch.shape
if hh*ww*c==0:
img_patch = np.zeros((24, 24, 3), dtype=np.float32)
tmp_box = box_ftr[tube_id][i]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
y2 = y +h
x2 = x +w
if w<=0 or h<=0:
continue
if x>1:
continue
if y>1:
continue
if x2 <=0:
continue
if y2 <=0:
continue
if x<0:
x=0
if y<0:
y=0
if x2>1:
x2=1
if y2>1:
y2=1
patch_resize = cv2.resize(img_patch, (max(int(x2*W) - int(x*W), 1), max(int(y2*H) - int(y*H), 1)))
img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if store_img:
cv2.imwrite(os.path.join( img_folder, '%s_%d_%d.png' % (filename, i, int(whatif_id))), img.astype(np.uint8))
out.write(img)
out.release()
def prepare_data_for_testing(output_dict_list, feed_dict_list, json_output_list):
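"""
Convert per-question model outputs into the CLEVRER-style JSON submission
format: one dict per video holding a list of question entries, with
open-ended answers as strings and multiple-choice answers as per-choice
'correct'/'wrong' labels.
"""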
for vid, output_answer_list in enumerate(output_dict_list['answer']):
vid_id = feed_dict_list[vid]['meta_ann']['scene_index']
tmp_vid_dict = {'scene_index': vid_id, 'questions': []}
for q_id, q_info in enumerate(output_answer_list):
tmp_ques_ann = feed_dict_list[vid]['meta_ann']['questions'][q_id]
question_id = tmp_ques_ann['question_id']
tmp_q_dict = {'question_id': question_id}
ques_type = feed_dict_list[vid]['question_type'][q_id]
response_query_type = gdef.qtype2atype_dict[ques_type]
ori_answer = q_info[-1]
if response_query_type == 'integer':
ans = int(ori_answer)
elif response_query_type == 'bool':
if isinstance(ori_answer, list):
tmp_choice_list = []
for idx in range(len(ori_answer)):
tmp_choice = {'choice_id': idx}
if ori_answer[idx] > 0:
tmp_choice['answer'] = 'correct'
else:
tmp_choice['answer'] = 'wrong'
tmp_choice_list.append(tmp_choice)
else:
ans = 'yes' if ori_answer>=0 else 'no'
elif response_query_type == 'word':
a, word2idx = ori_answer
argmax = a.argmax(dim=-1).item()
idx2word = {v: k for k, v in word2idx.items()}
ans = idx2word[argmax]
if isinstance(ori_answer, list):
tmp_q_dict['choices'] = tmp_choice_list
else:
tmp_q_dict['answer'] = str(ans)
tmp_vid_dict['questions'].append(tmp_q_dict)
json_output_list.append(tmp_vid_dict)
def _norm(x, dim=-1):
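# L2-normalize x along `dim`; the 1e-7 epsilon avoids division by zero.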
return x / (x.norm(2, dim=dim, keepdim=True)+1e-7)
def normalize(x, mean, std):
return (x - mean) / std
def prepare_spatial_only_prediction_input(feed_dict, f_sng, args, p_id=0):
""""
attr: obj_num, attr_dim, 1, 1 (None)
x: obj_num, state_dim*(n_his+1)
rel: return from prepare_relations
label_obj: obj_num, state_dim, 1 , 1
label_rel: obj_num * obj_num, rela_dim, 1, 1
"""""
x_step = args.n_his +1
st_id = p_id
ed_id = p_id + x_step
if ed_id >len(feed_dict['tube_info']['frm_list']):
return None
first_frm_id_list = [frm_id for frm_id in feed_dict['tube_info']['frm_list'][st_id:ed_id]]
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
box_dim = 4
t_dim = ftr_t_dim//box_dim
spatial_seq = f_sng[3].view(obj_num, t_dim, box_dim)
tmp_box_list = [spatial_seq[:, frm_id] for frm_id in first_frm_id_list]
x_box = torch.stack(tmp_box_list, dim=1).contiguous().view(obj_num, args.n_his+1, box_dim)
#x_ftr = f_sng[0][:, st_id:ed_id] .view(obj_num, x_step, ftr_dim)
#x = torch.cat([x_box, x_ftr], dim=2).view(obj_num, x_step*(ftr_dim+box_dim), 1, 1).contiguous()
# obj_num*obj_num, box_dim*total_step, 1, 1
spatial_rela = extract_spatial_relations_only_v5(x_box.view(obj_num, x_step, box_dim), args)
#ftr_rela = f_sng[2][:, :, st_id:ed_id].view(obj_num*obj_num, x_step*ftr_dim, 1, 1)
#rela = torch.cat([spatial_rela, ftr_rela], dim=1)
rel = prepare_relations(obj_num)
for idx in range(len(rel)-2):
rel[idx] = rel[idx].to(x_box.device)
rel.append(spatial_rela)
attr = None
node_r_idx, node_s_idx, Ra = rel[3], rel[4], rel[5]
Rr_idx, Rs_idx, value = rel[0], rel[1], rel[2]
Rr = torch.sparse.FloatTensor(
Rr_idx, value, torch.Size([node_r_idx.shape[0], value.size(0)])).to(spatial_rela.device)
Rs = torch.sparse.FloatTensor(
Rs_idx, value, torch.Size([node_s_idx.shape[0], value.size(0)])).to(spatial_rela.device)
# preparing patch coordinates and preparing spatial relations
#ret_mean = torch.FloatTensor(np.array([ 1/ 2.])).cuda().to(x_box.device)
#ret_mean = ret_mean.unsqueeze(1).unsqueeze(1)
ret_mean = 0.5
ret_std = ret_mean
x_box_norm = normalize(x_box, ret_mean, ret_std)
x = x_box_norm.unsqueeze(3).unsqueeze(4).expand(obj_num, x_step, box_dim, args.bbox_size, args.bbox_size)
return attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx
def prepare_normal_prediction_input(feed_dict, f_sng, args, p_id=0, semantic_only_flag=False):
""""
attr: obj_num, attr_dim, 1, 1 (None)
x: obj_num, state_dim*(n_his+1)
rel: return from prepare_relations
label_obj: obj_num, state_dim, 1 , 1
label_rel: obj_num * obj_num, rela_dim, 1, 1
"""""
x_step = args.n_his +1
st_id = p_id
ed_id = p_id + x_step
if ed_id >len(feed_dict['tube_info']['frm_list']):
return None
first_frm_id_list = [frm_id for frm_id in feed_dict['tube_info']['frm_list'][st_id:ed_id]]
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
box_dim = 4
t_dim = ftr_t_dim//box_dim
spatial_seq = f_sng[3].view(obj_num, t_dim, box_dim)
tmp_box_list = [spatial_seq[:, frm_id] for frm_id in first_frm_id_list]
x_box = torch.stack(tmp_box_list, dim=1).contiguous().view(obj_num, args.n_his+1, box_dim)
x_ftr = f_sng[0][:, st_id:ed_id].view(obj_num, x_step, ftr_dim)
x = torch.cat([x_box, x_ftr], dim=2).view(obj_num, x_step*(ftr_dim+box_dim), 1, 1).contiguous()
if not semantic_only_flag:
# obj_num*obj_num, box_dim*total_step, 1, 1
spatial_rela = extract_spatial_relations(x_box.view(obj_num, x_step, box_dim), args)
else:
spatial_rela = extract_spatial_relations_only_v5(x_box.view(obj_num, x_step, box_dim), args, semantic_only_flag=True)
ftr_rela = f_sng[2][:, :, st_id:ed_id].view(obj_num*obj_num, x_step*ftr_dim, 1, 1)
rela = torch.cat([spatial_rela, ftr_rela], dim=1)
rel = prepare_relations(obj_num)
for idx in range(len(rel)-2):
rel[idx] = rel[idx].to(ftr_rela.device)
rel.append(rela)
attr = None
node_r_idx, node_s_idx, Ra = rel[3], rel[4], rel[5]
Rr_idx, Rs_idx, value = rel[0], rel[1], rel[2]
Rr = torch.sparse.FloatTensor(
Rr_idx, value, torch.Size([node_r_idx.shape[0], value.size(0)])).to(ftr_rela.device)
Rs = torch.sparse.FloatTensor(
Rs_idx, value, torch.Size([node_s_idx.shape[0], value.size(0)])).to(ftr_rela.device)
return attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx
def prepare_future_prediction_input(feed_dict, f_sng, args):
""""
attr: obj_num, attr_dim, 1, 1 (None)
x: obj_num, state_dim*(n_his+1)
rel: return from prepare_relations
label_obj: obj_num, state_dim, 1 , 1
label_rel: obj_num * obj_num, rela_dim, 1, 1
"""""
x_step = args.n_his +1
last_frm_id_list = [frm_id for frm_id in feed_dict['tube_info']['frm_list'][-args.n_his-1:]]
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
box_dim = 4
t_dim = ftr_t_dim//box_dim
spatial_seq = f_sng[3].view(obj_num, t_dim, box_dim)
tmp_box_list = [spatial_seq[:, frm_id] for frm_id in last_frm_id_list]
x_box = torch.stack(tmp_box_list, dim=1).contiguous().view(obj_num, args.n_his+1, box_dim)
x_ftr = f_sng[0][:, -x_step:].view(obj_num, x_step, ftr_dim)
x = torch.cat([x_box, x_ftr], dim=2).view(obj_num, x_step*(ftr_dim+box_dim), 1, 1).contiguous()
# obj_num*obj_num, box_dim*total_step, 1, 1
spatial_rela = extract_spatial_relations(x_box.view(obj_num, x_step, box_dim), args)
ftr_rela = f_sng[2][:, :, -x_step:].view(obj_num*obj_num, x_step*ftr_dim, 1, 1)
rela = torch.cat([spatial_rela, ftr_rela], dim=1)
rel = prepare_relations(obj_num)
for idx in range(len(rel)-2):
rel[idx] = rel[idx].to(ftr_rela.device)
rel.append(rela)
attr = None
node_r_idx, node_s_idx, Ra = rel[3], rel[4], rel[5]
Rr_idx, Rs_idx, value = rel[0], rel[1], rel[2]
Rr = torch.sparse.FloatTensor(
Rr_idx, value, torch.Size([node_r_idx.shape[0], value.size(0)])).to(ftr_rela.device)
Rs = torch.sparse.FloatTensor(
Rs_idx, value, torch.Size([node_s_idx.shape[0], value.size(0)])).to(ftr_rela.device)
return attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx
def prepare_counterfact_prediction_input(feed_dict, f_sng, args):
""""
attr: obj_num, attr_dim, 1, 1 (None)
x: obj_num, state_dim*(n_his+1)
rel: return from prepare_relations
label_obj: obj_num, state_dim, 1 , 1
label_rel: obj_num * obj_num, rela_dim, 1, 1
"""""
x_step = args.n_his +1
first_id_list = [frm_id for frm_id in feed_dict['tube_info']['frm_list'][:x_step]]
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
box_dim = 4
t_dim = ftr_t_dim//box_dim
spatial_seq = f_sng[3].view(obj_num, t_dim, box_dim)
tmp_box_list = [spatial_seq[:, frm_id].clone() for frm_id in first_id_list]
x_box = torch.stack(tmp_box_list, dim=1).contiguous().view(obj_num, x_step, box_dim)
x_ftr = f_sng[0][:, :x_step].view(obj_num, x_step, ftr_dim).clone()
x = torch.cat([x_box, x_ftr], dim=2).view(obj_num, x_step*(ftr_dim+box_dim), 1, 1).contiguous()
# obj_num*obj_num, box_dim*total_step, 1, 1
spatial_rela = extract_spatial_relations(x_box.view(obj_num, x_step, box_dim))
ftr_rela = f_sng[2][:, :, :x_step].view(obj_num*obj_num, x_step*ftr_dim, 1, 1)
rela = torch.cat([spatial_rela, ftr_rela], dim=1)
rel = prepare_relations(obj_num)
for idx in range(len(rel)-2):
rel[idx] = rel[idx].to(ftr_rela.device)
rel.append(rela)
attr = None
node_r_idx, node_s_idx, Ra = rel[3], rel[4], rel[5]
Rr_idx, Rs_idx, value = rel[0], rel[1], rel[2]
Rr = torch.sparse.FloatTensor(
Rr_idx, value, torch.Size([node_r_idx.shape[0], value.size(0)])).to(ftr_rela.device)
Rs = torch.sparse.FloatTensor(
Rs_idx, value, torch.Size([node_s_idx.shape[0], value.size(0)])).to(ftr_rela.device)
return attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx
def prepare_relations(n):
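"""
Build a fully-connected relation graph over n objects. Returns
[Rr_idx, Rs_idx, value, node_r_idx, node_s_idx]: the index/value pairs
used to assemble the sparse receiver (Rr) and sender (Rs) matrices for
all n*n directed edges.
"""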
node_r_idx = np.arange(n)
node_s_idx = np.arange(n)
rel = np.zeros((n**2, 2))
rel[:, 0] = np.repeat(np.arange(n), n)
rel[:, 1] = np.tile(np.arange(n), n)
n_rel = rel.shape[0]
Rr_idx = torch.LongTensor([rel[:, 0], np.arange(n_rel)])
Rs_idx = torch.LongTensor([rel[:, 1], np.arange(n_rel)])
value = torch.FloatTensor([1] * n_rel)
rel = [Rr_idx, Rs_idx, value, node_r_idx, node_s_idx]
return rel
def extract_spatial_relations_only_v5(feats, args=None, semantic_only_flag=False):
"""
Extract spatial relations
"""
### prepare relation attributes
n_objects, t_frame, box_dim = feats.shape
feats = feats.view(n_objects, t_frame*box_dim, 1, 1)
n_relations = n_objects * n_objects
relation_dim = 3
state_dim = box_dim
if semantic_only_flag:
Ra = torch.ones([n_relations, relation_dim *t_frame, 1, 1], device=feats.device) * -0.5
else:
Ra = torch.ones([n_relations, relation_dim *t_frame, args.bbox_size, args.bbox_size], device=feats.device) * -0.5
#change to relative position
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 1::relation_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 2::relation_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
return Ra
def extract_spatial_relations(feats, args=None):
"""
Extract spatial relations
"""
### prepare relation attributes
n_objects, t_frame, box_dim = feats.shape
feats = feats.view(n_objects, t_frame*box_dim, 1, 1)
n_relations = n_objects * n_objects
if args is None or args.add_rela_dist_mode ==0:
relation_dim = box_dim
elif args.add_rela_dist_mode==1 or args.add_rela_dist_mode==2:
relation_dim = box_dim + 1
else:
raise NotImplementedError
state_dim = box_dim
Ra = torch.ones([n_relations, relation_dim *t_frame, 1, 1], device=feats.device) * -0.5
#change to relative position
# relation_dim = self.args.relation_dim
# state_dim = self.args.state_dim
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0::relation_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1::relation_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2::relation_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3::relation_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
if args is not None and (args.add_rela_dist_mode==1 or args.add_rela_dist_mode==2):
Ra_x = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra_y = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra_dist = torch.sqrt(Ra_x**2+Ra_y**2) #+0.0000000001)
Ra[idx, 4::relation_dim] = Ra_dist
return Ra
def predict_counterfact_features_v2(model, feed_dict, f_sng, args, counter_fact_id):
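# Counterfactual rollout: the node features of `counter_fact_id` and every
# relation touching it are zeroed before unrolling the dynamics model, so
# the prediction approximates the scene without that object.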
data = prepare_counterfact_prediction_input(feed_dict, f_sng, args)
#x: obj_num, state_dim*(n_his+1)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
n_objects_ori = x.shape[0]
for i in range(n_objects_ori):
for j in range(n_objects_ori):
idx = i * n_objects_ori + j
if i==counter_fact_id or j==counter_fact_id:
Ra[idx] = 0.0
x[counter_fact_id] = 0.0
pred_obj_list = []
pred_rel_spatial_list = []
pred_rel_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
Ra_spatial = Ra[:, :box_dim*x_step]
Ra_ftr = Ra[:, box_dim*x_step:]
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*box_dim:(t_step+1)*box_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
relation_dim = args.relation_dim
state_dim = args.state_dim
box_dim = 4
for p_id, frm_id in enumerate(range(0, args.n_seen_frames, args.frame_offset)):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
valid_object_id_list = check_valid_object_id_list(x, args)
if counter_fact_id in valid_object_id_list:
counter_idx = valid_object_id_list.index(counter_fact_id)
del valid_object_id_list[counter_idx]
if len(valid_object_id_list) == 0:
break
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
n_objects = x.shape[0]
feats = x
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0::relation_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1::relation_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2::relation_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3::relation_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, state_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_obj[ori_id, box_dim:] = _norm(pred_obj_valid[valid_id, box_dim:], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, box_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, box_dim:], dim=0)
pred_obj_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, box_dim, 1, 1))
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
ftr_dim = f_sng[1].shape[1]
box_dim = 4
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().view(n_objects_ori, pred_frm_num, box_dim)
if args.visualize_flag:
visualize_prediction_v2(box_ftr, feed_dict, whatif_id=counter_fact_id, store_img=True, args=args)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
return None, None, rel_ftr_exp, box_ftr.view(n_objects_ori, -1)
def predict_counterfact_features(model, feed_dict, f_sng, args, counter_fact_id):
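# Earlier counterfactual rollout (no valid-object filtering): the removed
# object and its relations are masked with -1 at every prediction step
# instead of being dropped from the graph.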
data = prepare_counterfact_prediction_input(feed_dict, f_sng, args)
#x: obj_num, state_dim*(n_his+1)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_list = []
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_list.append(Ra[:,t_step*args.relation_dim:(t_step+1)*args.relation_dim])
n_objects = x.shape[0]
relation_dim = args.relation_dim
state_dim = args.state_dim
for p_id, frm_id in enumerate(range(0, args.n_seen_frames, args.frame_offset)):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat(pred_rel_list[p_id:p_id+x_step], dim=1)
feats = x
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0::relation_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1::relation_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2::relation_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3::relation_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
# masking out counter_fact_id
x[counter_fact_id] = -1.0
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
if i==counter_fact_id or j==counter_fact_id:
Ra[idx] = -1.0
pred_obj, pred_rel = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj_list.append(pred_obj)
pred_rel_list.append(pred_rel.view(n_objects*n_objects, relation_dim, 1, 1))
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
ftr_dim = f_sng[1].shape[1]
box_dim = 4
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().view(n_objects, pred_frm_num, box_dim)
rel_ftr_exp = torch.stack(pred_rel_list[-pred_frm_num:], dim=1)[:, :, box_dim:].contiguous().view(n_objects, n_objects, pred_frm_num, ftr_dim)
return None, None, rel_ftr_exp, box_ftr.view(n_objects, -1)
def predict_future_feature(model, feed_dict, f_sng, args):
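# Unroll the dynamics model from the last n_his+1 observed frames to
# predict future object states and pairwise relation features.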
data = prepare_future_prediction_input(feed_dict, f_sng, args)
#x: obj_num, state_dim*(n_his+1)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_spatial_list = []
pred_rel_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
Ra_spatial = Ra[:, :box_dim*x_step]
Ra_ftr = Ra[:, box_dim*x_step:]
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*box_dim:(t_step+1)*box_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
n_objects = x.shape[0]
relation_dim = args.relation_dim
state_dim = args.state_dim
for p_id in range(args.pred_frm_num):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
feats = x
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0::relation_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1::relation_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2::relation_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3::relation_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
pred_obj, pred_rel = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj_list.append(pred_obj)
pred_rel_spatial_list.append(pred_rel.view(n_objects*n_objects, relation_dim, 1, 1)[:, :box_dim])
pred_rel_ftr_list.append(pred_rel.view(n_objects*n_objects, relation_dim, 1, 1)[:, box_dim:])
# make the output consistent with the video scene graph
pred_frm_num = args.pred_frm_num
ftr_dim = f_sng[1].shape[1]
box_dim = 4
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().view(n_objects, pred_frm_num, box_dim)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects, n_objects, pred_frm_num, ftr_dim)
return None, None, rel_ftr_exp, box_ftr.view(n_objects, -1)
def predict_future_feature_v2(model, feed_dict, f_sng, args):
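# Future rollout, v2: additionally drops objects that have left the frame
# (check_valid_object_id_list) and, when add_rela_dist_mode==2, prunes
# relations whose pairwise distance exceeds args.rela_dist_thre.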
data = prepare_future_prediction_input(feed_dict, f_sng, args)
#x: obj_num, state_dim*(n_his+1)
#print('BUGs')
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_spatial_list = []
pred_rel_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = args.rela_spatial_dim
rela_ftr_dim = args.rela_ftr_dim
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
n_objects_ori = x.shape[0]
relation_dim = args.relation_dim
state_dim = args.state_dim
box_dim = 4
for p_id in range(args.pred_frm_num):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
# remove invalid objects, i.e. objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list(x, args)
if len(valid_object_id_list) == 0:
break
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
n_objects = x.shape[0]
feats = x
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
if args.add_rela_dist_mode==1 or args.add_rela_dist_mode==2:
Ra_x = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra_y = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra_dist = torch.sqrt(Ra_x**2+Ra_y**2+0.0000000001)
Ra[idx, 4:rela_spa_dim*x_step:rela_spa_dim] = Ra_dist
if Ra_dist[-1] > args.rela_dist_thre:
invalid_rela_list.append(idx)
#print(Ra_dist[-1])
if args.add_rela_dist_mode==2:
Rr, Rs = update_valid_rela_input(n_objects, invalid_rela_list, feats, args)
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, state_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_obj[ori_id, box_dim:] = _norm(pred_obj_valid[valid_id, box_dim:], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, rela_spa_dim:], dim=0)
pred_obj_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, 1, 1))
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
ftr_dim = f_sng[1].shape[1]
box_dim = 4
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().view(n_objects_ori, pred_frm_num, box_dim)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
if args.visualize_flag:
visualize_prediction_v2(box_ftr, feed_dict, whatif_id=-1, store_img=True, args=args)
return None, None, rel_ftr_exp, box_ftr.view(n_objects_ori, -1)
def predict_normal_feature(model, feed_dict, f_sng, args):
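# Rollout over the observed window: seeds with the first n_his+1 frames and
# predicts args.pred_normal_num further steps for reconstruction.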
data = prepare_normal_prediction_input(feed_dict, f_sng, args)
#x: obj_num, state_dim*(n_his+1)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
#pred_rel_list = []
pred_rel_spatial_list = []
pred_rel_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
Ra_spatial = Ra[:, :box_dim*x_step]
Ra_ftr = Ra[:, box_dim*x_step:]
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*box_dim:(t_step+1)*box_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
n_objects = x.shape[0]
relation_dim = args.relation_dim
state_dim = args.state_dim
for p_id in range(args.pred_normal_num):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
feats = x
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0::relation_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1::relation_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2::relation_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3::relation_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
pred_obj, pred_rel = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj_list.append(pred_obj)
pred_rel_spatial_list.append(pred_rel.view(n_objects*n_objects, relation_dim, 1, 1)[:, :box_dim])
pred_rel_ftr_list.append(pred_rel.view(n_objects*n_objects, relation_dim, 1, 1)[:, box_dim:])
#make the output consitent with video scene graph
pred_frm_num = len(pred_obj_list)
box_ftr = torch.stack(pred_obj_list[:pred_frm_num], dim=1)[:, :, :box_dim].contiguous().view(n_objects, pred_frm_num, box_dim)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[:pred_frm_num], dim=1).view(n_objects, n_objects, pred_frm_num, ftr_dim)
obj_ftr = torch.stack(pred_obj_list[:pred_frm_num], dim=1)[:, :, box_dim:].contiguous().view(n_objects, pred_frm_num, ftr_dim)
return obj_ftr, None, rel_ftr_exp, box_ftr.view(n_objects, -1)
def check_valid_object_id_list_spatial(x, args):
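# Spatial-only variant: boxes are stored normalized to [-1, 1], so they are
# mapped back to [0, 1] via box*0.5 + 0.5 before the on-screen test.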
valid_object_id_list = []
x_step = args.n_his + 1
box_dim = 4
for obj_id in range(x.shape[0]):
tmp_obj_feat = x[obj_id, :, 0, 0].view(x_step, -1)
obj_valid = True
for tmp_step in range(x_step):
last_obj_box = tmp_obj_feat[tmp_step, :box_dim]
x_c, y_c, w, h = (last_obj_box*0.5) + 0.5
x1 = x_c - w*0.5
y1 = y_c - h*0.5
x2 = x_c + w*0.5
y2 = y_c + h*0.5
if w <=0 or h<=0:
obj_valid = False
elif x2<=0 or y2<=0:
obj_valid = False
elif x1>=1 or y1>=1:
obj_valid = False
if obj_valid:
valid_object_id_list.append(obj_id)
return valid_object_id_list
def check_valid_object_id_list_v2(x, args):
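# Windowed variant with boxes already in [0, 1]: an object is valid only if
# its box stays on-screen at every one of the n_his+1 steps.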
valid_object_id_list = []
x_step = args.n_his + 1
box_dim = 4
for obj_id in range(x.shape[0]):
tmp_obj_feat = x[obj_id, :, 0, 0].view(x_step, -1)
obj_valid = True
for tmp_step in range(x_step):
last_obj_box = tmp_obj_feat[tmp_step, :box_dim]
x_c, y_c, w, h = last_obj_box
x1 = x_c - w*0.5
y1 = y_c - h*0.5
x2 = x_c + w*0.5
y2 = y_c + h*0.5
if w <=0 or h<=0:
obj_valid = False
elif x2<=0 or y2<=0:
obj_valid = False
elif x1>=1 or y1>=1:
obj_valid = False
if obj_valid:
valid_object_id_list.append(obj_id)
return valid_object_id_list
def check_valid_object_id_list(x, args):
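# Checks only the last step of the window: an object is valid if its most
# recent (cx, cy, w, h) box still overlaps the unit frame.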
valid_object_id_list = []
x_step = args.n_his + 1
box_dim = 4
for obj_id in range(x.shape[0]):
tmp_obj_feat = x[obj_id].view(x_step, -1)
last_obj_box = tmp_obj_feat[-1, :box_dim]
x_c, y_c, w, h = last_obj_box
x1 = x_c - w*0.5
y1 = y_c - h*0.5
x2 = x_c + w*0.5
y2 = y_c + h*0.5
obj_valid = True
if w <=0 or h<=0:
obj_valid = False
elif x2<=0 or y2<=0:
obj_valid = False
elif x1>=1 or y1>=1:
obj_valid = False
if obj_valid:
valid_object_id_list.append(obj_id)
return valid_object_id_list
def prepare_valid_input(x, Ra, valid_object_id_list, args, x_spatial=None):
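# Restrict the graph to the surviving objects: gather their node features
# and the relation rows whose endpoints are both valid, then rebuild the
# sparse Rr/Rs matrices for the smaller fully-connected graph.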
x_valid_list = [x[obj_id] for obj_id in valid_object_id_list]
x_valid = torch.stack(x_valid_list, dim=0)
if x_spatial is not None:
x_spatial_valid_list = [x_spatial[obj_id] for obj_id in valid_object_id_list]
x_spatial_valid = torch.stack(x_spatial_valid_list, dim=0)
valid_obj_num = len(valid_object_id_list)
rel = prepare_relations(valid_obj_num)
for idx in range(len(rel)-2):
rel[idx] = rel[idx].to(x_valid.device)
n_objects = x.shape[0]
ra_valid_list = []
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
if (i in valid_object_id_list) and (j in valid_object_id_list):
ra_valid_list.append(Ra[idx])
Ra_valid = torch.stack(ra_valid_list, dim=0)
rel.append(Ra_valid)
attr = None
node_r_idx, node_s_idx, Ra_valid = rel[3], rel[4], rel[5]
Rr_idx, Rs_idx, value = rel[0], rel[1], rel[2]
Rr = torch.sparse.FloatTensor(
Rr_idx, value, torch.Size([node_r_idx.shape[0], value.size(0)])).to(x_valid.device)
Rs = torch.sparse.FloatTensor(
Rs_idx, value, torch.Size([node_s_idx.shape[0], value.size(0)])).to(x_valid.device)
if x_spatial is None:
return attr, x_valid, Rr, Rs, Ra_valid, node_r_idx, node_s_idx
else:
return attr, x_valid, x_spatial, Rr, Rs, Ra_valid, node_r_idx, node_s_idx
def update_valid_rela_input(n_objects, invalid_rela_list, feats, args):
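# Rebuild the sparse receiver/sender matrices with the edges listed in
# invalid_rela_list removed; the column count stays n_objects**2 so the
# matrices remain aligned with the unpruned Ra rows.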
rel = prepare_relations(n_objects)
for idx in range(len(rel)-2):
rel[idx] = rel[idx].to(feats.device)
n_rel = n_objects * n_objects
Rr_idx, Rs_idx, value = rel[0], rel[1], rel[2]
Rr_idx_list = []
Rs_idx_list = []
value_list = []
for rel_idx in range(n_rel):
if rel_idx in invalid_rela_list:
continue
Rr_idx_list.append(Rr_idx[:, rel_idx])
Rs_idx_list.append(Rs_idx[:, rel_idx])
value_list.append(value[rel_idx])
Rr_idx_new = torch.stack(Rr_idx_list, dim=1)
Rs_idx_new = torch.stack(Rs_idx_list, dim=1)
value_new = torch.stack(value_list, dim=0)
Rr_new = torch.sparse.FloatTensor(
Rr_idx_new, value_new, torch.Size([n_objects, value.size(0)])).to(value.device)
Rs_new = torch.sparse.FloatTensor(
Rs_idx_new, value_new, torch.Size([n_objects, value.size(0)])).to(value.device)
return Rr_new, Rs_new
def predict_normal_feature_v3(model, feed_dict, f_sng, args):
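# Rollout v3: re-reads the ground-truth window at every step of the seen
# video (effectively teacher forcing), drops off-screen objects, and also
# collects ground-truth spatial relations for a prediction loss.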
pred_obj_list = []
pred_rel_spatial_list = []
pred_rel_ftr_list = []
x_step = args.n_his + 1
box_dim = 4
ftr_dim = f_sng[1].shape[1]
pred_rel_spatial_gt_list = []
relation_dim = args.relation_dim
state_dim = args.state_dim
valid_object_id_stack = []
rela_spa_dim = args.rela_spatial_dim
rela_ftr_dim = args.rela_ftr_dim
for p_id in range(args.pred_normal_num):
data = prepare_normal_prediction_input(feed_dict, f_sng, args, p_id)
if data is None:
break
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
n_objects_ori = x.shape[0]
#if p_id ==0 and args.visualize_flag:
if p_id ==0:
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
assert Ra.shape[1]==(rela_spa_dim+rela_ftr_dim)*x_step
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
# remove invalid objects, i.e. objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list(x, args)
if len(valid_object_id_list) == 0:
break
valid_object_id_stack.append(valid_object_id_list)
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
n_objects = x.shape[0]
feats = x
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
if args.add_rela_dist_mode==1 or args.add_rela_dist_mode==2:
Ra_x = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra_y = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra_dist = torch.sqrt(Ra_x**2+Ra_y**2) #+0.0000000001)
Ra[idx, 4:rela_spa_dim*x_step:rela_spa_dim] = Ra_dist
if Ra_dist[-1] > args.rela_dist_thre:
invalid_rela_list.append(idx)
#print(Ra_dist[-1])
if args.add_rela_dist_mode==2:
Rr, Rs = update_valid_rela_input(n_objects, invalid_rela_list, feats, args)
# update gt spatial relations
pred_rel_spatial_gt = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=Ra.dtype, \
device=Ra.device) #- 1.0
pred_rel_spatial_gt[:, 0] = -1
pred_rel_spatial_gt[:, 1] = -1
pred_rel_spatial_gt_valid = Ra[:, (x_step-1)*rela_spa_dim:x_step*rela_spa_dim].squeeze(3).squeeze(2)
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial_gt[ori_idx] = pred_rel_spatial_gt_valid[valid_idx]
pred_rel_spatial_gt_list.append(pred_rel_spatial_gt)
# normalize data
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, state_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_obj[ori_id, box_dim:] = _norm(pred_obj_valid[valid_id, box_dim:], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, rela_spa_dim:], dim=0)
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, 1, 1)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().view(n_objects_ori, pred_frm_num, box_dim)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
obj_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, box_dim:].contiguous().view(n_objects_ori, pred_frm_num, ftr_dim)
if args.visualize_flag:
visualize_prediction_v2(box_ftr, feed_dict, whatif_id=100, store_img=True, args=args)
return obj_ftr, None, rel_ftr_exp, box_ftr.view(n_objects_ori, -1), valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def update_new_appear_objects(x, Ra, feed_dict, f_sng, args, p_id, object_appear_id_list, spatial_only=False, semantic_only_flag=False, x_spatial=None):
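# Handle objects that become visible mid-rollout: for ids that are valid in
# the ground-truth window but not yet tracked, copy their ground-truth node
# features and both directions of their relation rows into the rollout state.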
n_obj = x.shape[0]
assert not (spatial_only and semantic_only_flag)
#assert (semantic_only_flag and x_spatial is None)
if spatial_only:
data_v3 = prepare_spatial_only_prediction_input(feed_dict, f_sng, args, p_id)
attr_v3, x_v3, Rr_v3, Rs_v3, Ra_v3, node_r_idx_v3, node_s_idx_v3 = data_v3
valid_obj_id_list = check_valid_object_id_list_spatial(x_v3, args)
patch_size = x.shape[2]
x_v3 = x_v3.view(n_obj, -1, patch_size, patch_size)
else:
if x_spatial is not None:
valid_obj_id_list = check_valid_object_id_list_v2(x_spatial, args)
else:
valid_obj_id_list = check_valid_object_id_list_v2(x, args)
data_v3 = prepare_normal_prediction_input(feed_dict, f_sng, args, p_id, semantic_only_flag=semantic_only_flag)
attr_v3, x_v3, Rr_v3, Rs_v3, Ra_v3, node_r_idx_v3, node_s_idx_v3 = data_v3
if semantic_only_flag:
box_dim = 4
ftr_dim = f_sng[1].shape[1]
x_step = args.n_his + 1
x_v3 = x_v3.view(n_obj, x_step, ftr_dim+box_dim)
x_spatial_v3 = x_v3[:, :, :box_dim].contiguous().view(n_obj, x_step*box_dim, 1, 1)
x_v3 = x_v3[:, :, box_dim:].contiguous().view(n_obj, x_step*ftr_dim, 1, 1)
new_valid_id_list = []
for new_id in valid_obj_id_list:
if new_id not in object_appear_id_list:
x[new_id] = x_v3[new_id]
if semantic_only_flag:
x_spatial[new_id] = x_spatial_v3[new_id]
for i in range(n_obj):
idx = i * n_obj + new_id
idx2 = new_id * n_obj + i
Ra[idx] = Ra_v3[idx]
Ra[idx2] = Ra_v3[idx2]
new_valid_id_list.append(new_id)
if semantic_only_flag:
return x, x_spatial, Ra, new_valid_id_list
else:
return x, Ra, new_valid_id_list
def predict_spatial_feature(model, feed_dict, f_sng, args):
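# Spatial-only rollout on bbox_size x bbox_size coordinate maps; boxes are
# predicted in [-1, 1] and mapped back to [0, 1] at the end via
# box_ftr*0.5 + 0.5.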
data = prepare_spatial_only_prediction_input(feed_dict, f_sng, args, p_id=0)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_spatial_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = Ra.shape[1] // x_step
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
for t_step in range(x_step):
pred_obj_list.append(x[:,t_step])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
n_objects_ori = x.shape[0]
relation_dim = rela_spa_dim
state_dim = box_dim
object_appear_id_list = []
pred_rel_spatial_gt_list = []
box_only_flag_bp = args.box_only_flag
args.box_only_flag = 1
for p_id in range(args.pred_normal_num):
if p_id + x_step > len(feed_dict['tube_info']['frm_list']):
break
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
# remove invalid objects, i.e. objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list_spatial(x, args)
if len(valid_object_id_list) == 0:
break
object_appear_id_list +=valid_object_id_list
#update new appear objects
x, Ra, obj_appear_new_ids = update_new_appear_objects(x, Ra, feed_dict, f_sng, args, p_id, object_appear_id_list, spatial_only=True)
valid_object_id_list = check_valid_object_id_list_spatial(x, args)
#object_appear_id_list +=valid_object_id_list
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
n_objects = x.shape[0]
feats = x
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[:, 0::rela_spa_dim] = -0.5
# padding spatial relation feature
pred_rel_spatial_gt = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, args.bbox_size, args.bbox_size, dtype=Ra.dtype, \
device=Ra.device) - 1.0
# for calculating loss
pred_rel_spatial_gt_valid = Ra[:, (x_step-1)*rela_spa_dim:x_step*rela_spa_dim]
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial_gt[ori_idx] = pred_rel_spatial_gt_valid[valid_idx]
pred_rel_spatial_gt_list.append(pred_rel_spatial_gt)
attr = torch.zeros(n_objects, 3, args.bbox_size, args.bbox_size, device=x.device)  # zero-filled placeholder attribute maps
# normalize data
pred_obj_valid, pred_rel_valid = model._model_spatial_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj_valid += x[:, -state_dim:]
pred_obj = torch.zeros(n_objects_ori, state_dim, args.bbox_size, args.bbox_size, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) - 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_list.append(pred_obj)
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, \
1, 1).expand(n_objects_ori*n_objects_ori, rela_spa_dim, args.bbox_size, args.bbox_size)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().mean(4).mean(3).view(n_objects_ori, pred_frm_num, box_dim)
spatial_feature = box_ftr*0.5 +0.5
if args.visualize_flag:
visualize_prediction_v2(spatial_feature, feed_dict, whatif_id=100, store_img=True, args=args)
args.box_only_flag = box_only_flag_bp
return spatial_feature
def predict_semantic_feature(model, feed_dict, f_sng, args, spatial_feature):
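# Semantic rollout: propagates appearance features while taking the spatial
# track either from the ground truth (spatial_feature is None) or from the
# output of predict_spatial_feature.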
semantic_only_flag_bp = args.semantic_only_flag
args.semantic_only_flag = 1
data = prepare_normal_prediction_input(feed_dict, f_sng, args, p_id=0, semantic_only_flag=True)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_rel_spatial_list = []
pred_rel_ftr_list = []
pred_obj_spatial_list = []
pred_obj_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = args.rela_spatial_dim
rela_ftr_dim = args.rela_ftr_dim
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
pred_rel_spatial_gt_list = []
n_objects_ori = x.shape[0]
x_view = x.view(n_objects_ori, x_step, box_dim + ftr_dim, 1, 1)
for t_step in range(args.n_his+1):
#pred_obj_spatial_list.append(x_view[:,t_step, :box_dim])
pred_obj_ftr_list.append(x_view[:,t_step, box_dim:])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
relation_dim = args.relation_dim
state_dim = args.state_dim
object_appear_id_list = []
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
t_dim = ftr_t_dim//box_dim
spatial_gt = f_sng[3].view(obj_num, t_dim, box_dim)
for p_id in range(args.pred_normal_num):
if spatial_feature is None:
st_id = p_id
ed_id = st_id + x_step
frm_id_list = feed_dict['tube_info']['frm_list'][st_id:ed_id]
tmp_box_list = [spatial_gt[:, frm_id] for frm_id in frm_id_list]
x_spatial = torch.stack(tmp_box_list, dim=1).contiguous().view(obj_num, x_step * box_dim, 1, 1)
else:
if p_id + x_step >=spatial_feature.shape[1]:
break
x_spatial = spatial_feature[:, p_id:p_id+x_step].view(n_objects_ori, -1, 1, 1)
x_ftr = torch.cat(pred_obj_ftr_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
# remove invalid objects, i.e. objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list_v2(x_spatial, args)
if len(valid_object_id_list) == 0:
break
object_appear_id_list +=valid_object_id_list
#update new appear objects
x_ftr, x_spatial, Ra, obj_appear_new_ids = update_new_appear_objects(x_ftr, Ra, feed_dict, f_sng, args, p_id, object_appear_id_list, semantic_only_flag=True, x_spatial=x_spatial)
valid_object_id_list = check_valid_object_id_list_v2(x_spatial, args)
data_valid = prepare_valid_input(x_ftr, Ra, valid_object_id_list, args ,x_spatial)
attr, x_ftr, x_spatial, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
valid_object_id_stack.append(valid_object_id_list)
n_objects = x_ftr.shape[0]
feats = x_spatial
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::box_dim] - feats[j, 0::box_dim] # x
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::box_dim] - feats[j, 1::box_dim] # y
Ra[:, 0:rela_spa_dim*x_step:rela_spa_dim] = -0.5
# normalize data
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x_ftr, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, ftr_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = _norm(pred_obj_valid[valid_id], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, rela_spa_dim:], dim=0)
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_ftr_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, 1, 1)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_ftr_list)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
obj_ftr = torch.stack(pred_obj_ftr_list[-pred_frm_num:], dim=1).contiguous().view(n_objects_ori, pred_frm_num, ftr_dim)
if args.visualize_flag:
# estimate the l2 difference
compare_l2_distance(f_sng, feed_dict, obj_ftr, rel_ftr_exp, valid_object_id_stack, args)
args.semantic_only_flag = semantic_only_flag_bp
return obj_ftr, rel_ftr_exp, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def compare_l2_distance(f_sng, feed_dict, obj_ftr, rel_ftr_exp, valid_object_id_stack, args):
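"""
Debug helper: zero out invalid or missing objects in both the predicted and
ground-truth features, then report the L2 distance between them.
"""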
frm_num = obj_ftr.shape[1]
obj_num = obj_ftr.shape[0]
box_dim = 4
gt_list = [f_sng[3].view(obj_num, -1, box_dim)[:, feed_dict['tube_info']['frm_list'][idx]] for idx in range(frm_num) ]
tmp_gt = torch.stack(gt_list, dim=1).view(obj_num, -1, box_dim)
invalid_mask = tmp_gt.sum(dim=2)==-2
for tmp_ftr in [obj_ftr, rel_ftr_exp]:
if len(tmp_ftr.shape)==4:
frm_num = tmp_ftr.shape[2]
tmp_gt = f_sng[2][:, :, :frm_num]
for obj_id in range(invalid_mask.shape[0]):
for frm_id in range(tmp_ftr.shape[2]):
if invalid_mask[obj_id, frm_id]:
tmp_ftr[obj_id, :, frm_id] = 0.0
tmp_ftr[:, obj_id, frm_id] = 0.0
tmp_gt[obj_id, :, frm_id] = 0.0
tmp_gt[:, obj_id, frm_id] = 0.0
# tmp_ftr: (obj_num, obj_num, frm_num , ftr_dim)
for frm_idx, valid_obj_list in enumerate(valid_object_id_stack):
frm_id = args.n_his + 1 + frm_idx
if frm_id >= tmp_ftr.shape[1]:
break
for obj_id in range(obj_num):
if obj_id not in valid_obj_list:
tmp_ftr[obj_id, :, frm_id] = 0.0
tmp_ftr[:, obj_id, frm_id] = 0.0
tmp_gt[obj_id, :, frm_id] = 0.0
tmp_gt[:, obj_id, frm_id] = 0.0
elif len(tmp_ftr.shape)==3:
frm_num = tmp_ftr.shape[1]
tmp_gt = f_sng[0][:,:frm_num]
for obj_id in range(invalid_mask.shape[0]):
for frm_id in range(tmp_ftr.shape[1]):
if invalid_mask[obj_id, frm_id]:
tmp_ftr[obj_id, frm_id] = 0.0
tmp_gt[obj_id, frm_id] = 0.0
for frm_idx, valid_obj_list in enumerate(valid_object_id_stack):
frm_id = args.n_his + 1 + frm_idx
for obj_id in range(obj_num):
if obj_id not in valid_obj_list:
tmp_ftr[obj_id, frm_id] = 0.0
tmp_gt[obj_id, frm_id] = 0.0
l2_dist = torch.dist(tmp_ftr, tmp_gt)
print('l2 distance between prediction and ground truth: %.4f' % l2_dist.item())
def predict_future_semantic_feature(model, feed_dict, f_sng, args, spatial_feature):
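"""
Roll out semantic features for future (unseen) frames, starting from the last
observed window and conditioning on the predicted spatial_feature.
"""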
semantic_only_flag_bp = args.semantic_only_flag
args.semantic_only_flag = 1
x_step = args.n_his + 1
p_id = len(feed_dict['tube_info']['frm_list']) - x_step
data = prepare_normal_prediction_input(feed_dict, f_sng, args, p_id=p_id, semantic_only_flag=True)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_rel_spatial_list = []
pred_rel_ftr_list = []
pred_obj_spatial_list = []
pred_obj_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = args.rela_spatial_dim
rela_ftr_dim = args.rela_ftr_dim
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
pred_rel_spatial_gt_list = []
n_objects_ori = x.shape[0]
x_view = x.view(n_objects_ori, x_step, box_dim + ftr_dim, 1, 1)
for t_step in range(args.n_his+1):
#pred_obj_spatial_list.append(x_view[:,t_step, :box_dim])
pred_obj_ftr_list.append(x_view[:,t_step, box_dim:])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
relation_dim = args.relation_dim
state_dim = args.state_dim
object_appear_id_list = []
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
t_dim = ftr_t_dim//box_dim
spatial_gt = f_sng[3].view(obj_num, t_dim, box_dim)
spatial_frm_num = spatial_feature.shape[1]
for p_id in range(args.pred_frm_num):
if p_id+x_step >= spatial_frm_num:
break
x_spatial = spatial_feature[:, p_id:p_id+x_step].view(n_objects_ori, -1, 1, 1)
x_ftr = torch.cat(pred_obj_ftr_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
# remove invalid objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list_v2(x_spatial, args)
if len(valid_object_id_list) == 0:
break
object_appear_id_list += valid_object_id_list
data_valid = prepare_valid_input(x_ftr, Ra, valid_object_id_list, args, x_spatial)
attr, x_ftr, x_spatial, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
valid_object_id_stack.append(valid_object_id_list)
n_objects = x_ftr.shape[0]
feats = x_spatial
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::box_dim] - feats[j, 0::box_dim] # x
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::box_dim] - feats[j, 1::box_dim] # y
Ra[:, 0:rela_spa_dim*x_step:rela_spa_dim] = -0.5
# normalize data
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x_ftr, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, ftr_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = _norm(pred_obj_valid[valid_id], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, rela_spa_dim:], dim=0)
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_ftr_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, 1, 1)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_ftr_list)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
obj_ftr = torch.stack(pred_obj_ftr_list[-pred_frm_num:], dim=1).contiguous().view(n_objects_ori, pred_frm_num, ftr_dim)
args.semantic_only_flag = semantic_only_flag_bp
return obj_ftr, rel_ftr_exp, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def predict_normal_feature_v5(model, feed_dict, f_sng, args):
"""
Separately encode the spatial and semantic features using the PropagationNetwork.
"""
if not model.training:
spatial_feature = predict_spatial_feature(model, feed_dict, f_sng, args)
else:
box_dim = 4
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
t_dim = ftr_t_dim//box_dim
spatial_gt = f_sng[3].view(obj_num, t_dim, box_dim)
frm_id_list = feed_dict['tube_info']['frm_list']
tmp_box_list = [spatial_gt[:, frm_id] for frm_id in frm_id_list]
spatial_feature = torch.stack(tmp_box_list, dim=1).contiguous().view(obj_num, -1, box_dim)
obj_ftr, rel_ftr_exp, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list \
= predict_semantic_feature(model, feed_dict, f_sng, args, spatial_feature)
obj_num = spatial_feature.shape[0]
frm_num = min(spatial_feature.shape[1], obj_ftr.shape[1])
box_ftr = spatial_feature[:, :frm_num].view(obj_num, -1).contiguous()
return obj_ftr, None, rel_ftr_exp, box_ftr, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def predict_future_spatial_feature(model, feed_dict, f_sng, args):
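"""
Autoregressively predict future box trajectories with the spatial-only
propagation model, starting from the last observed window of frames.
"""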
x_step = args.n_his + 1
p_id = len(feed_dict['tube_info']['frm_list']) - x_step
data = prepare_spatial_only_prediction_input(feed_dict, f_sng, args, p_id=p_id)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_spatial_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = Ra.shape[1] // x_step
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
for t_step in range(x_step):
pred_obj_list.append(x[:,t_step])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
n_objects_ori = x.shape[0]
relation_dim = rela_spa_dim
state_dim = box_dim
object_appear_id_list = []
pred_rel_spatial_gt_list = []
box_only_flag_bp = args.box_only_flag
args.box_only_flag = 1
for p_id in range(args.pred_frm_num):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
# remove invalid objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list_spatial(x, args)
if len(valid_object_id_list) == 0:
break
object_appear_id_list += valid_object_id_list
# update newly appearing objects
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
n_objects = x.shape[0]
feats = x
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[:, 0::rela_spa_dim] = -0.5
# padding spatial relation feature
pred_rel_spatial_gt = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, args.bbox_size, args.bbox_size, dtype=Ra.dtype, \
device=Ra.device) - 1.0
# for calculating loss
pred_rel_spatial_gt_valid = Ra[:, (x_step-1)*rela_spa_dim:x_step*rela_spa_dim]
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial_gt[ori_idx] = pred_rel_spatial_gt_valid[valid_idx]
pred_rel_spatial_gt_list.append(pred_rel_spatial_gt)
attr = torch.zeros(n_objects, 3, args.bbox_size, args.bbox_size, device=x.device)  # placeholder attribute map; torch.FloatTensor(...).cuda() left the buffer uninitialized
# normalize data
pred_obj_valid, pred_rel_valid = model._model_spatial_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj_valid += x[:, -state_dim:]
pred_obj = torch.zeros(n_objects_ori, state_dim, args.bbox_size, args.bbox_size, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) - 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_list.append(pred_obj)
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, \
1, 1).expand(n_objects_ori*n_objects_ori, rela_spa_dim, args.bbox_size, args.bbox_size)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().mean(4).mean(3).view(n_objects_ori, pred_frm_num, box_dim)
spatial_feature = box_ftr * 0.5 + 0.5  # map boxes from [-1, 1] back to [0, 1]
if args.visualize_flag:
visualize_prediction_v2(spatial_feature, feed_dict, whatif_id=-1, store_img=True, args=args)
args.box_only_flag = box_only_flag_bp
return spatial_feature
def predict_future_feature_v5(model, feed_dict, f_sng, args):
"""
Separately encode the spatial and semantic features using the PropagationNetwork.
"""
spatial_feature = predict_future_spatial_feature(model, feed_dict, f_sng, args)
obj_ftr, rel_ftr_exp, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list \
= predict_future_semantic_feature(model, feed_dict, f_sng, args, spatial_feature)
obj_num = spatial_feature.shape[0]
frm_num = min(spatial_feature.shape[1], obj_ftr.shape[1])
box_ftr = spatial_feature[:, :frm_num].view(obj_num, -1).contiguous()
return obj_ftr, None, rel_ftr_exp, box_ftr, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def predict_counterfact_spatial_feature(model, feed_dict, f_sng, args, counter_fact_id):
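"""
Predict box trajectories for the counterfactual setting in which the object
counter_fact_id is removed from the scene before each rollout step.
"""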
data = prepare_spatial_only_prediction_input(feed_dict, f_sng, args, p_id=0)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_spatial_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = Ra.shape[1] // x_step
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
for t_step in range(x_step):
pred_obj_list.append(x[:,t_step])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
n_objects_ori = x.shape[0]
relation_dim = rela_spa_dim
state_dim = box_dim
object_appear_id_list = [counter_fact_id]
pred_rel_spatial_gt_list = []
box_only_flag_bp = args.box_only_flag
args.box_only_flag = 1
for p_id in range(args.pred_normal_num):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
valid_object_id_list = check_valid_object_id_list_spatial(x, args)
if counter_fact_id in valid_object_id_list:
counter_idx = valid_object_id_list.index(counter_fact_id)
del valid_object_id_list[counter_idx]
if len(valid_object_id_list) == 0:
break
object_appear_id_list += valid_object_id_list
# update newly appearing objects
x, Ra, obj_appear_new_ids = update_new_appear_objects(x, Ra, feed_dict, f_sng, args, p_id, object_appear_id_list, spatial_only=True)
valid_object_id_list = check_valid_object_id_list_spatial(x, args)
if counter_fact_id in valid_object_id_list:
counter_idx = valid_object_id_list.index(counter_fact_id)
del valid_object_id_list[counter_idx]
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
n_objects = x.shape[0]
feats = x
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[:, 0::rela_spa_dim] = -0.5
# padding spatial relation feature
pred_rel_spatial_gt = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, args.bbox_size, args.bbox_size, dtype=Ra.dtype, \
device=Ra.device) - 1.0
# for calculating loss
pred_rel_spatial_gt_valid = Ra[:, (x_step-1)*rela_spa_dim:x_step*rela_spa_dim]
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial_gt[ori_idx] = pred_rel_spatial_gt_valid[valid_idx]
pred_rel_spatial_gt_list.append(pred_rel_spatial_gt)
attr = torch.zeros(n_objects, 3, args.bbox_size, args.bbox_size, device=x.device)  # placeholder attribute map; torch.FloatTensor(...).cuda() left the buffer uninitialized
# normalize data
pred_obj_valid, pred_rel_valid = model._model_spatial_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj_valid += x[:, -state_dim:]
pred_obj = torch.zeros(n_objects_ori, state_dim, args.bbox_size, args.bbox_size, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) - 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_list.append(pred_obj)
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, \
1, 1).expand(n_objects_ori*n_objects_ori, rela_spa_dim, args.bbox_size, args.bbox_size)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().mean(4).mean(3).view(n_objects_ori, pred_frm_num, box_dim)
spatial_feature = box_ftr * 0.5 + 0.5  # map boxes from [-1, 1] back to [0, 1]
args.box_only_flag = box_only_flag_bp
return spatial_feature
def predict_counterfact_semantic_feature(model, feed_dict, f_sng, args, spatial_feature, counter_fact_id):
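"""
Predict semantic features for the counterfactual setting in which the object
counter_fact_id is removed, conditioned on the counterfactual spatial_feature.
"""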
semantic_only_flag_bp = args.semantic_only_flag
args.semantic_only_flag = 1
data = prepare_normal_prediction_input(feed_dict, f_sng, args, p_id=0, semantic_only_flag=True)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_rel_spatial_list = []
pred_rel_ftr_list = []
pred_obj_spatial_list = []
pred_obj_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = args.rela_spatial_dim
rela_ftr_dim = args.rela_ftr_dim
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
pred_rel_spatial_gt_list = []
n_objects_ori = x.shape[0]
x_view = x.view(n_objects_ori, x_step, box_dim + ftr_dim, 1, 1)
for t_step in range(args.n_his+1):
#pred_obj_spatial_list.append(x_view[:,t_step, :box_dim])
pred_obj_ftr_list.append(x_view[:,t_step, box_dim:])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
relation_dim = args.relation_dim
state_dim = args.state_dim
object_appear_id_list = [counter_fact_id]
obj_num, ftr_t_dim = f_sng[3].shape
ftr_dim = f_sng[1].shape[-1]
t_dim = ftr_t_dim//box_dim
spatial_gt = f_sng[3].view(obj_num, t_dim, box_dim)
for p_id in range(args.pred_normal_num):
if p_id + x_step >= spatial_feature.shape[1]:
break
#x_spatial = torch.cat(pred_obj_spatial_list[p_id:p_id+x_step], dim=1)
#if model.training:
# st_id = p_id
# ed_id = st_id + x_step
# frm_id_list = feed_dict['tube_info']['frm_list'][st_id:ed_id]
# tmp_box_list = [spatial_gt[:, frm_id] for frm_id in frm_id_list]
# x_spatial = torch.stack(tmp_box_list, dim=1).contiguous().view(obj_num, x_step * box_dim, 1, 1)
#else:
x_spatial = spatial_feature[:, p_id:p_id+x_step].view(n_objects_ori, -1, 1, 1)
x_ftr = torch.cat(pred_obj_ftr_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
# remove invalid objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list_v2(x_spatial, args)
if counter_fact_id in valid_object_id_list:
counter_idx = valid_object_id_list.index(counter_fact_id)
del valid_object_id_list[counter_idx]
if len(valid_object_id_list) == 0:
break
object_appear_id_list += valid_object_id_list
# update newly appearing objects
x_ftr, x_spatial, Ra, obj_appear_new_ids = update_new_appear_objects(x_ftr, Ra, feed_dict, f_sng, args, p_id, object_appear_id_list, semantic_only_flag=True, x_spatial=x_spatial)
valid_object_id_list = check_valid_object_id_list_v2(x_spatial, args)
if counter_fact_id in valid_object_id_list:
counter_idx = valid_object_id_list.index(counter_fact_id)
del valid_object_id_list[counter_idx]
data_valid = prepare_valid_input(x_ftr, Ra, valid_object_id_list, args, x_spatial)
attr, x_ftr, x_spatial, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
valid_object_id_stack.append(valid_object_id_list)
n_objects = x_ftr.shape[0]
feats = x_spatial
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::box_dim] - feats[j, 0::box_dim] # x
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::box_dim] - feats[j, 1::box_dim] # y
Ra[:, 0:rela_spa_dim*x_step:rela_spa_dim] = -0.5
# normalize data
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x_ftr, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, ftr_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = _norm(pred_obj_valid[valid_id], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, rela_spa_dim:], dim=0)
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_ftr_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, 1, 1)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_ftr_list)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
obj_ftr = torch.stack(pred_obj_ftr_list[-pred_frm_num:], dim=1).contiguous().view(n_objects_ori, pred_frm_num, ftr_dim)
args.semantic_only_flag = semantic_only_flag_bp
return obj_ftr, rel_ftr_exp, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def predict_counterfact_features_v5(model, feed_dict, f_sng, args, counter_fact_id):
"""
Separately encode the spatial and semantic features using the PropagationNetwork.
"""
spatial_feature = predict_counterfact_spatial_feature(model, feed_dict, f_sng, args, counter_fact_id)
obj_ftr, rel_ftr_exp, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list \
= predict_counterfact_semantic_feature(model, feed_dict, f_sng, args, spatial_feature, counter_fact_id)
obj_num = spatial_feature.shape[0]
frm_num = min(spatial_feature.shape[1], obj_ftr.shape[1])
box_ftr = spatial_feature[:, :frm_num].view(obj_num, -1).contiguous()
return obj_ftr, None, rel_ftr_exp, box_ftr, valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def predict_normal_feature_v4(model, feed_dict, f_sng, args):
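"""
Jointly roll out box and appearance features with a single propagation model
(in contrast to v5, which splits spatial and semantic prediction).
"""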
data = prepare_normal_prediction_input(feed_dict, f_sng, args)
#x: obj_num, state_dim*(n_his+1)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_spatial_list = []
pred_rel_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = args.rela_spatial_dim
rela_ftr_dim = args.rela_ftr_dim
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
pred_rel_spatial_gt_list = []
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
n_objects_ori = x.shape[0]
relation_dim = args.relation_dim
state_dim = args.state_dim
object_appear_id_list = []
for p_id in range(args.pred_normal_num):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
# remove invalid objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list_v2(x, args)
if len(valid_object_id_list) == 0:
break
object_appear_id_list += valid_object_id_list
# update newly appearing objects
x, Ra, obj_appear_new_ids = update_new_appear_objects(x, Ra, feed_dict, f_sng, args, p_id, object_appear_id_list)
valid_object_id_list = check_valid_object_id_list_v2(x, args)
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
valid_object_id_stack.append(valid_object_id_list)
n_objects = x.shape[0]
feats = x
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
if args.add_rela_dist_mode==1 or args.add_rela_dist_mode==2:
Ra_x = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra_y = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra_dist = torch.sqrt(Ra_x**2 + Ra_y**2 + 1e-10)
Ra[idx, 4:rela_spa_dim*x_step:rela_spa_dim] = Ra_dist
if Ra_dist[-1] > args.rela_dist_thre:
invalid_rela_list.append(idx)
#print(Ra_dist[-1])
if args.add_rela_dist_mode==2:
Rr, Rs = update_valid_rela_input(n_objects, invalid_rela_list, feats, args)
# padding spatial relation feature
pred_rel_spatial_gt = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=Ra.dtype, \
device=Ra.device) #- 1.0
pred_rel_spatial_gt[:, 0] = -1
pred_rel_spatial_gt[:, 1] = -1
pred_rel_spatial_gt_valid = Ra[:, (x_step-1)*rela_spa_dim:x_step*rela_spa_dim].squeeze(3).squeeze(2)
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial_gt[ori_idx] = pred_rel_spatial_gt_valid[valid_idx]
pred_rel_spatial_gt_list.append(pred_rel_spatial_gt)
# normalize data
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, state_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_obj[ori_id, box_dim:] = _norm(pred_obj_valid[valid_id, box_dim:], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, rela_spa_dim:], dim=0)
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, 1, 1)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().view(n_objects_ori, pred_frm_num, box_dim)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
obj_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, box_dim:].contiguous().view(n_objects_ori, pred_frm_num, ftr_dim)
if args.visualize_flag:
visualize_prediction_v2(box_ftr, feed_dict, whatif_id=100, store_img=True, args=args)
return obj_ftr, None, rel_ftr_exp, box_ftr.view(n_objects_ori, -1), valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def predict_normal_feature_v2(model, feed_dict, f_sng, args):
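"""
Joint rollout of box and appearance features; unlike v4, newly appearing
objects are not re-injected during the rollout.
"""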
data = prepare_normal_prediction_input(feed_dict, f_sng, args)
#x: obj_num, state_dim*(n_his+1)
x_step = args.n_his + 1
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data
pred_obj_list = []
pred_rel_spatial_list = []
pred_rel_ftr_list = []
box_dim = 4
ftr_dim = f_sng[1].shape[1]
rela_spa_dim = args.rela_spatial_dim
rela_ftr_dim = args.rela_ftr_dim
Ra_spatial = Ra[:, :rela_spa_dim*x_step]
Ra_ftr = Ra[:, rela_spa_dim*x_step:]
valid_object_id_stack = []
pred_rel_spatial_gt_list = []
for t_step in range(args.n_his+1):
pred_obj_list.append(x[:,t_step*args.state_dim:(t_step+1)*args.state_dim])
pred_rel_spatial_list.append(Ra_spatial[:, t_step*rela_spa_dim:(t_step+1)*rela_spa_dim])
pred_rel_ftr_list.append(Ra_ftr[:, t_step*ftr_dim:(t_step+1)*ftr_dim])
n_objects_ori = x.shape[0]
relation_dim = args.relation_dim
state_dim = args.state_dim
for p_id in range(args.pred_normal_num):
x = torch.cat(pred_obj_list[p_id:p_id+x_step], dim=1)
Ra_spatial = torch.cat(pred_rel_spatial_list[p_id:p_id+x_step], dim=1)
Ra_ftr = torch.cat(pred_rel_ftr_list[p_id:p_id+x_step], dim=1)
Ra = torch.cat([Ra_spatial, Ra_ftr], dim=1)
# remove invalid objects whose coordinates have moved out of the frame
valid_object_id_list = check_valid_object_id_list(x, args)
if len(valid_object_id_list) == 0:
break
valid_object_id_stack.append(valid_object_id_list)
data_valid = prepare_valid_input(x, Ra, valid_object_id_list, args)
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx = data_valid
n_objects = x.shape[0]
feats = x
invalid_rela_list = []
# update relation
for i in range(n_objects):
for j in range(n_objects):
idx = i * n_objects + j
Ra[idx, 0:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra[idx, 1:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra[idx, 2:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 2::state_dim] - feats[j, 2::state_dim] # h
Ra[idx, 3:rela_spa_dim*x_step:rela_spa_dim] = feats[i, 3::state_dim] - feats[j, 3::state_dim] # w
if args.add_rela_dist_mode==1 or args.add_rela_dist_mode==2:
Ra_x = feats[i, 0::state_dim] - feats[j, 0::state_dim] # x
Ra_y = feats[i, 1::state_dim] - feats[j, 1::state_dim] # y
Ra_dist = torch.sqrt(Ra_x**2 + Ra_y**2 + 1e-10)
Ra[idx, 4:rela_spa_dim*x_step:rela_spa_dim] = Ra_dist
if Ra_dist[-1] > args.rela_dist_thre:
invalid_rela_list.append(idx)
#print(Ra_dist[-1])
if args.add_rela_dist_mode==2:
Rr, Rs = update_valid_rela_input(n_objects, invalid_rela_list, feats, args)
# padding spatial relation feature
pred_rel_spatial_gt = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=Ra.dtype, \
device=Ra.device) #- 1.0
pred_rel_spatial_gt[:, 0] = -1
pred_rel_spatial_gt[:, 1] = -1
pred_rel_spatial_gt_valid = Ra[:, (x_step-1)*rela_spa_dim:x_step*rela_spa_dim].squeeze(3).squeeze(2)
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_spatial_gt[ori_idx] = pred_rel_spatial_gt_valid[valid_idx]
pred_rel_spatial_gt_list.append(pred_rel_spatial_gt)
# normalize data
pred_obj_valid, pred_rel_valid = model._model_pred(
attr, x, Rr, Rs, Ra, node_r_idx, node_s_idx, args.pstep)
pred_obj = torch.zeros(n_objects_ori, state_dim, 1, 1, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
for valid_id, ori_id in enumerate(valid_object_id_list):
pred_obj[ori_id] = pred_obj_valid[valid_id]
pred_obj[ori_id, box_dim:] = _norm(pred_obj_valid[valid_id, box_dim:], dim=0)
pred_rel_ftr = torch.zeros(n_objects_ori*n_objects_ori, ftr_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial = torch.zeros(n_objects_ori*n_objects_ori, rela_spa_dim, dtype=pred_obj_valid.dtype, \
device=pred_obj_valid.device) #- 1.0
pred_rel_spatial[:, 0] = -1
pred_rel_spatial[:, 1] = -1
for valid_id, ori_id in enumerate(valid_object_id_list):
for valid_id_2, ori_id_2 in enumerate(valid_object_id_list):
valid_idx = valid_id * n_objects + valid_id_2
ori_idx = ori_id * n_objects_ori + ori_id_2
pred_rel_ftr[ori_idx] = _norm(pred_rel_valid[valid_idx, rela_spa_dim:], dim=0)
pred_rel_spatial[ori_idx] = pred_rel_valid[valid_idx, :rela_spa_dim]
pred_obj_list.append(pred_obj)
pred_rel_ftr_list.append(pred_rel_ftr.view(n_objects_ori*n_objects_ori, ftr_dim, 1, 1))
pred_rel_spatial_list.append(pred_rel_spatial.view(n_objects_ori*n_objects_ori, rela_spa_dim, 1, 1)) # just padding
# make the output consistent with the video scene graph
pred_frm_num = len(pred_obj_list)
box_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, :box_dim].contiguous().view(n_objects_ori, pred_frm_num, box_dim)
rel_ftr_exp = torch.stack(pred_rel_ftr_list[-pred_frm_num:], dim=1).view(n_objects_ori, n_objects_ori, pred_frm_num, ftr_dim)
obj_ftr = torch.stack(pred_obj_list[-pred_frm_num:], dim=1)[:, :, box_dim:].contiguous().view(n_objects_ori, pred_frm_num, ftr_dim)
if args.visualize_flag:
visualize_prediction_v2(box_ftr, feed_dict, whatif_id=100, store_img=True, args=args)
return obj_ftr, None, rel_ftr_exp, box_ftr.view(n_objects_ori, -1), valid_object_id_stack, pred_rel_spatial_list, pred_rel_spatial_gt_list
def visualize_prediction_v2(box_ftr, feed_dict, whatif_id=-1, store_img=False, args=None):
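"""
Render predicted boxes onto the video frames (or onto the static background
for future frames) and write the result to an .avi file, optionally dumping
per-frame images as well.
"""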
base_folder = os.path.basename(args.load).split('.')[0]
filename = str(feed_dict['meta_ann']['scene_index'])
videoname = 'dumps/'+ base_folder + '/' + filename + '_' + str(int(whatif_id)) +'.avi'
#videoname = filename + '.mp4'
if store_img:
img_folder = 'dumps/'+base_folder +'/'+filename
os.makedirs(img_folder, exist_ok=True)
background_fn = '../temporal_reasoning-master/background.png'
if not os.path.isfile(background_fn):
background_fn = '../temporal_reasoningv2/background.png'
bg = cv2.imread(background_fn)
H, W, C = bg.shape
bg = cv2.resize(bg, (W, H), interpolation=cv2.INTER_AREA)
fps = 6
fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
out = cv2.VideoWriter(videoname, fourcc, fps, (W, H))
scene_idx = feed_dict['meta_ann']['scene_index']
sub_idx = scene_idx // 1000
sub_img_folder = 'image_'+str(sub_idx).zfill(2)+'000-'+str(sub_idx+1).zfill(2)+'000'
img_full_folder = os.path.join(args.frm_img_path, sub_img_folder)
if whatif_id == -1:
n_frame = len(feed_dict['tube_info']['frm_list']) + box_ftr.shape[1] - args.n_his -1
else:
n_frame = min(box_ftr.shape[1], len(feed_dict['tube_info']['frm_list']))
padding_patch_list = []
for i in range(n_frame):
if whatif_id==-1:
if i < len(feed_dict['tube_info']['frm_list']):
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img_ori = cv2.imread(img_full_path)
img = copy.deepcopy(img_ori)
for tube_id in range(len(feed_dict['tube_info']['box_seq']['tubes'])):
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (36,255,12), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if i==len(feed_dict['tube_info']['frm_list'])-1:
padding_patch = img_ori[int(y*H):int(y*H+h*H),int(x*W):int(W*x+w*W)]
hh, ww, c = padding_patch.shape
if hh*ww*c==0:
padding_patch = np.zeros((24, 24, 3), dtype=np.float32)
padding_patch_list.append(padding_patch)
else:
#break
pred_offset = i - len(feed_dict['tube_info']['frm_list']) + args.n_his + 1
frm_id = feed_dict['tube_info']['frm_list'][-1] + (args.frame_offset*pred_offset+1)
img = copy.deepcopy(bg)
for tube_id in range(box_ftr.shape[0]):
tmp_box = box_ftr[tube_id][pred_offset]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
y2 = y + h
x2 = x + w
if w <= 0 or h <= 0 or x > 1 or y > 1 or x2 <= 0 or y2 <= 0:
continue
# clamp the box to the image boundary
x = max(x, 0)
y = max(y, 0)
x2 = min(x2, 1)
y2 = min(y2, 1)
patch_resize = cv2.resize(padding_patch_list[tube_id], (max(1, int(x2*W) - int(x*W)), max(1, int(y2*H) - int(y*H))) )
img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
#img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (36,255,12), 1)
#cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (0,0,0), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0,0,0), 2)
if store_img:
cv2.imwrite(os.path.join( img_folder, '%s_%d.png' % (filename, i)), img.astype(np.uint8))
else:
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img_rgb = cv2.imread(img_full_path)
#for tube_id in range(len(feed_dict['tube_info']['box_seq']['tubes'])):
#img = copy.deepcopy(bg)
img = copy.deepcopy(img_rgb)
for tube_id in range(box_ftr.shape[0]):
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
img_patch = img_rgb[int(y*H):int(y*H + h*H) , int(x*W): int(x*W + w*W)]
hh, ww, c = img_patch.shape
if hh*ww*c==0:
img_patch = np.zeros((24, 24, 3), dtype=np.float32)
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (36,255,12), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
tmp_box = box_ftr[tube_id][i]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
y2 = y + h
x2 = x + w
if w <= 0 or h <= 0 or x > 1 or y > 1 or x2 <= 0 or y2 <= 0:
continue
# clamp the box to the image boundary
x = max(x, 0)
y = max(y, 0)
x2 = min(x2, 1)
y2 = min(y2, 1)
#patch_resize = cv2.resize(img_patch, (max(int(x2*W) - int(x*W), 1), max(int(y2*H) - int(y*H), 1)))
#img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (0,0,0), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0,0,0), 2)
if store_img:
cv2.imwrite(os.path.join( img_folder, '%s_%d_%d.png' % (filename, i, int(whatif_id))), img.astype(np.uint8))
out.write(img)
def visualize_prediction(box_ftr, feed_dict, whatif_id=-1, store_img=False, args=None):
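"""
Render predicted boxes to a video; earlier variant kept alongside
visualize_prediction_v2.
"""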
base_folder = os.path.basename(args.load).split('.')[0]
filename = str(feed_dict['meta_ann']['scene_index'])
videoname = 'dumps/'+ base_folder + '/' + filename + '_' + str(int(whatif_id)) +'.avi'
#videoname = filename + '.mp4'
if store_img:
img_folder = 'dumps/'+base_folder +'/'+filename
os.makedirs(img_folder, exist_ok=True)
background_fn = '../temporal_reasoning-master/background.png'
if not os.path.isfile(background_fn):
background_fn = '../temporal_reasoningv2/background.png'
bg = cv2.imread(background_fn)
H, W, C = bg.shape
bg = cv2.resize(bg, (W, H), interpolation=cv2.INTER_AREA)
fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
out = cv2.VideoWriter(videoname, fourcc, 3, (W, H))
scene_idx = feed_dict['meta_ann']['scene_index']
sub_idx = scene_idx // 1000
sub_img_folder = 'image_'+str(sub_idx).zfill(2)+'000-'+str(sub_idx+1).zfill(2)+'000'
img_full_folder = os.path.join(args.frm_img_path, sub_img_folder)
if whatif_id == -1:
n_frame = len(feed_dict['tube_info']['frm_list']) + box_ftr.shape[1]
else:
n_frame = min(box_ftr.shape[1], len(feed_dict['tube_info']['frm_list']))
padding_patch_list = []
for i in range(n_frame):
if whatif_id==-1:
if i < len(feed_dict['tube_info']['frm_list']):
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img = cv2.imread(img_full_path)
for tube_id in range(len(feed_dict['tube_info']['box_seq']['tubes'])):
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
img = cv2.rectangle(img, (int(x*W), int(y*H)), (int(x*W + w*W), int(y*H + h*H)), (36,255,12), 1)
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if i==len(feed_dict['tube_info']['frm_list'])-1:
padding_patch = img[int(y*H):int(y*H+h*H), int(x*W):int(x*W+w*W)]  # fix: the start row was int(h*H), a typo corrected in visualize_prediction_v2
hh, ww, c = padding_patch.shape
if hh*ww*c==0:
padding_patch = np.zeros((24, 24, 3), dtype=np.float32)
padding_patch_list.append(padding_patch)
else:
pred_offset = i - len(feed_dict['tube_info']['frm_list'])
frm_id = feed_dict['tube_info']['frm_list'][-1] + (args.frame_offset*pred_offset+1)
img = copy.deepcopy(bg)
for tube_id in range(box_ftr.shape[0]):
tmp_box = box_ftr[tube_id][pred_offset]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
y2 = y + h
x2 = x + w
if w <= 0 or h <= 0 or x > 1 or y > 1 or x2 <= 0 or y2 <= 0:
continue
# clamp the box to the image boundary
x = max(x, 0)
y = max(y, 0)
x2 = min(x2, 1)
y2 = min(y2, 1)
patch_resize = cv2.resize(padding_patch_list[tube_id], (max(1, int(x2*W) - int(x*W)), max(1, int(y2*H) - int(y*H))) )
img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if store_img:
cv2.imwrite(os.path.join( img_folder, '%s_%d.png' % (filename, i)), img.astype(np.uint8))
else:
frm_id = feed_dict['tube_info']['frm_list'][i]
img_full_path = os.path.join(img_full_folder, 'video_'+str(scene_idx).zfill(5), str(frm_id+1)+'.png')
img_rgb = cv2.imread(img_full_path)
#for tube_id in range(len(feed_dict['tube_info']['box_seq']['tubes'])):
img = copy.deepcopy(bg)
for tube_id in range(box_ftr.shape[0]):
tmp_box = feed_dict['tube_info']['box_seq']['tubes'][tube_id][frm_id]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
img_patch = img_rgb[int(y*H):int(y*H + h*H) , int(x*W): int(x*W + w*W)]
hh, ww, c = img_patch.shape
if hh*ww*c==0:
img_patch = np.zeros((24, 24, 3), dtype=np.float32)
tmp_box = box_ftr[tube_id][i]
x = float(tmp_box[0] - tmp_box[2]*0.5)
y = float(tmp_box[1] - tmp_box[3]*0.5)
w = float(tmp_box[2])
h = float(tmp_box[3])
y2 = y + h
x2 = x + w
if w <= 0 or h <= 0 or x > 1 or y > 1 or x2 <= 0 or y2 <= 0:
continue
# clamp the box to the image boundary
x = max(x, 0)
y = max(y, 0)
x2 = min(x2, 1)
y2 = min(y2, 1)
patch_resize = cv2.resize(img_patch, (max(int(x2*W) - int(x*W), 1), max(int(y2*H) - int(y*H), 1)))
img[int(y*H):int(y2*H), int(x*W):int(x2*W)] = patch_resize
cv2.putText(img, str(tube_id), (int(x*W), int(y*H)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 2)
if store_img:
cv2.imwrite(os.path.join( img_folder, '%s_%d_%d.png' % (filename, i, int(whatif_id))), img.astype(np.uint8))
out.write(img)
def collate_dict(batch):
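"""Identity collate function: keep the batch as a list of sample dicts."""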
return batch
def remove_wrapper_for_paral_training(feed_dict_list):
# squeeze out the fake batch dim added by the data-parallel wrapper
for feed_idx, feed_dict in enumerate(feed_dict_list):
new_feed_dict = {}
for key_name, value in feed_dict.items():
if isinstance(value, torch.Tensor):
new_feed_dict[key_name] = value.squeeze(0)
else:
new_feed_dict[key_name] = value
feed_dict_list[feed_idx] = new_feed_dict
return feed_dict_list
def default_reduce_func(k, v):
if torch.is_tensor(v):
return v.mean()
return v
def custom_reduce_func(k, v):
if isinstance(v, list):
# average only the valid (non-negative) entries
for idx in range(len(v)-1, -1, -1):
if v[idx] < 0:
del v[idx]
if len(v) > 0:
return sum(v) / len(v)
else:
return -1
else:
# mask out invalid (negative) entries before averaging
invalid_mask = v < 0
valid_mask = 1 - invalid_mask.float()
valid_v = torch.sum(v * valid_mask)
valid_num = valid_mask.sum()
if valid_num > 0:
return valid_v / valid_num
else:
return -1
# NOTE: the code below is unreachable since both branches above return
if '_max' in k:
return v.max()
elif '_sum' in k:
return v.sum()
else:
return default_reduce_func(k, v)
def decode_mask_to_xyxy(mask):
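"""Convert a COCO RLE mask to an (x1, y1, x2, y2) bounding box."""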
bbx_xyxy = cocoMask.toBbox(mask)
bbx_xyxy[2] = bbx_xyxy[2] + bbx_xyxy[0]
bbx_xyxy[3] = bbx_xyxy[3] + bbx_xyxy[1]
return bbx_xyxy
def transform_conpcet_forms_for_nscl(pg_list):
nsclseq = clevrer_to_nsclseq(pg_list)
nsclqsseq = nsclseq_to_nsclqsseq(nsclseq)
return nsclqsseq
def transform_conpcet_forms_for_nscl_v2(pg_list):
nsclseq = clevrer_to_nsclseq_v2(pg_list)
nsclqsseq = nsclseq_to_nsclqsseq(nsclseq)
return nsclqsseq
def nsclseq_to_nsclqsseq(seq_program):
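"""
Attach per-parameter-type indices and shared value caches to each block of an
NSCL sequence program, producing the quasi-symbolic (qs) form.
"""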
qs_seq = copy.deepcopy(seq_program)
cached = defaultdict(list)
for sblock in qs_seq:
for param_type in gdef.parameter_types:
if param_type in sblock:
sblock[param_type + '_idx'] = len(cached[param_type])
sblock[param_type + '_values'] = cached[param_type]
cached[param_type].append(sblock[param_type])
return qs_seq
def get_clevrer_op_attribute(op):
return op.split('_')[1]
def clevrer_to_nsclseq(clevr_program_ori):
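"""
Translate a CLEVRER program (a list of op strings) into an NSCL-style sequence
program of {'op', 'inputs', ...} blocks.
"""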
# drop ops that the executor does not need
clevr_program = []
for pg_idx, pg in enumerate(clevr_program_ori):
if pg=='get_col_partner' and 0:  # disabled: 'and 0' makes this branch unreachable
if clevr_program[-1]=='unique':
uni_op = clevr_program.pop()
filter_op = clevr_program.pop()
if filter_op.startswith('filter'):
attr = clevr_program.pop()
assert attr in ALL_CONCEPTS
else:
print(clevr_program_ori)
pdb.set_trace()
else:
print(clevr_program_ori)
pdb.set_trace()
else:
clevr_program.append(pg)
nscl_program = [{'op': 'scene', 'inputs':[]}]
mapping = dict()
exe_stack = []
inputs_idx = 0
col_idx = -1
obj_num = 0
obj_stack = None
for block_id, block in enumerate(clevr_program):
if block == 'scene':
current = dict(op='scene')
elif block=='filter_shape' or block=='filter_color' or block=='filter_material':
concept = exe_stack.pop()
if len(nscl_program)>0:
last = nscl_program[-1]
else:
last = {'op': 'padding'}
if last['op']=='filter_shape' or last['op']=='filter_color' or last['op']=='filter_material':
last['concept'].append(concept)
else:
current = dict(op='filter', concept=[concept])
elif block.startswith('filter_order'):
concept = exe_stack.pop()
current = dict(op=block, temporal_concept=[concept])
if len(nscl_program)>0:
last = nscl_program[-1]
if last['op']=='filter_collision':
col_idx = inputs_idx +1
elif block.startswith('end'):
current = dict(op=block, time_concept=['end'])
elif block.startswith('start'):
current = dict(op=block, time_concept=['start'])
elif block.startswith('filter_collision'):
current = dict(op='filter_collision', relational_concept=['collision'])
col_idx = inputs_idx + 1
elif block.startswith('filter_in') or block.startswith('filter_out'):
concept = block.split('_')[-1]
current = dict(op=block, time_concept=[concept])
elif block.startswith('filter_after') or block == 'filter_before':
concept = block.split('_')[-1]
current = dict(op=block, time_concept=[concept])
elif block == 'filter_stationary' or block == 'filter_moving' or block == 'filter_falling':
concept = block.split('_')[-1]
current = dict(op='filter_temporal', temporal_concept=[concept])
elif block == 'filter_top' or block == 'filter_bottom' or block == 'filter_middle':
concept = block.split('_')[-1]
current = dict(op='filter_spatial', temporal_concept=[concept])
elif block.startswith('filter'):
current = dict(op=block)
elif block == 'unique' or block == 'events' or block == 'all_events' or block == 'null' or block == 'get_object':
continue
elif block == 'get_frame':
if not (nscl_program[-1]['op']=='start' or nscl_program[-1]['op']=='end'):
continue
current = dict(op=block)
elif block == 'objects': # fix bug on filtering time
if len(clevr_program)>(block_id+1):
next_op = clevr_program[block_id+1]
if next_op=='filter_collision':
continue
current = dict(op=block)
obj_num +=1
if obj_num>1:
obj_stack = inputs_idx
elif block == 'events':  # unreachable: 'events' is already consumed by the continue branch above
current = dict(op=block)
elif block in ALL_CONCEPTS:
exe_stack.append(block)
continue
else:
if block.startswith('query'):
if block_id == len(clevr_program) - 1:
attribute = get_clevrer_op_attribute(block)
current = dict(op='query', attribute=attribute)
elif block == 'exist':
current = dict(op='exist')
elif block == 'count':
if block_id == len(clevr_program) - 1:
current = dict(op='count')
else:
current = dict(op=block)
#raise ValueError('Unknown CLEVR operation: {}.'.format(op))
if current is None:
assert len(block['inputs']) == 1
else:
if block =='end' or block == 'start':
current['inputs'] = []
elif block =='get_frame':
current['inputs'] = [inputs_idx - 1, inputs_idx ]
elif block =='get_col_partner':
current['inputs'] = [inputs_idx, col_idx]
elif block == 'filter_stationary' or block == 'filter_moving':
if obj_stack is not None:
current['inputs'] = [obj_stack, inputs_idx]
else:
current['inputs'] = [inputs_idx]
else:
current['inputs'] = [inputs_idx]
inputs_idx +=1
nscl_program.append(current)
return nscl_program
def sort_by_x(obj):
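"""Sort key for objects based on their box coordinate feature."""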
return obj[1][0, 1, 0, 0]
def decode_mask_to_box(mask, crop_box_size, H, W):
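"""
Convert a COCO RLE mask into (a) an xyxy box in pixels, (b) a normalized
center-x/center-y/w/h map broadcast to crop_box_size, and (c) an integer crop box.
"""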
bbx_xywh = cocoMask.toBbox(mask)
bbx_xyxy = copy.deepcopy(bbx_xywh)
crop_box = copy.deepcopy(bbx_xywh)
bbx_xyxy[2] = bbx_xyxy[2] + bbx_xyxy[0]
bbx_xyxy[3] = bbx_xyxy[3] + bbx_xyxy[1]
bbx_xywh[0] = bbx_xywh[0]*1.0/mask['size'][1]
bbx_xywh[2] = bbx_xywh[2]*1.0/mask['size'][1]
bbx_xywh[1] = bbx_xywh[1]*1.0/mask['size'][0]
bbx_xywh[3] = bbx_xywh[3]*1.0/mask['size'][0]
bbx_xywh[0] = bbx_xywh[0] + bbx_xywh[2]/2.0
bbx_xywh[1] = bbx_xywh[1] + bbx_xywh[3]/2.0
crop_box[1] = int((bbx_xyxy[0])*W/mask['size'][1]) # w
crop_box[0] = int((bbx_xyxy[1])*H/mask['size'][0]) # h
crop_box[2] = int(crop_box_size[0])
crop_box[3] = int(crop_box_size[1])
ret = np.ones((4, crop_box_size[0], crop_box_size[1]))
ret[0, :, :] *= bbx_xywh[0]
ret[1, :, :] *= bbx_xywh[1]
ret[2, :, :] *= bbx_xywh[2]
ret[3, :, :] *= bbx_xywh[3]
ret = torch.FloatTensor(ret)
return bbx_xyxy, ret, crop_box.astype(int)
def mapping_obj_ids_to_tube_ids(objects, tubes, frm_id ):
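"""
Map per-frame object ids to tube ids by matching boxes; objects without a
matching tube are assigned to unused tube ids as a fallback.
"""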
obj_id_to_map_id = {}
fix_ids = []
for obj_id, obj_info in enumerate(objects):
bbox_xyxy, xyhw_exp, crop_box = decode_mask_to_box(objects[obj_id]['mask'], [24, 24], 100, 150)
tube_id = get_tube_id_from_bbox(bbox_xyxy, frm_id, tubes)
obj_id_to_map_id[obj_id] = tube_id
if tube_id==-1:
fix_ids.append(obj_id)
if len(fix_ids)>0:
fix_id = 0 # fix invalid ids by assigning them to unused tube ids
for t_id in range(len(tubes)):
if t_id in obj_id_to_map_id.values():
continue
else:
obj_id_to_map_id[fix_ids[fix_id]] = t_id
fix_id +=1
print('invalid tube ids!\n')
if fix_id==len(fix_ids):
break
next_tube_id = len(tubes)
for obj_id, mapped_id in obj_id_to_map_id.items():
if mapped_id == -1:
obj_id_to_map_id[obj_id] = next_tube_id
next_tube_id += 1
return obj_id_to_map_id
def check_box_in_tubes(objects, idx, tubes):
tube_frm_boxes = [tube[idx] for tube in tubes]
for obj_id, obj_info in enumerate(objects):
box_xyxy = decode_box(obj_info['mask'])
if list(box_xyxy) not in tube_frm_boxes:
return False
return True
def decode_box(obj_info):
bbx_xywh = mask.toBbox(obj_info)
bbx_xyxy = copy.deepcopy(bbx_xywh)
bbx_xyxy[2] = bbx_xyxy[2] + bbx_xyxy[0]
bbx_xyxy[3] = bbx_xyxy[3] + bbx_xyxy[1]
return bbx_xyxy
def set_debugger():
from IPython.core import ultratb
sys.excepthook = ultratb.FormattedTB(call_pdb=True)
def get_tube_id_from_bbox(bbox_xyxy, frame_id, tubes):
for tube_id, tube_info in enumerate(tubes):
if tube_info[frame_id]==list(bbox_xyxy):
return tube_id
return -1
def checking_duplicate_box_among_tubes(frm_list, tubes):
"""
check whether the same box is used by different tubes
"""
valid_flag=False
for frm_idx, frm_id in enumerate(frm_list):
for tube_id, tube_info in enumerate(tubes):
tmp_box = tube_info[frm_id]
for tube_id2 in range(tube_id+1, len(tubes)):
if tmp_box==tubes[tube_id2][frm_id]:
valid_flag=True
return valid_flag
return valid_flag
def check_object_inconsistent_identifier(frm_list, tubes):
"""
check whether boxes are lost during tracking
"""
valid_flag = False
for tube_id, tube_info in enumerate(tubes):
if tube_info[frm_list[0]]!=[0,0,1,1]:
for tmp_id in range(1, len(frm_list)):
tmp_frm = frm_list[tmp_id]
if tube_info[tmp_frm]==[0, 0, 1, 1]:
valid_flag=True
return valid_flag
return valid_flag
def jsonload(path):
with open(path) as f:
return json.load(f)
def jsondump(path, this_dic):
with open(path, 'w') as f:
json.dump(this_dic, f)
def pickleload(path):
with open(path, 'rb') as f:
return pickle.load(f)
def pickledump(path, this_dic):
with open(path, 'wb') as f:
pickle.dump(this_dic, f)
def clevrer_to_nsclseq_v2(clevr_program_ori):
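"""
v2 of the CLEVRER-to-NSCL translation; renames several ops (e.g.
query_collision_partner -> get_col_partner) and tracks ancestor inputs for
filter_ancestor.
"""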
# copy the raw program (kept for parity with clevrer_to_nsclseq; nothing is filtered here)
clevr_program = list(clevr_program_ori)
nscl_program = [{'op': 'scene', 'inputs':[]}]
mapping = dict()
exe_stack = []
inputs_idx = 0
col_idx = -1
obj_num = 0
obj_stack = None
buffer_for_ancestor = []
for block_id, block in enumerate(clevr_program):
if block == 'query_collision_partner':
block = 'get_col_partner'
if block == 'query_frame':
block = 'get_frame'
if block == 'filter_counterfact':
block = 'get_counterfact'
if block == 'query_object':
block = 'get_object'
if block == 'filter_start':
block = 'start'
if block == 'filter_end':
block = 'end'
if block == 'scene':
current = dict(op='scene')
elif block=='filter_shape' or block=='filter_color' or block=='filter_material':
if len(exe_stack)==0:
print('fail to parse program!')
print(clevr_program)
print(block_id)
continue
concept = exe_stack.pop()
if len(nscl_program)>0:
last = nscl_program[-1]
else:
last = {'op': 'padding'}
if last['op']=='filter_shape' or last['op']=='filter_color' or last['op']=='filter_material':
last['concept'].append(concept)
else:
current = dict(op='filter', concept=[concept])
elif block.startswith('filter_order'):
concept = exe_stack.pop()
current = dict(op=block, temporal_concept=[concept])
if len(nscl_program)>0:
last = nscl_program[-1]
if last['op']=='filter_collision':
col_idx = inputs_idx +1
elif block.startswith('end'):
current = dict(op=block, time_concept=['end'])
elif block.startswith('start'):
current = dict(op=block, time_concept=['start'])
elif block.startswith('filter_collision'):
current = dict(op='filter_collision', relational_concept=['collision'])
buffer_for_ancestor.append(inputs_idx)
buffer_for_ancestor.append(inputs_idx+1)
col_idx = inputs_idx + 1
elif block.startswith('filter_in') or block.startswith('filter_out'):
concept = block.split('_')[-1]
current = dict(op=block, time_concept=[concept])
buffer_for_ancestor.append(inputs_idx)
buffer_for_ancestor.append(inputs_idx+1)
elif block.startswith('filter_after') or block == 'filter_before':
concept = block.split('_')[-1]
current = dict(op=block, time_concept=[concept])
elif block == 'filter_stationary' or block == 'filter_moving' or block == 'filter_falling':
concept = block.split('_')[-1]
current = dict(op='filter_temporal', temporal_concept=[concept])
elif block == 'filter_top' or block == 'filter_bottom' or block == 'filter_middle':
concept = block.split('_')[-1]
current = dict(op='filter_spatial', spatial_concept=[concept])
elif block.startswith('filter'):
current = dict(op=block)
elif block == 'unique' or block == 'all_events' or block == 'null' or block == 'get_object':
continue
elif block == 'get_frame':
if not (nscl_program[-1]['op']=='start' or nscl_program[-1]['op']=='end'):
continue
current = dict(op=block)
elif block == 'objects': # fix bug on filtering time
if len(clevr_program)>(block_id+1):
next_op = clevr_program[block_id+1]
if next_op=='filter_collision':
continue
current = dict(op=block)
obj_num +=1
if obj_num>1:
obj_stack = inputs_idx
elif block in ALL_CONCEPTS:
exe_stack.append(block)
continue
elif block == 'filter_ancestor':
current = dict(op=block)
else:
if block.startswith('query'):
if block_id == len(clevr_program) - 1:
attribute = get_clevrer_op_attribute(block)
current = dict(op='query', attribute=attribute)
elif block == 'exist':
current = dict(op='exist')
elif block == 'count':
if block_id == len(clevr_program) - 1:
current = dict(op='count')
else:
current = dict(op=block)
if current is None:
assert len(block['inputs']) == 1
else:
if block =='end' or block == 'start':
current['inputs'] = []
elif block =='get_frame':
off_set = 0
if len(nscl_program)>=2 and nscl_program[-2]['op']=='events':
off_set +=1
current['inputs'] = [inputs_idx - 1 - off_set, inputs_idx ]
elif block =='get_col_partner':
current['inputs'] = [inputs_idx, col_idx]
elif block == 'filter_stationary' or block == 'filter_moving' or block =='filter_falling':
if obj_stack is not None:
if nscl_program[obj_stack]['op']=='events':
obj_stack -=1
current['inputs'] = [obj_stack, inputs_idx]
else:
current['inputs'] = [inputs_idx]
elif block == 'filter_ancestor':
current['inputs'] = buffer_for_ancestor
else:
current['inputs'] = [inputs_idx]
inputs_idx +=1
nscl_program.append(current)
return nscl_program
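# Hedged example of the conversion above, assuming 'red' is in ALL_CONCEPTS:
#   clevrer_to_nsclseq_v2(['objects', 'red', 'filter_color', 'count'])
# would yield
#   [{'op': 'scene', 'inputs': []},
#    {'op': 'objects', 'inputs': [0]},
#    {'op': 'filter', 'concept': ['red'], 'inputs': [1]},
#    {'op': 'count', 'inputs': [2]}]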
| 46.983967 | 186 | 0.593111 | 24,792 | 158,242 | 3.445063 | 0.022951 | 0.028755 | 0.021543 | 0.029657 | 0.894708 | 0.882157 | 0.868762 | 0.862159 | 0.856703 | 0.847758 | 0 | 0.028119 | 0.282182 | 158,242 | 3,367 | 187 | 46.997921 | 0.723802 | 0.0501 | 0 | 0.808175 | 0 | 0 | 0.029749 | 0.002857 | 0 | 0 | 0 | 0 | 0.001793 | 1 | 0.02474 | false | 0 | 0.006095 | 0.001793 | 0.057368 | 0.00251 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cb9287c4527b636b9d59ddbf710d930def886e0d | 357 | py | Python | Aula_36/Model/endereco.py | Mateus-Silva11/AulasPython | d34dc4f62ade438e68b0a80e0baac4d6ec0d378e | [
"MIT"
] | null | null | null | Aula_36/Model/endereco.py | Mateus-Silva11/AulasPython | d34dc4f62ade438e68b0a80e0baac4d6ec0d378e | [
"MIT"
] | null | null | null | Aula_36/Model/endereco.py | Mateus-Silva11/AulasPython | d34dc4f62ade438e68b0a80e0baac4d6ec0d378e | [
"MIT"
] | null | null | null | class Endereco:
    def __init__(self):
        self.id = 0
        self.logradouro = ''
        self.numero = ''
        self.complemento = ''
        self.bairro = ''
        self.cidade = ''
        self.cep = ''

    def __str__(self):
        return f'{self.id};{self.logradouro};{self.numero};{self.complemento};{self.bairro};{self.cidade};{self.cep}' | 29.75 | 117 | 0.543417 | 39 | 357 | 4.769231 | 0.410256 | 0.064516 | 0.193548 | 0.258065 | 0.709677 | 0.709677 | 0.709677 | 0.709677 | 0.709677 | 0.709677 | 0 | 0.003906 | 0.282913 | 357 | 12 | 117 | 29.75 | 0.722656 | 0 | 0 | 0 | 0 | 0.090909 | 0.276536 | 0.276536 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0 | 0.090909 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1df0cad3419ee6a9c5206f6395b45c6e5eb5c24c | 33,466 | py | Python | bilibili/broadcast/message/main_pb2.py | Privoce/all-in-danmaku-server | b13bd3dae26d65540b7cf5c3d8ef3569111d1676 | [
"MIT"
] | null | null | null | bilibili/broadcast/message/main_pb2.py | Privoce/all-in-danmaku-server | b13bd3dae26d65540b7cf5c3d8ef3569111d1676 | [
"MIT"
] | null | null | null | bilibili/broadcast/message/main_pb2.py | Privoce/all-in-danmaku-server | b13bd3dae26d65540b7cf5c3d8ef3569111d1676 | [
"MIT"
] | 2 | 2021-07-14T06:34:39.000Z | 2021-07-14T07:30:12.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bilibili/broadcast/message/main.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='bilibili/broadcast/message/main.proto',
package='bilibili.broadcast.message.main',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n%bilibili/broadcast/message/main.proto\x12\x1f\x62ilibili.broadcast.message.main\x1a\x1bgoogle/protobuf/empty.proto\"\\\n\x0fNativePageEvent\x12\x0e\n\x06pageID\x18\x01 \x01(\x03\x12\x39\n\x05items\x18\x02 \x03(\x0b\x32*.bilibili.broadcast.message.main.EventItem\"\\\n\x10TopActivityReply\x12:\n\x06online\x18\x01 \x01(\x0b\x32*.bilibili.broadcast.message.main.TopOnline\x12\x0c\n\x04hash\x18\x02 \x01(\t\"@\n\x07\x41nimate\x12\x0c\n\x04icon\x18\x01 \x01(\t\x12\x0c\n\x04json\x18\x02 \x01(\t\x12\x0b\n\x03svg\x18\x03 \x01(\t\x12\x0c\n\x04loop\x18\x04 \x01(\x05\"\xbe\x01\n\tCommandDm\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0b\n\x03oid\x18\x02 \x01(\x03\x12\x0b\n\x03mid\x18\x03 \x01(\x03\x12\x0c\n\x04type\x18\x04 \x01(\x05\x12\x0f\n\x07\x63ommand\x18\x05 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x06 \x01(\t\x12\r\n\x05state\x18\x07 \x01(\x05\x12\x10\n\x08progress\x18\x08 \x01(\x05\x12\r\n\x05\x63time\x18\t \x01(\t\x12\r\n\x05mtime\x18\n \x01(\t\x12\r\n\x05\x65xtra\x18\x0b \x01(\t\x12\r\n\x05idStr\x18\x0c \x01(\t\"\xb8\x01\n\x0b\x44\x61nmakuElem\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x10\n\x08progress\x18\x02 \x01(\x05\x12\x0c\n\x04mode\x18\x03 \x01(\x05\x12\x10\n\x08\x66ontsize\x18\x04 \x01(\x05\x12\r\n\x05\x63olor\x18\x05 \x01(\r\x12\x0f\n\x07midHash\x18\x06 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x07 \x01(\t\x12\r\n\x05\x63time\x18\x08 \x01(\x03\x12\x0e\n\x06\x61\x63tion\x18\t \x01(\t\x12\x0c\n\x04pool\x18\n \x01(\x05\x12\r\n\x05idStr\x18\x0b \x01(\t\"K\n\x0c\x44\x61nmukuEvent\x12;\n\x05\x65lems\x18\x01 \x01(\x0b\x32,.bilibili.broadcast.message.main.DanmakuElem\"m\n\tEventItem\x12\x0e\n\x06itemID\x18\x01 \x01(\x03\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0b\n\x03num\x18\x03 \x01(\x03\x12\x12\n\ndisplayNum\x18\x04 \x01(\t\x12\x0e\n\x06webKey\x18\x05 \x01(\t\x12\x11\n\tdimension\x18\x06 \x01(\x03\"&\n\x06RedDot\x12\x0c\n\x04type\x18\x01 \x01(\x05\x12\x0e\n\x06number\x18\x02 \x01(\x05\"\xda\x01\n\tTopOnline\x12\x0c\n\x04type\x18\x01 \x01(\x05\x12\x0c\n\x04icon\x18\x02 \x01(\t\x12\x0b\n\x03uri\x18\x03 \x01(\t\x12\x10\n\x08uniqueId\x18\x04 \x01(\t\x12\x39\n\x07\x61nimate\x18\x05 \x01(\x0b\x32(.bilibili.broadcast.message.main.Animate\x12\x37\n\x06redDot\x18\x06 \x01(\x0b\x32\'.bilibili.broadcast.message.main.RedDot\x12\x0c\n\x04name\x18\x07 \x01(\t\x12\x10\n\x08interval\x18\x08 \x01(\x03\x32\x65\n\nNativePage\x12W\n\x0bwatchNotify\x12\x16.google.protobuf.Empty\x1a\x30.bilibili.broadcast.message.main.NativePageEvent2d\n\x08Resource\x12X\n\x0btopActivity\x12\x16.google.protobuf.Empty\x1a\x31.bilibili.broadcast.message.main.TopActivityReplyb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_NATIVEPAGEEVENT = _descriptor.Descriptor(
name='NativePageEvent',
full_name='bilibili.broadcast.message.main.NativePageEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='pageID', full_name='bilibili.broadcast.message.main.NativePageEvent.pageID', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='items', full_name='bilibili.broadcast.message.main.NativePageEvent.items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=103,
serialized_end=195,
)
_TOPACTIVITYREPLY = _descriptor.Descriptor(
name='TopActivityReply',
full_name='bilibili.broadcast.message.main.TopActivityReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='online', full_name='bilibili.broadcast.message.main.TopActivityReply.online', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hash', full_name='bilibili.broadcast.message.main.TopActivityReply.hash', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=197,
serialized_end=289,
)
_ANIMATE = _descriptor.Descriptor(
name='Animate',
full_name='bilibili.broadcast.message.main.Animate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='icon', full_name='bilibili.broadcast.message.main.Animate.icon', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='json', full_name='bilibili.broadcast.message.main.Animate.json', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='svg', full_name='bilibili.broadcast.message.main.Animate.svg', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='loop', full_name='bilibili.broadcast.message.main.Animate.loop', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=291,
serialized_end=355,
)
_COMMANDDM = _descriptor.Descriptor(
name='CommandDm',
full_name='bilibili.broadcast.message.main.CommandDm',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='bilibili.broadcast.message.main.CommandDm.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oid', full_name='bilibili.broadcast.message.main.CommandDm.oid', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mid', full_name='bilibili.broadcast.message.main.CommandDm.mid', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.broadcast.message.main.CommandDm.type', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='command', full_name='bilibili.broadcast.message.main.CommandDm.command', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='bilibili.broadcast.message.main.CommandDm.content', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='bilibili.broadcast.message.main.CommandDm.state', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='progress', full_name='bilibili.broadcast.message.main.CommandDm.progress', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ctime', full_name='bilibili.broadcast.message.main.CommandDm.ctime', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mtime', full_name='bilibili.broadcast.message.main.CommandDm.mtime', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='extra', full_name='bilibili.broadcast.message.main.CommandDm.extra', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='idStr', full_name='bilibili.broadcast.message.main.CommandDm.idStr', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=358,
serialized_end=548,
)
_DANMAKUELEM = _descriptor.Descriptor(
name='DanmakuElem',
full_name='bilibili.broadcast.message.main.DanmakuElem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='bilibili.broadcast.message.main.DanmakuElem.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='progress', full_name='bilibili.broadcast.message.main.DanmakuElem.progress', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mode', full_name='bilibili.broadcast.message.main.DanmakuElem.mode', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fontsize', full_name='bilibili.broadcast.message.main.DanmakuElem.fontsize', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='color', full_name='bilibili.broadcast.message.main.DanmakuElem.color', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='midHash', full_name='bilibili.broadcast.message.main.DanmakuElem.midHash', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='bilibili.broadcast.message.main.DanmakuElem.content', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ctime', full_name='bilibili.broadcast.message.main.DanmakuElem.ctime', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='action', full_name='bilibili.broadcast.message.main.DanmakuElem.action', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pool', full_name='bilibili.broadcast.message.main.DanmakuElem.pool', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='idStr', full_name='bilibili.broadcast.message.main.DanmakuElem.idStr', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=551,
serialized_end=735,
)
_DANMUKUEVENT = _descriptor.Descriptor(
name='DanmukuEvent',
full_name='bilibili.broadcast.message.main.DanmukuEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='elems', full_name='bilibili.broadcast.message.main.DanmukuEvent.elems', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=737,
serialized_end=812,
)
_EVENTITEM = _descriptor.Descriptor(
name='EventItem',
full_name='bilibili.broadcast.message.main.EventItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='itemID', full_name='bilibili.broadcast.message.main.EventItem.itemID', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.broadcast.message.main.EventItem.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='num', full_name='bilibili.broadcast.message.main.EventItem.num', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='displayNum', full_name='bilibili.broadcast.message.main.EventItem.displayNum', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='webKey', full_name='bilibili.broadcast.message.main.EventItem.webKey', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dimension', full_name='bilibili.broadcast.message.main.EventItem.dimension', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=814,
serialized_end=923,
)
_REDDOT = _descriptor.Descriptor(
name='RedDot',
full_name='bilibili.broadcast.message.main.RedDot',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.broadcast.message.main.RedDot.type', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='number', full_name='bilibili.broadcast.message.main.RedDot.number', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=925,
serialized_end=963,
)
_TOPONLINE = _descriptor.Descriptor(
name='TopOnline',
full_name='bilibili.broadcast.message.main.TopOnline',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.broadcast.message.main.TopOnline.type', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='icon', full_name='bilibili.broadcast.message.main.TopOnline.icon', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='uri', full_name='bilibili.broadcast.message.main.TopOnline.uri', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='uniqueId', full_name='bilibili.broadcast.message.main.TopOnline.uniqueId', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='animate', full_name='bilibili.broadcast.message.main.TopOnline.animate', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='redDot', full_name='bilibili.broadcast.message.main.TopOnline.redDot', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='bilibili.broadcast.message.main.TopOnline.name', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='interval', full_name='bilibili.broadcast.message.main.TopOnline.interval', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=966,
serialized_end=1184,
)
_NATIVEPAGEEVENT.fields_by_name['items'].message_type = _EVENTITEM
_TOPACTIVITYREPLY.fields_by_name['online'].message_type = _TOPONLINE
_DANMUKUEVENT.fields_by_name['elems'].message_type = _DANMAKUELEM
_TOPONLINE.fields_by_name['animate'].message_type = _ANIMATE
_TOPONLINE.fields_by_name['redDot'].message_type = _REDDOT
DESCRIPTOR.message_types_by_name['NativePageEvent'] = _NATIVEPAGEEVENT
DESCRIPTOR.message_types_by_name['TopActivityReply'] = _TOPACTIVITYREPLY
DESCRIPTOR.message_types_by_name['Animate'] = _ANIMATE
DESCRIPTOR.message_types_by_name['CommandDm'] = _COMMANDDM
DESCRIPTOR.message_types_by_name['DanmakuElem'] = _DANMAKUELEM
DESCRIPTOR.message_types_by_name['DanmukuEvent'] = _DANMUKUEVENT
DESCRIPTOR.message_types_by_name['EventItem'] = _EVENTITEM
DESCRIPTOR.message_types_by_name['RedDot'] = _REDDOT
DESCRIPTOR.message_types_by_name['TopOnline'] = _TOPONLINE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
NativePageEvent = _reflection.GeneratedProtocolMessageType('NativePageEvent', (_message.Message,), {
'DESCRIPTOR' : _NATIVEPAGEEVENT,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.NativePageEvent)
})
_sym_db.RegisterMessage(NativePageEvent)
TopActivityReply = _reflection.GeneratedProtocolMessageType('TopActivityReply', (_message.Message,), {
'DESCRIPTOR' : _TOPACTIVITYREPLY,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.TopActivityReply)
})
_sym_db.RegisterMessage(TopActivityReply)
Animate = _reflection.GeneratedProtocolMessageType('Animate', (_message.Message,), {
'DESCRIPTOR' : _ANIMATE,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.Animate)
})
_sym_db.RegisterMessage(Animate)
CommandDm = _reflection.GeneratedProtocolMessageType('CommandDm', (_message.Message,), {
'DESCRIPTOR' : _COMMANDDM,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.CommandDm)
})
_sym_db.RegisterMessage(CommandDm)
DanmakuElem = _reflection.GeneratedProtocolMessageType('DanmakuElem', (_message.Message,), {
'DESCRIPTOR' : _DANMAKUELEM,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.DanmakuElem)
})
_sym_db.RegisterMessage(DanmakuElem)
DanmukuEvent = _reflection.GeneratedProtocolMessageType('DanmukuEvent', (_message.Message,), {
'DESCRIPTOR' : _DANMUKUEVENT,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.DanmukuEvent)
})
_sym_db.RegisterMessage(DanmukuEvent)
EventItem = _reflection.GeneratedProtocolMessageType('EventItem', (_message.Message,), {
'DESCRIPTOR' : _EVENTITEM,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.EventItem)
})
_sym_db.RegisterMessage(EventItem)
RedDot = _reflection.GeneratedProtocolMessageType('RedDot', (_message.Message,), {
'DESCRIPTOR' : _REDDOT,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.RedDot)
})
_sym_db.RegisterMessage(RedDot)
TopOnline = _reflection.GeneratedProtocolMessageType('TopOnline', (_message.Message,), {
'DESCRIPTOR' : _TOPONLINE,
'__module__' : 'bilibili.broadcast.message.main_pb2'
# @@protoc_insertion_point(class_scope:bilibili.broadcast.message.main.TopOnline)
})
_sym_db.RegisterMessage(TopOnline)
_NATIVEPAGE = _descriptor.ServiceDescriptor(
name='NativePage',
full_name='bilibili.broadcast.message.main.NativePage',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1186,
serialized_end=1287,
methods=[
_descriptor.MethodDescriptor(
name='watchNotify',
full_name='bilibili.broadcast.message.main.NativePage.watchNotify',
index=0,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=_NATIVEPAGEEVENT,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_NATIVEPAGE)
DESCRIPTOR.services_by_name['NativePage'] = _NATIVEPAGE
_RESOURCE = _descriptor.ServiceDescriptor(
name='Resource',
full_name='bilibili.broadcast.message.main.Resource',
file=DESCRIPTOR,
index=1,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1289,
serialized_end=1389,
methods=[
_descriptor.MethodDescriptor(
name='topActivity',
full_name='bilibili.broadcast.message.main.Resource.topActivity',
index=0,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=_TOPACTIVITYREPLY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_RESOURCE)
DESCRIPTOR.services_by_name['Resource'] = _RESOURCE
# @@protoc_insertion_point(module_scope)
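# Hedged usage sketch for the generated message classes (field values made up):
#   elem = DanmakuElem(id=1, progress=1500, mode=1, content='hello')
#   payload = elem.SerializeToString()            # standard protobuf API
#   assert DanmakuElem.FromString(payload).content == 'hello'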
| 46.28769 | 2,585 | 0.753332 | 4,325 | 33,466 | 5.540578 | 0.061734 | 0.051079 | 0.080082 | 0.105162 | 0.817385 | 0.77699 | 0.765221 | 0.668447 | 0.658849 | 0.650753 | 0 | 0.035902 | 0.120271 | 33,466 | 722 | 2,586 | 46.351801 | 0.778031 | 0.028775 | 0 | 0.712349 | 1 | 0.001506 | 0.197389 | 0.165774 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.00753 | 0 | 0.00753 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
383fc03b20e1861cb5172f40fd1ccd9bce8c1c4c | 4,448 | py | Python | 2021/src/day_23_test.py | asmundg/adventofcode | adc0c9c8ba1d0ef04b621f6f8a5237ee34b9a230 | [
"MIT"
] | null | null | null | 2021/src/day_23_test.py | asmundg/adventofcode | adc0c9c8ba1d0ef04b621f6f8a5237ee34b9a230 | [
"MIT"
] | null | null | null | 2021/src/day_23_test.py | asmundg/adventofcode | adc0c9c8ba1d0ef04b621f6f8a5237ee34b9a230 | [
"MIT"
] | null | null | null | from day_23 import available_moves
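# Interpretation inferred from the cases below (not stated in the source):
# available_moves(pos, state) comes from an Advent of Code 2021 day 23
# ("Amphipod") solution; `pos` is an (x, y) burrow coordinate with y == 0 the
# hallway and y >= 1 a room slot, `state` maps occupied coordinates to the
# amphipod type ("A".."D"), and the call returns the legal target coordinates.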
def test_available_moves_0():
    assert sorted(
        available_moves(
            (6, 1),
            {
                (2, 1): "B", (2, 2): "A", (4, 1): "C", (4, 2): "D",
                (6, 1): "B", (6, 2): "C", (8, 1): "D", (8, 2): "A",
            },
        )
    ) == [(0, 0), (1, 0), (3, 0), (5, 0), (7, 0), (9, 0), (10, 0)]


def test_available_moves_1():
    assert available_moves(
        (4, 1),
        {
            (2, 1): "B", (2, 2): "A", (4, 1): "C", (4, 2): "D",
            (3, 0): "B", (6, 2): "C", (8, 1): "D", (8, 2): "A",
        },
    ) == [(6, 1)]


def test_available_moves_2():
    assert available_moves(
        (4, 2),
        {
            (2, 1): "B", (2, 2): "A", (6, 1): "C", (4, 2): "D",
            (3, 0): "B", (6, 2): "C", (8, 1): "D", (8, 2): "A",
        },
    ) == [(5, 0), (7, 0), (9, 0), (10, 0)]


def test_available_moves_3():
    assert available_moves(
        (3, 0),
        {
            (2, 1): "B", (2, 2): "A", (6, 1): "C", (5, 1): "D",
            (3, 0): "B", (6, 2): "C", (8, 1): "D", (8, 2): "A",
        },
    ) == [(4, 2)]


def test_available_moves_4():
    assert available_moves(
        (2, 1),
        {
            (2, 1): "B", (2, 2): "A", (4, 2): "B", (5, 0): "D",
            (6, 1): "C", (6, 2): "C", (8, 1): "D", (8, 2): "A",
        },
    ) == [(4, 1)]


def test_available_moves_5():
    assert available_moves(
        (8, 1),
        {
            (2, 2): "A", (4, 1): "B", (4, 2): "B", (5, 0): "D",
            (6, 1): "C", (6, 2): "C", (8, 1): "D", (8, 2): "A",
        },
    ) == [(7, 0), (9, 0), (10, 0)]


def test_available_moves_6():
    assert available_moves(
        (8, 2),
        {
            (2, 2): "A", (4, 1): "B", (4, 2): "B", (5, 0): "D",
            (6, 1): "C", (6, 2): "C", (7, 0): "D", (8, 2): "A",
        },
    ) == [(9, 0), (10, 0)]


def test_available_moves_7():
    assert available_moves(
        (7, 0),
        {
            (2, 2): "A", (4, 1): "B", (4, 2): "B", (5, 0): "D",
            (6, 1): "C", (6, 2): "C", (7, 0): "D", (9, 1): "A",
        },
    ) == [(8, 2)]


def test_available_moves_8():
    assert available_moves(
        (5, 0),
        {
            (2, 2): "A", (4, 1): "B", (4, 2): "B", (5, 0): "D",
            (6, 1): "C", (6, 2): "C", (8, 2): "D", (9, 0): "A",
        },
    ) == [(8, 1)]


def test_available_moves_9():
    assert available_moves(
        (9, 0),
        {
            (2, 2): "A", (4, 1): "B", (4, 2): "B", (6, 1): "C",
            (6, 2): "C", (8, 1): "D", (8, 2): "D", (9, 0): "A",
        },
    ) == [(2, 1)]


def test_available_moves_10():
    assert available_moves(
        (9, 0),
        {
            (2, 2): "B", (4, 1): "A", (4, 2): "B", (6, 1): "C",
            (6, 2): "C", (8, 1): "D", (8, 2): "D", (9, 0): "A",
        },
    ) == []
| 20.40367 | 44 | 0.197392 | 425 | 4,448 | 1.957647 | 0.058824 | 0.387019 | 0.211538 | 0.277644 | 0.657452 | 0.498798 | 0.492788 | 0.438702 | 0.399038 | 0.379808 | 0 | 0.14681 | 0.601844 | 4,448 | 217 | 45 | 20.497696 | 0.322981 | 0 | 0 | 0.579487 | 0 | 0 | 0.019784 | 0 | 0 | 0 | 0 | 0 | 0.05641 | 1 | 0.05641 | true | 0 | 0.005128 | 0 | 0.061538 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
697d23aadae7d54d875670d0f5b76d364fce92fa | 178 | py | Python | pytouch/utils/__init__.py | Pandinosaurus/PyTouch | 3a52bc004bebffe8da75294be53f193062d6577f | [
"MIT"
] | null | null | null | pytouch/utils/__init__.py | Pandinosaurus/PyTouch | 3a52bc004bebffe8da75294be53f193062d6577f | [
"MIT"
] | null | null | null | pytouch/utils/__init__.py | Pandinosaurus/PyTouch | 3a52bc004bebffe8da75294be53f193062d6577f | [
"MIT"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from .model_utils import *
from .train_utils import *
from .transforms import *
from .vis_utils import *
| 25.428571 | 71 | 0.758427 | 25 | 178 | 5.28 | 0.68 | 0.25 | 0.227273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.157303 | 178 | 6 | 72 | 29.666667 | 0.88 | 0.38764 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
387485bceb3c478ff0dde7502d9a9ccb71d1455f | 44 | py | Python | parameters_443.py | BetinRibeiro/web2py_crediario | d7b0aef4579870922c6d87b4b0322b427b2bef98 | [
"BSD-3-Clause"
] | 2 | 2019-10-18T23:04:22.000Z | 2019-10-24T04:03:10.000Z | parameters_443.py | BetinRibeiro/web2py_crediario | d7b0aef4579870922c6d87b4b0322b427b2bef98 | [
"BSD-3-Clause"
] | null | null | null | parameters_443.py | BetinRibeiro/web2py_crediario | d7b0aef4579870922c6d87b4b0322b427b2bef98 | [
"BSD-3-Clause"
] | null | null | null | password="8efefb2c6c7beac3c155ebbfaf35d5b0"
| 22 | 43 | 0.909091 | 2 | 44 | 20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.27907 | 0.022727 | 44 | 1 | 44 | 44 | 0.651163 | 0 | 0 | 0 | 0 | 0 | 0.727273 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
3892c6d0fb83619c4698bd78707e4daa40e8d8ae | 3,277 | py | Python | 208.py | wilbertgeng/LeetCode_exercise | f00c08e0d28ffa88d61d4262c6d1f49f1fa91ebc | [
"MIT"
] | null | null | null | 208.py | wilbertgeng/LeetCode_exercise | f00c08e0d28ffa88d61d4262c6d1f49f1fa91ebc | [
"MIT"
] | null | null | null | 208.py | wilbertgeng/LeetCode_exercise | f00c08e0d28ffa88d61d4262c6d1f49f1fa91ebc | [
"MIT"
] | null | null | null | """208. Implement Trie (Prefix Tree)"""
import collections  # needed for defaultdict below; missing in the original snippet


###### Practice:
class TrieNode:
    def __init__(self):
        self.children = collections.defaultdict(TrieNode)
        self.is_word = False


class Trie(object):
    def __init__(self):
        self.root = TrieNode()

    def insert(self, word):
        current = self.root
        for letter in word:
            current = current.children[letter]
        current.is_word = True

    def search(self, word):
        current = self.root
        for letter in word:
            current = current.children.get(letter)
            if not current:
                return False
        return current.is_word

    def startsWith(self, prefix):
        current = self.root
        for letter in prefix:
            current = current.children.get(letter)
            if not current:
                return False
        return True


#########
class TrieNode:
    def __init__(self):
        self.children = collections.defaultdict(TrieNode)
        self.is_word = False


##### R2:
class Trie(object):
    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.root = TrieNode()

    def insert(self, word):
        """
        Inserts a word into the trie.
        :type word: str
        :rtype: None
        """
        current = self.root
        for letter in word:
            current = current.children[letter]
        current.is_word = True

    def search(self, word):
        """
        Returns if the word is in the trie.
        :type word: str
        :rtype: bool
        """
        current = self.root
        for letter in word:
            current = current.children.get(letter)  # was `chirldren` (typo)
            if current is None:
                return False
        return current.is_word

    def startsWith(self, prefix):
        """
        Returns if there is any word in the trie that starts with the given prefix.
        :type prefix: str
        :rtype: bool
        """
        current = self.root
        for letter in prefix:
            current = current.children.get(letter)  # was `chirldren` (typo)
            if current is None:
                return False
        return True


##### R1:
class Trie(object):
    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.root = TrieNode()

    def insert(self, word):
        """
        Inserts a word into the trie.
        :type word: str
        :rtype: None
        """
        current = self.root
        for letter in word:
            current = current.children[letter]
        current.is_word = True

    def search(self, word):
        """
        Returns if the word is in the trie.
        :type word: str
        :rtype: bool
        """
        current = self.root
        for letter in word:
            current = current.children.get(letter)
            if current is None:
                return False
        return current.is_word

    def startsWith(self, prefix):
        """
        Returns if there is any word in the trie that starts with the given prefix.
        :type prefix: str
        :rtype: bool
        """
        current = self.root
        for letter in prefix:
            current = current.children.get(letter)
            if current is None:
                return False
        return True
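# Hedged usage sketch (mirrors the LeetCode 208 example):
#   trie = Trie()
#   trie.insert("apple")
#   assert trie.search("apple")
#   assert not trie.search("app")      # "app" was never inserted as a word
#   assert trie.startsWith("app")      # ...but it is a stored prefix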
| 23.57554 | 83 | 0.539213 | 366 | 3,277 | 4.751366 | 0.150273 | 0.055204 | 0.077631 | 0.093157 | 0.975848 | 0.975848 | 0.960897 | 0.946521 | 0.946521 | 0.946521 | 0 | 0.002432 | 0.372597 | 3,277 | 138 | 84 | 23.746377 | 0.843385 | 0.179432 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.189189 | false | 0 | 0 | 0 | 0.418919 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
38a890267094b3a69307e3c30542ea1a60b5ab37 | 1,863 | py | Python | pyserver/testproject2/pgapp1/migrations/0001_initial.py | kappakkaala/mypyscripts | 263a07f8406bf74a6bbd2e597b60a7d2e8b7935d | [
"MIT"
] | null | null | null | pyserver/testproject2/pgapp1/migrations/0001_initial.py | kappakkaala/mypyscripts | 263a07f8406bf74a6bbd2e597b60a7d2e8b7935d | [
"MIT"
] | null | null | null | pyserver/testproject2/pgapp1/migrations/0001_initial.py | kappakkaala/mypyscripts | 263a07f8406bf74a6bbd2e597b60a7d2e8b7935d | [
"MIT"
] | null | null | null | # Generated by Django 4.0.4 on 2022-05-05 13:03
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Available',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('buildings', models.CharField(max_length=10)),
                ('rooms', models.CharField(max_length=10)),
                ('availability', models.DateField(blank=True, default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Booked',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('buildings', models.CharField(max_length=10)),
                ('rooms', models.CharField(max_length=10)),
                ('availability', models.DateField(blank=True, default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('buildings', models.CharField(max_length=10)),
            ],
        ),
        migrations.CreateModel(
            name='Rooms',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('buildings', models.CharField(max_length=10)),
                ('rooms', models.CharField(max_length=10)),
                ('availability', models.DateField(blank=True, default=django.utils.timezone.now)),
            ],
        ),
    ]
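# Hedged note: this auto-generated migration creates the four tables above and
# would normally be applied with `python manage.py migrate pgapp1`.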
| 37.26 | 117 | 0.567901 | 178 | 1,863 | 5.837079 | 0.275281 | 0.101059 | 0.12127 | 0.161694 | 0.775746 | 0.775746 | 0.775746 | 0.775746 | 0.775746 | 0.775746 | 0 | 0.02207 | 0.294686 | 1,863 | 49 | 118 | 38.020408 | 0.768645 | 0.024155 | 0 | 0.714286 | 1 | 0 | 0.069934 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.047619 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
38d905390d881d14d11467dd497cbc8c28ccf3f1 | 43,220 | py | Python | FPSTest/onChip/fonts.py | AndyZ-Salz/BadApple_QuecPython | cc9e2bce7dcebf02bad428a651a34b142215033e | [
"MIT"
] | null | null | null | FPSTest/onChip/fonts.py | AndyZ-Salz/BadApple_QuecPython | cc9e2bce7dcebf02bad428a651a34b142215033e | [
"MIT"
] | null | null | null | FPSTest/onChip/fonts.py | AndyZ-Salz/BadApple_QuecPython | cc9e2bce7dcebf02bad428a651a34b142215033e | [
"MIT"
] | null | null | null |
'''
16 x 16 Chinese character (hanzi) bitmap font
SimSun typeface, positive encoding (1 = lit pixel), row-by-row scan, forward bit order (MSB first)
'''
hanzi_16x16_dict = {
'移' : (0x08, 0x20, 0x1C, 0x20, 0xF0, 0x7C, 0x10, 0x84, 0x11, 0x48, 0xFC, 0x30, 0x10, 0x20, 0x30, 0x48,
0x39, 0x90, 0x54, 0x3E, 0x54, 0x42, 0x91, 0xA4, 0x10, 0x18, 0x10, 0x10, 0x10, 0x60, 0x11, 0x80),
'远' : (0x00, 0x00, 0x23, 0xF8, 0x10, 0x00, 0x10, 0x00, 0x00, 0x00, 0x07, 0xFC, 0xF1, 0x20, 0x11, 0x20,
0x11, 0x20, 0x11, 0x20, 0x11, 0x24, 0x12, 0x24, 0x12, 0x24, 0x14, 0x1C, 0x28, 0x00, 0x47, 0xFE),
'通' : (0x00, 0x00, 0x47, 0xF8, 0x20, 0x10, 0x21, 0xA0, 0x00, 0x40, 0x07, 0xFC, 0xE4, 0x44, 0x24, 0x44,
0x27, 0xFC, 0x24, 0x44, 0x24, 0x44, 0x27, 0xFC, 0x24, 0x44, 0x24, 0x54, 0x54, 0x08, 0x8F, 0xFE),
'信' : (0x08, 0x40, 0x08, 0x20, 0x0B, 0xFE, 0x10, 0x00, 0x10, 0x00, 0x31, 0xFC, 0x30, 0x00, 0x50, 0x00,
0x91, 0xFC, 0x10, 0x00, 0x10, 0x00, 0x11, 0xFC, 0x11, 0x04, 0x11, 0x04, 0x11, 0xFC, 0x11, 0x04),
}
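# Hedged sketch: every glyph above is 32 bytes — two bytes per 16-pixel row,
# most significant bit first. The helper below is illustrative and not part of
# the original file.
def _glyph_rows_16x16(data):
    """Yield each of the 16 rows as a list of 16 pixel bits (1 = lit)."""
    for r in range(16):
        word = (data[2 * r] << 8) | data[2 * r + 1]
        yield [(word >> (15 - c)) & 1 for c in range(16)]

# Example: print '信' as ASCII art.
#   for row in _glyph_rows_16x16(hanzi_16x16_dict['信']):
#       print(''.join('#' if bit else '.' for bit in row))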
'''
16 x 24 Chinese character (hanzi) bitmap font
SimSun typeface, positive encoding (1 = lit pixel), row-by-row scan, forward bit order (MSB first)
'''
hanzi_16x24_dict = {
'移' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x0E, 0x60, 0x38, 0x44, 0x08, 0x7C, 0x08, 0xCC, 0x09, 0x68,
0x0A, 0x30, 0x7C, 0x10, 0x08, 0x20, 0x18, 0x50, 0x1C, 0xB0, 0x1B, 0x22, 0x2A, 0x5E, 0x28, 0x44,
0x28, 0xC4, 0x49, 0x28, 0x08, 0x28, 0x08, 0x10, 0x08, 0x20, 0x08, 0xC0, 0x0F, 0x00, 0x00, 0x00,),
'远' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x08, 0x13, 0xF8, 0x10, 0x00, 0x10, 0x00, 0x00, 0x04,
0x03, 0x5C, 0x00, 0xA0, 0x70, 0xA0, 0x11, 0x20, 0x11, 0x20, 0x11, 0x20, 0x11, 0x20, 0x11, 0x24,
0x12, 0x24, 0x12, 0x36, 0x14, 0x1C, 0x68, 0x00, 0x46, 0x00, 0x43, 0xFC, 0x00, 0x00, 0x00, 0x00,),
'通' : (0x00, 0x00, 0x00, 0x00, 0x01, 0xF8, 0x20, 0x08, 0x10, 0x90, 0x10, 0x60, 0x00, 0x44, 0x03, 0xFC,
0x02, 0x44, 0x12, 0x44, 0x73, 0xFC, 0x12, 0x44, 0x12, 0x44, 0x12, 0x44, 0x13, 0xFC, 0x12, 0x44,
0x12, 0x44, 0x12, 0x44, 0x12, 0x4C, 0x28, 0x00, 0x46, 0x00, 0x43, 0xFE, 0x00, 0x00, 0x00, 0x00,),
'信' : (0x00, 0x00, 0x08, 0x00, 0x0C, 0x40, 0x08, 0x40, 0x08, 0x20, 0x17, 0xFE, 0x10, 0x00, 0x10, 0x00,
0x10, 0x08, 0x31, 0xF0, 0x30, 0x00, 0x50, 0x00, 0x53, 0xFC, 0x10, 0x00, 0x10, 0x00, 0x10, 0xFC,
0x11, 0x04, 0x11, 0x04, 0x11, 0x04, 0x11, 0x04, 0x11, 0xF8, 0x11, 0x04, 0x10, 0x00, 0x00, 0x00,),
}
'''
24 x 24 Chinese character (hanzi) bitmap font
SimSun typeface, positive encoding (1 = lit pixel), row-by-row scan, forward bit order (MSB first)
'''
hanzi_24x24_dict = {
'移' : (0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x01,0xC3,0x00,0x1F,0x02,0x00,0x02,
0x07,0xF8,0x02,0x04,0x18,0x02,0x0A,0x30,0x02,0x53,0x60,0x7F,0xE1,0xC0,0x06,0x01,
0x80,0x06,0x03,0xC0,0x07,0x0D,0x80,0x0E,0xF3,0xFC,0x0A,0x46,0x0C,0x12,0x4C,0x08,
0x12,0x12,0x10,0x22,0x21,0x30,0x42,0x01,0x60,0x02,0x00,0x80,0x02,0x03,0x00,0x02,
0x1C,0x00,0x02,0xE0,0x00,0x00,0x00,0x00,),
'远' : (0x00,0x00,0x00,0x00,0x00,0x00,0x10,0x00,0x20,0x08,0x7F,0xF0,0x0C,0x00,0x00,0x04,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x08,0x01,0xFF,0xFC,0x00,0x18,0x80,0x7E,0x18,
0x80,0x04,0x18,0x80,0x04,0x10,0x80,0x04,0x10,0x80,0x04,0x10,0x88,0x04,0x20,0x88,
0x04,0x60,0x8C,0x04,0xC0,0xFC,0x1B,0x00,0x00,0x31,0x00,0x00,0x60,0xFF,0xFE,0x00,
0x1F,0xF8,0x00,0x00,0x00,0x00,0x00,0x00,),
'通' : (0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x7F,0xE0,0x10,0x00,0x30,0x08,0x08,0xC0,0x0C,
0x07,0x00,0x0C,0x03,0x00,0x00,0x7F,0xF8,0x00,0x42,0x08,0x00,0x42,0x08,0x7C,0x7F,
0xF8,0x04,0x42,0x08,0x04,0x42,0x08,0x04,0x42,0x08,0x04,0x7F,0xF8,0x04,0x42,0x08,
0x04,0x42,0x08,0x04,0x42,0x08,0x04,0x42,0x18,0x1A,0x42,0x38,0x31,0x80,0x10,0x60,
0xFF,0xFC,0x00,0x1F,0xF8,0x00,0x00,0x00,),
'信' : (0x00,0x00,0x00,0x00,0x04,0x00,0x03,0x02,0x00,0x02,0x03,0x00,0x02,0x03,0x08,0x04,
0xFF,0xFC,0x04,0x00,0x00,0x08,0x00,0x10,0x0C,0x7F,0xF8,0x14,0x00,0x00,0x14,0x00,
0x10,0x24,0x7F,0xF8,0x44,0x00,0x00,0x04,0x00,0x00,0x04,0x00,0x00,0x04,0x3F,0xF8,
0x04,0x20,0x10,0x04,0x20,0x10,0x04,0x20,0x10,0x04,0x20,0x10,0x04,0x3F,0xF0,0x04,
0x20,0x10,0x04,0x20,0x10,0x00,0x00,0x00,),
}
'''
Common ASCII character set
Glyph width: 8 (the glyphs were extracted at a width of 16, so each Latin character effectively occupies 8)
Glyph height: 16
SimSun typeface, positive encoding (1 = lit pixel), row-by-row scan, forward bit order (MSB first)
'''
ascii_8x16_dict = {
' ' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
'!' : (0x00, 0x00, 0x00, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x00, 0x00, 0x10, 0x10, 0x00, 0x00,),
'"' : (0x00, 0x12, 0x24, 0x24, 0x48, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
"#" : (0x00, 0x00, 0x00, 0x12, 0x12, 0x12, 0x7E, 0x24, 0x24, 0x24, 0x7E, 0x24, 0x24, 0x24, 0x00, 0x00,),
'$' : (0x00, 0x00, 0x08, 0x3C, 0x4A, 0x4A, 0x48, 0x38, 0x0C, 0x0A, 0x0A, 0x4A, 0x4A, 0x3C, 0x08, 0x08,),
'%' : (0x00, 0x00, 0x00, 0x44, 0xA4, 0xA8, 0xA8, 0xB0, 0x54, 0x1A, 0x2A, 0x2A, 0x4A, 0x44, 0x00, 0x00,),
'&' : (0x00, 0x00, 0x00, 0x30, 0x48, 0x48, 0x48, 0x50, 0x6E, 0xA4, 0x94, 0x98, 0x89, 0x76, 0x00, 0x00,),
"'" : (0x00, 0x60, 0x20, 0x20, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
'(' : (0x00, 0x02, 0x04, 0x08, 0x08, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x08, 0x08, 0x04, 0x02, 0x00,),
')' : (0x00, 0x40, 0x20, 0x10, 0x10, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x10, 0x10, 0x20, 0x40, 0x00,),
'*' : (0x00, 0x00, 0x00, 0x00, 0x10, 0x10, 0xD6, 0x38, 0x38, 0xD6, 0x10, 0x10, 0x00, 0x00, 0x00, 0x00,),
'+' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08, 0x08, 0x7F, 0x08, 0x08, 0x08, 0x00, 0x00, 0x00, 0x00,),
',' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x20, 0x20, 0x40,),
'-' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
'.' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x60, 0x00, 0x00,),
'/' : (0x00, 0x00, 0x02, 0x04, 0x04, 0x04, 0x08, 0x08, 0x10, 0x10, 0x10, 0x20, 0x20, 0x40, 0x40, 0x00,),
'0' : (0x00, 0x00, 0x00, 0x18, 0x24, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x24, 0x18, 0x00, 0x00,),
'1' : (0x00, 0x00, 0x00, 0x08, 0x38, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x3E, 0x00, 0x00,),
'2' : (0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x42, 0x02, 0x04, 0x08, 0x10, 0x20, 0x42, 0x7E, 0x00, 0x00,),
'3' : (0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x02, 0x04, 0x18, 0x04, 0x02, 0x42, 0x42, 0x3C, 0x00, 0x00,),
'4' : (0x00, 0x00, 0x00, 0x04, 0x0C, 0x0C, 0x14, 0x24, 0x24, 0x44, 0x7F, 0x04, 0x04, 0x1F, 0x00, 0x00,),
'5' : (0x00, 0x00, 0x00, 0x7E, 0x40, 0x40, 0x40, 0x78, 0x44, 0x02, 0x02, 0x42, 0x44, 0x38, 0x00, 0x00,),
'6' : (0x00, 0x00, 0x00, 0x18, 0x24, 0x40, 0x40, 0x5C, 0x62, 0x42, 0x42, 0x42, 0x22, 0x1C, 0x00, 0x00,),
'7' : (0x00, 0x00, 0x00, 0x7E, 0x42, 0x04, 0x04, 0x08, 0x08, 0x10, 0x10, 0x10, 0x10, 0x10, 0x00, 0x00,),
'8' : (0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x42, 0x24, 0x18, 0x24, 0x42, 0x42, 0x42, 0x3C, 0x00, 0x00,),
'9' : (0x00, 0x00, 0x00, 0x38, 0x44, 0x42, 0x42, 0x42, 0x46, 0x3A, 0x02, 0x02, 0x24, 0x18, 0x00, 0x00,),
':' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00,),
';' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x10, 0x10,),
'<' : (0x00, 0x00, 0x00, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x00, 0x00,),
'=' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7E, 0x00, 0x00, 0x7E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
'>' : (0x00, 0x00, 0x00, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x00, 0x00,),
'?' : (0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x62, 0x04, 0x08, 0x08, 0x08, 0x00, 0x18, 0x18, 0x00, 0x00,),
'@' : (0x00, 0x00, 0x00, 0x38, 0x44, 0x5A, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0x5C, 0x42, 0x3C, 0x00, 0x00,),
'A' : (0x00, 0x00, 0x00, 0x10, 0x10, 0x18, 0x28, 0x28, 0x24, 0x3C, 0x44, 0x42, 0x42, 0xE7, 0x00, 0x00,),
'B' : (0x00, 0x00, 0x00, 0xF8, 0x44, 0x44, 0x44, 0x78, 0x44, 0x42, 0x42, 0x42, 0x44, 0xF8, 0x00, 0x00,),
'C' : (0x00, 0x00, 0x00, 0x3E, 0x42, 0x42, 0x80, 0x80, 0x80, 0x80, 0x80, 0x42, 0x44, 0x38, 0x00, 0x00,),
'D' : (0x00, 0x00, 0x00, 0xF8, 0x44, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x44, 0xF8, 0x00, 0x00,),
'E' : (0x00, 0x00, 0x00, 0xFC, 0x42, 0x48, 0x48, 0x78, 0x48, 0x48, 0x40, 0x42, 0x42, 0xFC, 0x00, 0x00,),
'F' : (0x00, 0x00, 0x00, 0xFC, 0x42, 0x48, 0x48, 0x78, 0x48, 0x48, 0x40, 0x40, 0x40, 0xE0, 0x00, 0x00,),
'G' : (0x00, 0x00, 0x00, 0x3C, 0x44, 0x44, 0x80, 0x80, 0x80, 0x8E, 0x84, 0x44, 0x44, 0x38, 0x00, 0x00,),
'H' : (0x00, 0x00, 0x00, 0xE7, 0x42, 0x42, 0x42, 0x42, 0x7E, 0x42, 0x42, 0x42, 0x42, 0xE7, 0x00, 0x00,),
'I' : (0x00, 0x00, 0x00, 0x7C, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x7C, 0x00, 0x00,),
'J' : (0x00, 0x00, 0x00, 0x3E, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x88, 0xF0,),
'K' : (0x00, 0x00, 0x00, 0xEE, 0x44, 0x48, 0x50, 0x70, 0x50, 0x48, 0x48, 0x44, 0x44, 0xEE, 0x00, 0x00,),
'L' : (0x00, 0x00, 0x00, 0xE0, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x42, 0xFE, 0x00, 0x00,),
'M' : (0x00, 0x00, 0x00, 0xEE, 0x6C, 0x6C, 0x6C, 0x6C, 0x6C, 0x54, 0x54, 0x54, 0x54, 0xD6, 0x00, 0x00,),
'N' : (0x00, 0x00, 0x00, 0xC7, 0x62, 0x62, 0x52, 0x52, 0x4A, 0x4A, 0x4A, 0x46, 0x46, 0xE2, 0x00, 0x00,),
'O' : (0x00, 0x00, 0x00, 0x38, 0x44, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x44, 0x38, 0x00, 0x00,),
'P' : (0x00, 0x00, 0x00, 0xFC, 0x42, 0x42, 0x42, 0x42, 0x7C, 0x40, 0x40, 0x40, 0x40, 0xE0, 0x00, 0x00,),
'Q' : (0x00, 0x00, 0x00, 0x38, 0x44, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0xB2, 0x4C, 0x38, 0x06, 0x00,),
'R' : (0x00, 0x00, 0x00, 0xFC, 0x42, 0x42, 0x42, 0x7C, 0x48, 0x48, 0x44, 0x44, 0x42, 0xE3, 0x00, 0x00,),
'S' : (0x00, 0x00, 0x00, 0x3E, 0x42, 0x42, 0x40, 0x20, 0x18, 0x04, 0x02, 0x42, 0x42, 0x7C, 0x00, 0x00,),
'T' : (0x00, 0x00, 0x00, 0xFE, 0x92, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x38, 0x00, 0x00,),
'U' : (0x00, 0x00, 0x00, 0xE7, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x3C, 0x00, 0x00,),
'V' : (0x00, 0x00, 0x00, 0xE7, 0x42, 0x42, 0x44, 0x24, 0x24, 0x28, 0x28, 0x18, 0x10, 0x10, 0x00, 0x00,),
'W' : (0x00, 0x00, 0x00, 0xD6, 0x54, 0x54, 0x54, 0x54, 0x54, 0x6C, 0x28, 0x28, 0x28, 0x28, 0x00, 0x00,),
'X' : (0x00, 0x00, 0x00, 0xE7, 0x42, 0x24, 0x24, 0x18, 0x18, 0x18, 0x24, 0x24, 0x42, 0xE7, 0x00, 0x00,),
'Y' : (0x00, 0x00, 0x00, 0xEE, 0x44, 0x44, 0x28, 0x28, 0x10, 0x10, 0x10, 0x10, 0x10, 0x38, 0x00, 0x00,),
'Z' : (0x00, 0x00, 0x00, 0x7E, 0x84, 0x04, 0x08, 0x08, 0x10, 0x20, 0x20, 0x42, 0x42, 0xFC, 0x00, 0x00,),
'[' : (0x00, 0x1E, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x1E, 0x00,),
'\\' : (0x00, 0x00, 0x40, 0x20, 0x20, 0x20, 0x10, 0x10, 0x10, 0x08, 0x08, 0x04, 0x04, 0x04, 0x02, 0x02,),
']' : (0x00, 0x78, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x78, 0x00,),
'^' : (0x00, 0x18, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
'_' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,),
'`' : (0x00, 0x60, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
'a' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x44, 0x0C, 0x34, 0x44, 0x4C, 0x36, 0x00, 0x00,),
'b' : (0x00, 0x00, 0x00, 0x00, 0xC0, 0x40, 0x40, 0x58, 0x64, 0x42, 0x42, 0x42, 0x64, 0x58, 0x00, 0x00,),
'c' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1C, 0x22, 0x40, 0x40, 0x40, 0x22, 0x1C, 0x00, 0x00,),
'd' : (0x00, 0x00, 0x00, 0x00, 0x06, 0x02, 0x02, 0x3E, 0x42, 0x42, 0x42, 0x42, 0x46, 0x3B, 0x00, 0x00,),
'e' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x7E, 0x40, 0x42, 0x3C, 0x00, 0x00,),
'f' : (0x00, 0x00, 0x00, 0x00, 0x0C, 0x12, 0x10, 0x7C, 0x10, 0x10, 0x10, 0x10, 0x10, 0x7C, 0x00, 0x00,),
'g' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3E, 0x44, 0x44, 0x38, 0x40, 0x3C, 0x42, 0x42, 0x3C,),
'h' : (0x00, 0x00, 0x00, 0x00, 0xC0, 0x40, 0x40, 0x5C, 0x62, 0x42, 0x42, 0x42, 0x42, 0xE7, 0x00, 0x00,),
'i' : (0x00, 0x00, 0x00, 0x30, 0x30, 0x00, 0x00, 0x70, 0x10, 0x10, 0x10, 0x10, 0x10, 0x7C, 0x00, 0x00,),
'j' : (0x00, 0x00, 0x00, 0x0C, 0x0C, 0x00, 0x00, 0x1C, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x44, 0x78,),
'k' : (0x00, 0x00, 0x00, 0x00, 0xC0, 0x40, 0x40, 0x4E, 0x48, 0x50, 0x70, 0x48, 0x44, 0xEE, 0x00, 0x00,),
'l' : (0x00, 0x00, 0x00, 0x10, 0x70, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x7C, 0x00, 0x00,),
'm' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0x49, 0x49, 0x49, 0x49, 0x49, 0xED, 0x00, 0x00,),
'n' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDC, 0x62, 0x42, 0x42, 0x42, 0x42, 0xE7, 0x00, 0x00,),
'o' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x42, 0x42, 0x42, 0x3C, 0x00, 0x00,),
'p' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xD8, 0x64, 0x42, 0x42, 0x42, 0x64, 0x58, 0x40, 0xE0,),
'q' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1A, 0x26, 0x42, 0x42, 0x42, 0x26, 0x1A, 0x02, 0x07,),
'r' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEE, 0x32, 0x20, 0x20, 0x20, 0x20, 0xF8, 0x00, 0x00,),
's' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3E, 0x42, 0x40, 0x3C, 0x02, 0x42, 0x7C, 0x00, 0x00,),
't' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x10, 0x7C, 0x10, 0x10, 0x10, 0x10, 0x12, 0x0C, 0x00, 0x00,),
'u' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC6, 0x42, 0x42, 0x42, 0x42, 0x46, 0x3B, 0x00, 0x00,),
'v' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEE, 0x44, 0x44, 0x28, 0x28, 0x10, 0x10, 0x00, 0x00,),
'w' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDB, 0x89, 0x4A, 0x5A, 0x54, 0x24, 0x24, 0x00, 0x00,),
'x' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x76, 0x24, 0x18, 0x18, 0x18, 0x24, 0x6E, 0x00, 0x00,),
'y' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE7, 0x42, 0x24, 0x24, 0x18, 0x18, 0x10, 0x10, 0x60,),
'z' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7E, 0x44, 0x08, 0x10, 0x10, 0x22, 0x7E, 0x00, 0x00,),
'{' : (0x00, 0x03, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x08, 0x04, 0x04, 0x04, 0x04, 0x04, 0x03, 0x00,),
'|' : (0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,),
'}' : (0x00, 0xC0, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x10, 0x20, 0x20, 0x20, 0x20, 0x20, 0xC0, 0x00,),
'~' : (0x20, 0x5A, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,),
}
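# Minimal rendering sketch (added for illustration, not part of the original
# font table): each glyph in the 8x16 table above is a tuple of 16 bytes, one
# byte per row, assuming the same row-by-row, high-bit-first layout documented
# for the 16x24 table below.
def _render_8x16(glyph):
    for byte in glyph:
        print(''.join('#' if byte & (0x80 >> col) else '.' for col in range(8)))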
'''
Common ASCII character set.
Glyph width: 16  (during bitmap extraction the width was set to 32; the
corresponding width for these Latin characters is therefore 16)
Glyph height: 24
SimSun typeface; "yin" coding (in this table a set bit is an ink pixel);
row-by-row scan, forward order (high bit first)
'''
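# Illustrative decoder (added; not part of the original table): each glyph in
# ascii_16x24_dict is 48 bytes -- 24 rows of 2 bytes, 16 pixels per row, with
# the high bit of each 16-bit row mapping to the leftmost pixel.
def _render_16x24(glyph):
    for row in range(24):
        bits = (glyph[2 * row] << 8) | glyph[2 * row + 1]
        print(''.join('#' if bits & (0x8000 >> col) else '.' for col in range(16)))

# Example: _render_16x24(ascii_16x24_dict['A'])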
ascii_16x24_dict = {
' ' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'!' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0x80, 0x03, 0xC0, 0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'"' : (0x00, 0x00, 0x00, 0x00, 0x03, 0x18, 0x07, 0x38, 0x0E, 0x70, 0x18, 0xC0, 0x10, 0x80, 0x21, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'#' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x08, 0x04, 0x08, 0x04, 0x18, 0x0C, 0x10,
0x7F, 0xFE, 0x7F, 0xFE, 0x08, 0x10, 0x08, 0x10, 0x08, 0x10, 0x08, 0x10, 0x08, 0x10, 0x7F, 0xFE,
0x7F, 0xFE, 0x18, 0x20, 0x10, 0x20, 0x10, 0x20, 0x10, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'$' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x80, 0x01, 0xC0, 0x07, 0xB0, 0x19, 0x98, 0x19, 0x98,
0x19, 0xB8, 0x1D, 0x80, 0x0F, 0x80, 0x07, 0x80, 0x01, 0xC0, 0x01, 0xF0, 0x01, 0xB8, 0x01, 0x98,
0x39, 0x98, 0x39, 0x98, 0x31, 0x98, 0x19, 0xB0, 0x07, 0xC0, 0x01, 0x80, 0x01, 0x80, 0x00, 0x00),
'%' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x64, 0x10, 0x46, 0x10, 0xC6, 0x20,
0xC6, 0x40, 0xC6, 0x40, 0x46, 0x80, 0x44, 0x80, 0x3D, 0x18, 0x01, 0x64, 0x02, 0x46, 0x02, 0x42,
0x04, 0x42, 0x04, 0x42, 0x08, 0x46, 0x10, 0x64, 0x10, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'&' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x08, 0x80, 0x10, 0xC0, 0x10, 0xC0,
0x10, 0xC0, 0x19, 0x80, 0x19, 0x00, 0x1E, 0x20, 0x1C, 0x18, 0x2E, 0x10, 0x46, 0x10, 0x43, 0x10,
0xC3, 0xA0, 0xC1, 0xE0, 0x60, 0xE1, 0x31, 0xF2, 0x1E, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
"'" : (0x00, 0x00, 0x00, 0x00, 0x3C, 0x00, 0x3C, 0x00, 0x0C, 0x00, 0x0C, 0x00, 0x18, 0x00, 0x20, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'(' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x30, 0x00, 0x20, 0x00, 0x40, 0x00, 0xC0,
0x00, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x00, 0xC0, 0x00, 0xC0, 0x00, 0x60, 0x00, 0x20, 0x00, 0x10, 0x00, 0x08, 0x00, 0x04, 0x00, 0x00),
')' : (0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x10, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x03, 0x00,
0x03, 0x00, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x00,
0x03, 0x00, 0x03, 0x00, 0x06, 0x00, 0x04, 0x00, 0x08, 0x00, 0x10, 0x00, 0x60, 0x00, 0x00, 0x00),
'*' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x71, 0x8E, 0x79, 0xBC, 0x0F, 0x60, 0x01, 0x80, 0x0F, 0x70, 0x79, 0x9E, 0x61, 0x8E,
0x01, 0x80, 0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'+' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x80,
0x00, 0x80, 0x00, 0x80, 0x00, 0x80, 0x00, 0x80, 0x7F, 0xFE, 0x00, 0x80, 0x00, 0x80, 0x00, 0x80,
0x00, 0x80, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
',' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x3C, 0x00, 0x3C, 0x00, 0x0C, 0x00, 0x0C, 0x00, 0x18, 0x00, 0x20, 0x00),
'-' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7F, 0xFE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'.' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x3C, 0x00, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'/' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x08, 0x00, 0x10, 0x00, 0x10,
0x00, 0x20, 0x00, 0x40, 0x00, 0x40, 0x00, 0x80, 0x01, 0x80, 0x01, 0x00, 0x02, 0x00, 0x02, 0x00,
0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x10, 0x00, 0x20, 0x00, 0x20, 0x00, 0x40, 0x00, 0x00, 0x00),
'0' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x80, 0x06, 0x60, 0x1C, 0x18, 0x18, 0x18,
0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C,
0x30, 0x0C, 0x18, 0x18, 0x18, 0x18, 0x0C, 0x30, 0x03, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'1' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x01, 0x80, 0x07, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0xC0, 0x0F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'2' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xC0, 0x0C, 0x30, 0x10, 0x18, 0x30, 0x0C,
0x38, 0x0C, 0x38, 0x1C, 0x00, 0x18, 0x00, 0x30, 0x00, 0x60, 0x00, 0xC0, 0x01, 0x80, 0x02, 0x00,
0x04, 0x00, 0x18, 0x04, 0x30, 0x0C, 0x3F, 0xF8, 0x3F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'3' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x80, 0x08, 0x70, 0x10, 0x38, 0x38, 0x18,
0x18, 0x18, 0x00, 0x18, 0x00, 0x30, 0x00, 0xE0, 0x01, 0xE0, 0x00, 0x18, 0x00, 0x18, 0x00, 0x0C,
0x10, 0x0C, 0x38, 0x0C, 0x30, 0x18, 0x18, 0x30, 0x07, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'4' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0x70, 0x00, 0xF0, 0x00, 0xF0,
0x01, 0x70, 0x02, 0x70, 0x04, 0x70, 0x08, 0x70, 0x18, 0x70, 0x10, 0x70, 0x20, 0x70, 0x7F, 0xFE,
0x00, 0x70, 0x00, 0x70, 0x00, 0x70, 0x00, 0x70, 0x03, 0xFE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'5' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1F, 0xF8, 0x10, 0x00, 0x10, 0x00,
0x10, 0x00, 0x10, 0x00, 0x13, 0xC0, 0x1C, 0x30, 0x10, 0x18, 0x00, 0x0C, 0x00, 0x0C, 0x00, 0x0C,
0x38, 0x0C, 0x38, 0x0C, 0x30, 0x18, 0x18, 0x30, 0x07, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'6' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xE0, 0x06, 0x18, 0x08, 0x18, 0x18, 0x18,
0x30, 0x00, 0x30, 0x00, 0x30, 0xC0, 0x37, 0x38, 0x38, 0x1C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C,
0x30, 0x0C, 0x30, 0x0C, 0x18, 0x08, 0x0C, 0x10, 0x03, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'7' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1F, 0xFC, 0x30, 0x08, 0x20, 0x10,
0x20, 0x10, 0x00, 0x20, 0x00, 0x40, 0x00, 0x40, 0x00, 0x80, 0x01, 0x80, 0x01, 0x80, 0x03, 0x00,
0x03, 0x00, 0x03, 0x00, 0x03, 0x80, 0x03, 0x80, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'8' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xC0, 0x0C, 0x30, 0x30, 0x08, 0x30, 0x0C,
0x30, 0x0C, 0x30, 0x0C, 0x1C, 0x18, 0x0F, 0x20, 0x07, 0xE0, 0x18, 0x70, 0x30, 0x18, 0x20, 0x0C,
0x60, 0x0C, 0x60, 0x0C, 0x30, 0x0C, 0x18, 0x18, 0x07, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'9' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x0C, 0x70, 0x30, 0x18, 0x30, 0x18,
0x70, 0x0C, 0x70, 0x0C, 0x70, 0x0C, 0x30, 0x1C, 0x30, 0x3C, 0x1C, 0x6C, 0x07, 0x8C, 0x00, 0x1C,
0x00, 0x18, 0x00, 0x18, 0x18, 0x30, 0x18, 0x60, 0x0F, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
':' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01, 0x00, 0x03, 0x80, 0x03, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0x80, 0x03, 0x80, 0x03, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
';' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x03, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x03, 0x80, 0x01, 0x80, 0x01, 0x00, 0x00, 0x00),
'<' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x18, 0x00, 0x20, 0x00, 0xC0,
0x01, 0x80, 0x02, 0x00, 0x0C, 0x00, 0x18, 0x00, 0x30, 0x00, 0x18, 0x00, 0x04, 0x00, 0x03, 0x00,
0x01, 0x80, 0x00, 0x40, 0x00, 0x30, 0x00, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'=' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x3F, 0xFC, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7F, 0xFE, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'>' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0x18, 0x00, 0x04, 0x00, 0x03, 0x00,
0x01, 0x80, 0x00, 0x40, 0x00, 0x30, 0x00, 0x18, 0x00, 0x0C, 0x00, 0x18, 0x00, 0x20, 0x00, 0xC0,
0x01, 0x80, 0x02, 0x00, 0x0C, 0x00, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'?' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xE0, 0x18, 0x18, 0x30, 0x0C, 0x20, 0x0C,
0x30, 0x0C, 0x38, 0x0C, 0x00, 0x1C, 0x00, 0x38, 0x00, 0xE0, 0x01, 0x80, 0x01, 0x00, 0x01, 0x00,
0x01, 0x00, 0x00, 0x00, 0x01, 0x80, 0x03, 0x80, 0x03, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'@' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xE0, 0x06, 0x18, 0x08, 0x04, 0x10, 0x7E,
0x31, 0x92, 0x23, 0x12, 0x62, 0x32, 0x66, 0x32, 0x66, 0x32, 0x64, 0x22, 0x64, 0x62, 0x24, 0x64,
0x33, 0xB8, 0x30, 0x02, 0x18, 0x04, 0x0C, 0x18, 0x03, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'A' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x01, 0x80, 0x02, 0x80, 0x02, 0xC0,
0x02, 0xC0, 0x04, 0x40, 0x04, 0x60, 0x04, 0x60, 0x08, 0x60, 0x08, 0x30, 0x0F, 0xF0, 0x10, 0x30,
0x10, 0x18, 0x10, 0x18, 0x20, 0x18, 0x20, 0x1C, 0xF8, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'B' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xF0, 0x18, 0x18, 0x18, 0x0C,
0x18, 0x0C, 0x18, 0x0C, 0x18, 0x18, 0x18, 0x60, 0x1F, 0xB0, 0x18, 0x0C, 0x18, 0x0C, 0x18, 0x0E,
0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0C, 0x18, 0x18, 0xFF, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'C' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xE0, 0x06, 0x1C, 0x18, 0x04, 0x18, 0x06,
0x30, 0x02, 0x30, 0x00, 0x70, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x70, 0x00,
0x30, 0x02, 0x30, 0x04, 0x18, 0x04, 0x0C, 0x18, 0x03, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'D' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xE0, 0x18, 0x18, 0x18, 0x1C,
0x18, 0x0C, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0C,
0x18, 0x0C, 0x18, 0x1C, 0x18, 0x18, 0x18, 0x60, 0x7F, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'E' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xFC, 0x18, 0x04, 0x18, 0x02,
0x18, 0x00, 0x18, 0x10, 0x18, 0x10, 0x18, 0x30, 0x1F, 0xF0, 0x18, 0x10, 0x18, 0x10, 0x18, 0x00,
0x18, 0x00, 0x18, 0x02, 0x18, 0x04, 0x18, 0x0C, 0x7F, 0xFC, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'F' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xFC, 0x18, 0x06, 0x18, 0x02,
0x18, 0x00, 0x18, 0x00, 0x18, 0x10, 0x18, 0x10, 0x1F, 0xF0, 0x18, 0x10, 0x18, 0x10, 0x18, 0x00,
0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x7E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'G' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xC8, 0x0E, 0x38, 0x18, 0x18, 0x30, 0x08,
0x30, 0x04, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x20, 0x60, 0x1C, 0x60, 0x18,
0x30, 0x18, 0x30, 0x18, 0x18, 0x18, 0x0C, 0x18, 0x03, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'H' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x1C, 0x30, 0x0C, 0x30, 0x0C,
0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x3F, 0xFC, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C,
0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0xFC, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'I' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xF0, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x1F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'J' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xFC, 0x00, 0x60, 0x00, 0x60,
0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60,
0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x70, 0x40, 0x71, 0x80, 0x1E, 0x00),
'K' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3C, 0x3C, 0x18, 0x30, 0x18, 0x20,
0x18, 0x40, 0x18, 0x80, 0x19, 0x00, 0x1B, 0x80, 0x1D, 0x80, 0x18, 0xC0, 0x18, 0xC0, 0x18, 0x60,
0x18, 0x30, 0x18, 0x30, 0x18, 0x18, 0x18, 0x1C, 0x7E, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'L' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3C, 0x00, 0x18, 0x00, 0x18, 0x00,
0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00,
0x18, 0x00, 0x18, 0x02, 0x18, 0x06, 0x18, 0x0C, 0x7F, 0xFC, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'M' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x1E, 0x30, 0x1C, 0x38, 0x1C,
0x38, 0x2C, 0x28, 0x2C, 0x2C, 0x2C, 0x2C, 0x4C, 0x2C, 0x4C, 0x24, 0x4C, 0x26, 0x8C, 0x26, 0x8C,
0x22, 0x8C, 0x23, 0x0C, 0x23, 0x0C, 0x23, 0x0C, 0xF1, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'N' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78, 0x0E, 0x38, 0x04, 0x2C, 0x04,
0x26, 0x04, 0x27, 0x04, 0x23, 0x04, 0x21, 0x84, 0x21, 0xC4, 0x20, 0xC4, 0x20, 0x64, 0x20, 0x74,
0x20, 0x34, 0x20, 0x1C, 0x20, 0x1C, 0x20, 0x0C, 0xF8, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'O' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xC0, 0x0C, 0x30, 0x18, 0x18, 0x30, 0x0C,
0x30, 0x0C, 0x30, 0x0E, 0x70, 0x06, 0x70, 0x06, 0x70, 0x06, 0x70, 0x06, 0x70, 0x06, 0x30, 0x06,
0x30, 0x0C, 0x30, 0x0C, 0x18, 0x08, 0x0C, 0x30, 0x03, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'P' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xF8, 0x18, 0x0C, 0x18, 0x0E,
0x18, 0x06, 0x18, 0x06, 0x18, 0x0C, 0x18, 0x0C, 0x1F, 0xF0, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00,
0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00, 0x7E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'Q' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xC0, 0x0C, 0x30, 0x18, 0x18, 0x30, 0x0C,
0x30, 0x0C, 0x70, 0x0E, 0x60, 0x06, 0x60, 0x06, 0x60, 0x06, 0x60, 0x06, 0x60, 0x06, 0x70, 0x0E,
0x37, 0x8C, 0x38, 0xCC, 0x18, 0x78, 0x0C, 0x70, 0x03, 0xF0, 0x00, 0x3C, 0x00, 0x18, 0x00, 0x00),
'R' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xF8, 0x18, 0x1C, 0x18, 0x0C,
0x18, 0x0C, 0x18, 0x0C, 0x18, 0x18, 0x18, 0x30, 0x1F, 0xC0, 0x18, 0xC0, 0x18, 0x60, 0x18, 0x60,
0x18, 0x30, 0x18, 0x30, 0x18, 0x18, 0x18, 0x18, 0x7E, 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'S' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xC0, 0x18, 0x38, 0x30, 0x18, 0x20, 0x08,
0x20, 0x00, 0x30, 0x00, 0x38, 0x00, 0x0F, 0x00, 0x03, 0xE0, 0x00, 0x78, 0x00, 0x18, 0x00, 0x0C,
0x40, 0x0C, 0x20, 0x0C, 0x30, 0x08, 0x38, 0x18, 0x27, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'T' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xFC, 0x61, 0x84, 0x41, 0x82,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x07, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'U' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78, 0x0C, 0x30, 0x08, 0x30, 0x08,
0x30, 0x08, 0x30, 0x08, 0x30, 0x08, 0x30, 0x08, 0x30, 0x08, 0x30, 0x08, 0x30, 0x08, 0x30, 0x08,
0x30, 0x08, 0x30, 0x08, 0x18, 0x08, 0x18, 0x30, 0x07, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'V' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78, 0x0C, 0x18, 0x08, 0x18, 0x08,
0x18, 0x10, 0x1C, 0x10, 0x0C, 0x10, 0x0C, 0x20, 0x0E, 0x20, 0x06, 0x40, 0x06, 0x40, 0x06, 0x40,
0x03, 0x80, 0x03, 0x80, 0x03, 0x80, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'W' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x71, 0x86, 0x21, 0x84, 0x31, 0x84,
0x30, 0x84, 0x31, 0xC4, 0x31, 0xC8, 0x11, 0xC8, 0x1A, 0xC8, 0x1A, 0x48, 0x1A, 0x70, 0x1A, 0x70,
0x0C, 0x70, 0x0C, 0x70, 0x0C, 0x20, 0x0C, 0x20, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'X' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x1C, 0x18, 0x10, 0x0C, 0x10,
0x06, 0x20, 0x06, 0x40, 0x03, 0x40, 0x03, 0x80, 0x01, 0x80, 0x01, 0xC0, 0x02, 0xC0, 0x02, 0x60,
0x04, 0x70, 0x08, 0x30, 0x08, 0x18, 0x10, 0x1C, 0x7C, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'Y' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x1C, 0x18, 0x08, 0x18, 0x10,
0x0C, 0x10, 0x0C, 0x20, 0x06, 0x20, 0x06, 0x40, 0x03, 0x40, 0x03, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x07, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'Z' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1F, 0xFC, 0x30, 0x18, 0x20, 0x38,
0x00, 0x30, 0x00, 0x60, 0x00, 0xC0, 0x00, 0xC0, 0x01, 0x80, 0x03, 0x00, 0x07, 0x00, 0x06, 0x00,
0x0C, 0x00, 0x18, 0x04, 0x18, 0x04, 0x30, 0x1C, 0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'[' : (0x00, 0x00, 0x00, 0x00, 0x03, 0xFC, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00,
0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00,
0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0xFC, 0x00, 0x00),
'\\' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x18, 0x00, 0x08, 0x00, 0x0C, 0x00,
0x06, 0x00, 0x02, 0x00, 0x03, 0x00, 0x01, 0x00, 0x01, 0x80, 0x00, 0x80, 0x00, 0xC0, 0x00, 0x40,
0x00, 0x60, 0x00, 0x20, 0x00, 0x30, 0x00, 0x10, 0x00, 0x18, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04),
']' : (0x00, 0x00, 0x00, 0x00, 0x3F, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0,
0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0,
0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x00, 0xC0, 0x3F, 0xC0, 0x00, 0x00),
'^' : (0x00, 0x00, 0x00, 0x00, 0x03, 0xC0, 0x0C, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'_' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF),
'`' : (0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'a' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x0F, 0xE0, 0x30, 0x10, 0x30, 0x18, 0x00, 0x18, 0x03, 0xF8, 0x1C, 0x18,
0x30, 0x18, 0x70, 0x18, 0x70, 0x18, 0x30, 0x7A, 0x0F, 0x8C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'b' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x30, 0x00, 0x10, 0x00, 0x10, 0x00,
0x10, 0x00, 0x10, 0x00, 0x13, 0xF0, 0x14, 0x18, 0x18, 0x0C, 0x18, 0x0C, 0x10, 0x0C, 0x10, 0x0C,
0x10, 0x0C, 0x10, 0x0C, 0x18, 0x08, 0x1C, 0x10, 0x03, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'c' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x07, 0xE0, 0x0C, 0x18, 0x18, 0x18, 0x30, 0x00, 0x30, 0x00, 0x30, 0x00,
0x30, 0x00, 0x30, 0x04, 0x18, 0x08, 0x0C, 0x18, 0x03, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'd' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x28, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18,
0x00, 0x18, 0x00, 0x18, 0x07, 0xD8, 0x0C, 0x38, 0x18, 0x18, 0x30, 0x18, 0x30, 0x18, 0x30, 0x18,
0x30, 0x18, 0x30, 0x18, 0x18, 0x18, 0x0C, 0x7E, 0x03, 0x90, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'e' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x07, 0xE0, 0x08, 0x18, 0x18, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x3F, 0xF0,
0x30, 0x00, 0x30, 0x00, 0x18, 0x08, 0x0C, 0x10, 0x03, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'f' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xFE, 0x03, 0x06, 0x02, 0x00,
0x02, 0x00, 0x02, 0x00, 0x3F, 0xF8, 0x02, 0x00, 0x02, 0x00, 0x02, 0x00, 0x02, 0x00, 0x02, 0x00,
0x02, 0x00, 0x02, 0x00, 0x02, 0x00, 0x02, 0x00, 0x3F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'g' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x07, 0xCE, 0x08, 0x36, 0x18, 0x10, 0x10, 0x18, 0x18, 0x10, 0x08, 0x30,
0x0F, 0xC0, 0x10, 0x00, 0x1F, 0x80, 0x0B, 0xF8, 0x30, 0x0C, 0x20, 0x0C, 0x30, 0x0C, 0x0F, 0xF0),
'h' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00,
0x18, 0x00, 0x18, 0x00, 0x1B, 0xF0, 0x1C, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18,
0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x7C, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'i' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x80, 0x01, 0xC0, 0x01, 0x80, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x1F, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x1F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'j' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x38, 0x00, 0x30, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0xF0, 0x00, 0x10, 0x00, 0x10, 0x00, 0x10, 0x00, 0x10, 0x00, 0x10,
0x00, 0x10, 0x00, 0x10, 0x00, 0x10, 0x00, 0x10, 0x00, 0x10, 0x00, 0x30, 0x38, 0x20, 0x1F, 0xC0),
'k' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x00, 0x18, 0x00, 0x18, 0x00, 0x18, 0x00,
0x18, 0x00, 0x18, 0x00, 0x18, 0x7C, 0x18, 0x20, 0x18, 0x40, 0x19, 0x80, 0x1B, 0x80, 0x1C, 0xC0,
0x18, 0x60, 0x18, 0x60, 0x18, 0x30, 0x18, 0x18, 0x7C, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'l' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80,
0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x1F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'm' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0xEF, 0x3C, 0x31, 0xC6, 0x21, 0x86, 0x21, 0x86, 0x21, 0x86, 0x21, 0x86,
0x21, 0x86, 0x21, 0x86, 0x21, 0x86, 0x21, 0x86, 0xFB, 0xCF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'n' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x7B, 0xF0, 0x1C, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18,
0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x7C, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'o' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x07, 0xE0, 0x18, 0x18, 0x10, 0x0C, 0x30, 0x0C, 0x30, 0x0C, 0x30, 0x0C,
0x30, 0x0C, 0x30, 0x0C, 0x10, 0x08, 0x08, 0x10, 0x07, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'p' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x7B, 0xE0, 0x1C, 0x18, 0x18, 0x0C, 0x18, 0x0C, 0x18, 0x0C, 0x18, 0x0C,
0x18, 0x0C, 0x18, 0x0C, 0x18, 0x0C, 0x1C, 0x38, 0x1B, 0xC0, 0x18, 0x00, 0x18, 0x00, 0x3C, 0x00),
'q' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x07, 0xC8, 0x18, 0x38, 0x30, 0x18, 0x30, 0x18, 0x30, 0x18, 0x30, 0x18,
0x30, 0x18, 0x30, 0x18, 0x30, 0x18, 0x18, 0x38, 0x07, 0xD8, 0x00, 0x18, 0x00, 0x18, 0x00, 0x3C),
'r' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x7C, 0x7C, 0x04, 0x8E, 0x05, 0x00, 0x06, 0x00, 0x04, 0x00, 0x04, 0x00,
0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x7F, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
's' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x07, 0xF8, 0x08, 0x18, 0x18, 0x08, 0x18, 0x00, 0x0F, 0x00, 0x01, 0xF0,
0x00, 0x38, 0x00, 0x0C, 0x10, 0x0C, 0x18, 0x18, 0x17, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
't' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00,
0x03, 0x00, 0x07, 0x00, 0x3F, 0xF8, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00, 0x03, 0x00,
0x03, 0x00, 0x03, 0x00, 0x03, 0x04, 0x03, 0x18, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'u' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x70, 0x78, 0x10, 0x18, 0x10, 0x18, 0x10, 0x18, 0x10, 0x18, 0x10, 0x18,
0x10, 0x18, 0x10, 0x18, 0x18, 0x18, 0x1C, 0x7E, 0x07, 0x90, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'v' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x7E, 0x3E, 0x18, 0x08, 0x08, 0x10, 0x0C, 0x10, 0x0C, 0x20, 0x06, 0x20,
0x06, 0x40, 0x03, 0x40, 0x03, 0x80, 0x01, 0x80, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'w' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0xFB, 0xCF, 0x21, 0x84, 0x31, 0x84, 0x31, 0x88, 0x11, 0xC8, 0x1A, 0xC8,
0x1A, 0x50, 0x0A, 0x70, 0x0C, 0x70, 0x0C, 0x20, 0x04, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'x' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x3E, 0x7C, 0x0C, 0x10, 0x06, 0x20, 0x03, 0x40, 0x01, 0x80, 0x01, 0x80,
0x02, 0xC0, 0x04, 0x60, 0x0C, 0x30, 0x18, 0x18, 0x7C, 0x7E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'y' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x7E, 0x3E, 0x18, 0x18, 0x0C, 0x10, 0x0C, 0x30, 0x06, 0x20, 0x06, 0x20,
0x03, 0x40, 0x03, 0x40, 0x01, 0x80, 0x01, 0x80, 0x01, 0x00, 0x01, 0x00, 0x12, 0x00, 0x3C, 0x00),
'z' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x1F, 0xF8, 0x10, 0x30, 0x20, 0x60, 0x00, 0xC0, 0x01, 0x80, 0x03, 0x00,
0x06, 0x00, 0x06, 0x04, 0x0C, 0x08, 0x18, 0x18, 0x3F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
'{' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x20, 0x00, 0x20, 0x00, 0x60, 0x00, 0x60, 0x00, 0x60,
0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0xC0, 0x01, 0x80, 0x00, 0x40, 0x00, 0x60, 0x00, 0x60,
0x00, 0x60, 0x00, 0x60, 0x00, 0x60, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x1C, 0x00, 0x00),
'|' : (0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00,
0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00,
0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00),
'}' : (0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x04, 0x00, 0x04, 0x00, 0x06, 0x00, 0x06, 0x00, 0x06, 0x00,
0x06, 0x00, 0x06, 0x00, 0x06, 0x00, 0x03, 0x00, 0x01, 0x80, 0x02, 0x00, 0x06, 0x00, 0x06, 0x00,
0x06, 0x00, 0x06, 0x00, 0x06, 0x00, 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x38, 0x00, 0x00, 0x00),
'~' : (0x00, 0x00, 0x1E, 0x00, 0x21, 0x82, 0x40, 0xC4, 0x00, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
} | 93.549784 | 106 | 0.627927 | 6,892 | 43,220 | 3.936013 | 0.03105 | 0.670623 | 0.856416 | 0.986176 | 0.774542 | 0.723191 | 0.663951 | 0.62565 | 0.57463 | 0.543518 | 0 | 0.546942 | 0.1882 | 43,220 | 462 | 107 | 93.549784 | 0.226216 | 0.000717 | 0 | 0.069767 | 0 | 0 | 0.00475 | 0 | 0 | 0 | 0.622893 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2a44da1972fd047a5a5dbb2b6b419e86a286c901 | 142 | py | Python | CODES/13. Comparison operator/comparison-operator.py | eltechno/python_course | f74abac7df3f9f41864afd06479389260c29ea3a | [
"MIT"
] | 4 | 2019-05-04T00:33:25.000Z | 2021-05-29T20:37:59.000Z | CODES/13. Comparison operator/comparison-operator.py | eltechno/python_course | f74abac7df3f9f41864afd06479389260c29ea3a | [
"MIT"
] | null | null | null | CODES/13. Comparison operator/comparison-operator.py | eltechno/python_course | f74abac7df3f9f41864afd06479389260c29ea3a | [
"MIT"
] | 3 | 2020-05-05T13:14:28.000Z | 2022-02-03T16:18:37.000Z | """
> greater than
< less than
== equal to
!= not equal to
>= greater than or equal to
<= less than or equal to
"""
| 14.2 | 30 | 0.528169 | 19 | 142 | 3.947368 | 0.368421 | 0.373333 | 0.293333 | 0.346667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.359155 | 142 | 9 | 31 | 15.777778 | 0.824176 | 0.915493 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2a5a41296b0fe72b3d29a5fcd990b2a2c30b2ffb | 1,865 | py | Python | unit.py | MowHogz/Tetrix | 25a418b620dc57c5473c3a440c435a8953363a72 | [
"MIT"
] | null | null | null | unit.py | MowHogz/Tetrix | 25a418b620dc57c5473c3a440c435a8953363a72 | [
"MIT"
] | null | null | null | unit.py | MowHogz/Tetrix | 25a418b620dc57c5473c3a440c435a8953363a72 | [
"MIT"
] | null | null | null | import copy
from shape import shape
class unit:
    """The active falling piece: wraps a `shape` and moves it on a board matrix."""

    def __init__(self):
        self.bloc = shape(False)

    def start(self, matrix):
        # Draw the freshly spawned piece onto the board.
        self.bloc.insert(matrix)
    def clock(self, brd):
        # Rotate clockwise: lift the piece off the board, rotate a copy,
        # keep the rotation only if it still fits, then redraw.
        self.bloc.remove(brd)
        self.nbloc = shape(self.bloc)
        self.nbloc.shape = self.nbloc.twist_clock(brd)
        self.nbloc.update_hw()
        if self.nbloc.can_insert(brd):
            self.bloc = self.nbloc
        self.bloc.insert(brd)
    def cclock(self, brd):
        # Rotate counter-clockwise; same keep-only-if-it-fits logic as clock().
        self.bloc.remove(brd)
        self.nbloc = shape(self.bloc)
        self.nbloc.shape = self.nbloc.twist_cclock(brd)
        self.nbloc.update_hw()
        if self.nbloc.can_insert(brd):
            self.bloc = self.nbloc
        self.bloc.insert(brd)
    def down(self, matrix):
        # Try to move one row down; return whether the move fit.
        self.bloc.remove(matrix)
        self.block2 = copy.copy(self.bloc)
        self.block2.down()
        if self.block2.can_insert(matrix):
            self.bloc = self.block2
            can = True
        else:
            can = False
        self.bloc.insert(matrix)
        return can
    def right(self, matrix):
        # Try to move one column right; return whether the move fit.
        self.bloc.remove(matrix)
        self.block2 = copy.copy(self.bloc)
        self.block2.right()
        if self.block2.can_insert(matrix):
            self.bloc = self.block2
            can = True
        else:
            can = False
        self.bloc.insert(matrix)
        return can
    def left(self, matrix):
        # Try to move one column left; return whether the move fit.
        self.bloc.remove(matrix)
        self.block2 = copy.copy(self.bloc)
        self.block2.left()
        if self.block2.can_insert(matrix):
            self.bloc = self.block2
            can = True
        else:
            can = False
        self.bloc.insert(matrix)
return can | 28.692308 | 55 | 0.537802 | 226 | 1,865 | 4.380531 | 0.163717 | 0.177778 | 0.121212 | 0.109091 | 0.815152 | 0.815152 | 0.787879 | 0.758586 | 0.758586 | 0.715152 | 0 | 0.009959 | 0.353887 | 1,865 | 65 | 56 | 28.692308 | 0.811618 | 0.019839 | 0 | 0.733333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.116667 | false | 0.033333 | 0.033333 | 0 | 0.216667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2a7676a690e37391cbf03132599e6d41144122e6 | 43 | py | Python | src/lib/lib-dynload/__init__.py | DTenore/skulpt | 098d20acfb088d6db85535132c324b7ac2f2d212 | [
"MIT"
] | 2,671 | 2015-01-03T08:23:25.000Z | 2022-03-31T06:15:48.000Z | src/lib/lib-dynload/__init__.py | wakeupmuyunhe/skulpt | a8fb11a80fb6d7c016bab5dfe3712517a350b347 | [
"MIT"
] | 972 | 2015-01-05T08:11:00.000Z | 2022-03-29T13:47:15.000Z | src/lib/lib-dynload/__init__.py | wakeupmuyunhe/skulpt | a8fb11a80fb6d7c016bab5dfe3712517a350b347 | [
"MIT"
] | 845 | 2015-01-03T19:53:36.000Z | 2022-03-29T18:34:22.000Z | # Skulpt stub module: flags "lib-dynload" as not implemented when imported.
import _sk_fail; _sk_fail._("lib-dynload")
| 21.5 | 42 | 0.767442 | 7 | 43 | 4 | 0.714286 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069767 | 43 | 1 | 43 | 43 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0.255814 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
2af8c414c11f4db6c8339b5a23066ee9803284b2 | 9,685 | py | Python | pixela_letters/letters/alphabet_lowercase.py | ryosms/pixela-letters | 591d6faf3e0fa0380238fe7538304490a458bfb3 | [
"MIT"
] | 2 | 2018-11-13T07:33:43.000Z | 2018-11-17T02:13:44.000Z | pixela_letters/letters/alphabet_lowercase.py | ryosms/pixela-letters | 591d6faf3e0fa0380238fe7538304490a458bfb3 | [
"MIT"
] | null | null | null | pixela_letters/letters/alphabet_lowercase.py | ryosms/pixela-letters | 591d6faf3e0fa0380238fe7538304490a458bfb3 | [
"MIT"
] | null | null | null | class AlphabetLowercase(object):
@staticmethod
def a():
"""
. . . .
. . . .
. # # .
# . . #
. # . #
# . # #
. # . #
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 1, 0],
[1, 0, 0, 1],
[0, 1, 0, 1],
[1, 0, 1, 1],
[0, 1, 0, 1],
]
@staticmethod
def b():
"""
# . . .
# . . .
# . . .
# # # .
# . . #
# . . #
# # # .
"""
return [
[1, 0, 0, 0],
[1, 0, 0, 0],
[1, 0, 0, 0],
[1, 1, 1, 0],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 1, 1, 0],
]
@staticmethod
def c():
"""
. . . .
. . . .
. # # .
# . . #
# . . .
# . . #
. # # .
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 1, 0],
[1, 0, 0, 1],
[1, 0, 0, 0],
[1, 0, 0, 1],
[0, 1, 1, 0],
]
@staticmethod
def d():
"""
. . . #
. . . #
. . . #
. # # #
# . . #
# . . #
. # # #
"""
return [
[0, 0, 0, 1],
[0, 0, 0, 1],
[0, 0, 0, 1],
[0, 1, 1, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[0, 1, 1, 1],
]
@staticmethod
def e():
"""
. . . .
. . . .
. # # .
# . . #
# # # .
# . . .
. # # #
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 1, 0],
[1, 0, 0, 1],
[1, 1, 1, 0],
[1, 0, 0, 0],
[0, 1, 1, 1],
]
@staticmethod
def f():
"""
. . # .
. # . #
. # . .
# # # #
. # . .
. # . .
. # . .
"""
return [
[0, 0, 1, 0],
[0, 1, 0, 1],
[0, 1, 0, 0],
[1, 1, 1, 1],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
]
@staticmethod
def g():
"""
. # # #
# . . #
# . . #
. # # #
. . . #
# . . #
. # # .
"""
return [
[0, 1, 1, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[0, 1, 1, 1],
[0, 0, 0, 1],
[1, 0, 0, 1],
[0, 1, 1, 0],
]
@staticmethod
def h():
"""
# . . .
# . . .
# . . .
# # # .
# . . #
# . . #
# . . #
"""
return [
[1, 0, 0, 0],
[1, 0, 0, 0],
[1, 0, 0, 0],
[1, 1, 1, 0],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
]
@staticmethod
def i():
"""
        .
        #
        .
        #
        #
        #
        #
"""
return [
            [0],
            [1],
            [0],
            [1],
            [1],
            [1],
            [1],
]
@staticmethod
def j():
"""
. . .
. . #
. . .
. . #
. . #
# . #
. # .
"""
return [
[0, 0, 0],
[0, 0, 1],
[0, 0, 0],
[0, 0, 1],
[0, 0, 1],
[1, 0, 1],
[0, 1, 0],
]
@staticmethod
def k():
"""
# . . .
# . . .
# . . #
# . # .
# # . .
# . # .
# . . #
"""
return [
[1, 0, 0, 0],
[1, 0, 0, 0],
[1, 0, 0, 1],
[1, 0, 1, 0],
[1, 1, 0, 0],
[1, 0, 1, 0],
[1, 0, 0, 1],
]
@staticmethod
def l():
"""
# # .
. # .
. # .
. # .
. # .
. # .
. # #
"""
return [
[1, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 1],
]
@staticmethod
def m():
"""
. . . . .
. . . . .
. # . # .
# . # . #
# . # . #
# . # . #
# . # . #
"""
return [
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 1, 0, 1, 0],
[1, 0, 1, 0, 1],
[1, 0, 1, 0, 1],
[1, 0, 1, 0, 1],
[1, 0, 1, 0, 1],
]
@staticmethod
def n():
"""
. . . .
. . . .
. # # .
# . . #
# . . #
# . . #
# . . #
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 1, 0],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
]
@staticmethod
def o():
"""
. . . .
. . . .
. # # .
# . . #
# . . #
# . . #
. # # .
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 1, 0],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[0, 1, 1, 0],
]
@staticmethod
def p():
"""
. . . .
. . . .
# # # .
# . . #
# # # .
# . . .
# . . .
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 1, 1, 0],
[1, 0, 0, 1],
[1, 1, 1, 0],
[1, 0, 0, 0],
[1, 0, 0, 0],
]
@staticmethod
def q():
"""
. . . .
. . . .
. # # #
# . . #
. # # #
. . . #
. . . #
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 1, 1],
[1, 0, 0, 1],
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 0, 0, 1],
]
@staticmethod
def r():
"""
. . . .
. . . .
# . # #
# # . .
# . . .
# . . .
# . . .
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 0, 1, 1],
[1, 1, 0, 0],
[1, 0, 0, 0],
[1, 0, 0, 0],
[1, 0, 0, 0],
]
@staticmethod
def s():
"""
. . . .
. . . .
. # # #
# . . .
. # # .
. . . #
# # # .
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 1, 1],
[1, 0, 0, 0],
[0, 1, 1, 0],
[0, 0, 0, 1],
[1, 1, 1, 0],
]
@staticmethod
def t():
"""
. . . .
. . . .
. # . .
# # # #
. # . .
. # . #
. # # .
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 0, 0],
[1, 1, 1, 1],
[0, 1, 0, 0],
[0, 1, 0, 1],
[0, 1, 1, 0],
]
@staticmethod
def u():
"""
. . . .
. . . .
# . . #
# . . #
# . . #
# . . #
. # # #
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[0, 1, 1, 1],
]
@staticmethod
def v():
"""
. . .
. . .
# . #
# . #
# . #
# . #
. # .
"""
return [
[0, 0, 0],
[0, 0, 0],
[1, 0, 1],
[1, 0, 1],
[1, 0, 1],
[1, 0, 1],
[0, 1, 0],
]
@staticmethod
def w():
"""
. . . . .
. . . . .
# . # . #
# . # . #
# . # . #
# . # . #
. # . # .
"""
return [
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[1, 0, 1, 0, 1],
[1, 0, 1, 0, 1],
[1, 0, 1, 0, 1],
[1, 0, 1, 0, 1],
[0, 1, 0, 1, 0],
]
@staticmethod
def x():
"""
. . . . .
. . . . .
# . . . #
. # . # .
. . # . .
. # . # .
# . . . #
"""
return [
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[1, 0, 0, 0, 1],
[0, 1, 0, 1, 0],
[0, 0, 1, 0, 0],
[0, 1, 0, 1, 0],
[1, 0, 0, 0, 1],
]
@staticmethod
def y():
"""
. . . .
. . . .
# . . #
# . . #
. # . #
. . # .
# # # .
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 0, 0, 1],
[1, 0, 0, 1],
[0, 1, 0, 1],
[0, 0, 1, 0],
[1, 1, 1, 0],
]
@staticmethod
def z():
"""
. . . .
. . . .
# # # #
. . . #
. # # .
# . . .
# # # #
"""
return [
[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 1],
[0, 1, 1, 0],
[1, 0, 0, 0],
[1, 1, 1, 1],
]
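# Illustrative helper (added; not part of the library): render any letter,
# since each method above returns a list of rows of 0/1 pixel flags.
def _print_letter(rows):
    for row in rows:
        print(''.join('#' if cell else '.' for cell in row))

# Example: _print_letter(AlphabetLowercase.a())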
| 17.869004 | 45 | 0.134951 | 814 | 9,685 | 1.605651 | 0.041769 | 0.387146 | 0.358072 | 0.321347 | 0.901301 | 0.889824 | 0.850803 | 0.838562 | 0.781178 | 0.728386 | 0 | 0.207149 | 0.647599 | 9,685 | 541 | 46 | 17.902033 | 0.175798 | 0.148787 | 0 | 0.797153 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.092527 | true | 0 | 0 | 0 | 0.188612 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 15 |
2d61a4180e1879ab7251e0c1b5af6de265441b1a | 6,729 | py | Python | z2/part2/interactive/jm/random_normal_1/171191047.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 1 | 2020-04-16T12:13:47.000Z | 2020-04-16T12:13:47.000Z | z2/part2/interactive/jm/random_normal_1/171191047.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:50:15.000Z | 2020-05-19T14:58:30.000Z | z2/part2/interactive/jm/random_normal_1/171191047.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:45:13.000Z | 2020-06-09T19:18:31.000Z | from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 171191047
"""
"""
random actions, total chaos
"""
board = gamma_new(8, 5, 6, 3)
assert board is not None
assert gamma_move(board, 1, 3, 2) == 1
assert gamma_move(board, 2, 3, 1) == 1
assert gamma_move(board, 3, 3, 1) == 0
assert gamma_move(board, 3, 4, 4) == 1
assert gamma_move(board, 4, 0, 5) == 0
assert gamma_move(board, 4, 0, 3) == 1
assert gamma_move(board, 5, 0, 0) == 1
assert gamma_move(board, 5, 1, 1) == 1
assert gamma_move(board, 6, 7, 3) == 1
assert gamma_move(board, 6, 6, 0) == 1
assert gamma_move(board, 1, 2, 5) == 0
assert gamma_move(board, 1, 7, 1) == 1
assert gamma_move(board, 2, 6, 1) == 1
assert gamma_move(board, 3, 2, 5) == 0
assert gamma_move(board, 3, 0, 1) == 1
assert gamma_move(board, 4, 0, 3) == 0
assert gamma_move(board, 5, 0, 4) == 1
assert gamma_move(board, 6, 1, 4) == 1
assert gamma_move(board, 6, 2, 0) == 0
assert gamma_free_fields(board, 6) == 7
assert gamma_move(board, 1, 1, 2) == 1
assert gamma_move(board, 2, 4, 0) == 1
assert gamma_move(board, 3, 5, 0) == 1
assert gamma_move(board, 3, 2, 4) == 0
assert gamma_free_fields(board, 3) == 5
assert gamma_move(board, 4, 3, 5) == 0
assert gamma_move(board, 4, 2, 1) == 1
assert gamma_free_fields(board, 4) == 23
assert gamma_move(board, 5, 3, 2) == 0
assert gamma_move(board, 6, 4, 1) == 0
assert gamma_move(board, 6, 7, 1) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 1, 4, 2) == 1
assert gamma_move(board, 2, 2, 5) == 0
assert gamma_move(board, 3, 0, 3) == 0
assert gamma_move(board, 3, 3, 0) == 0
assert gamma_busy_fields(board, 3) == 3
assert gamma_move(board, 4, 5, 2) == 1
assert gamma_move(board, 4, 6, 4) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 7, 3) == 0
assert gamma_move(board, 5, 2, 2) == 0
assert gamma_move(board, 6, 1, 1) == 0
assert gamma_move(board, 1, 5, 1) == 0
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 2, 6, 4) == 0
assert gamma_move(board, 3, 4, 2) == 0
assert gamma_move(board, 3, 4, 2) == 0
assert gamma_move(board, 5, 7, 4) == 0
assert gamma_move(board, 6, 0, 3) == 0
assert gamma_move(board, 6, 5, 3) == 0
assert gamma_golden_move(board, 6, 1, 1) == 0
assert gamma_move(board, 1, 4, 2) == 0
assert gamma_move(board, 2, 4, 5) == 0
assert gamma_move(board, 2, 7, 4) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_move(board, 3, 5, 2) == 0
assert gamma_move(board, 4, 7, 2) == 0
assert gamma_move(board, 5, 3, 3) == 0
assert gamma_move(board, 5, 1, 1) == 0
assert gamma_move(board, 6, 2, 0) == 0
assert gamma_move(board, 6, 7, 4) == 1
assert gamma_move(board, 1, 0, 7) == 0
assert gamma_move(board, 2, 3, 3) == 0
assert gamma_move(board, 3, 0, 1) == 0
assert gamma_move(board, 3, 7, 4) == 0
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 0, 0) == 0
assert gamma_move(board, 5, 3, 4) == 0
assert gamma_move(board, 5, 0, 3) == 0
assert gamma_free_fields(board, 5) == 1
assert gamma_move(board, 6, 1, 5) == 0
assert gamma_move(board, 6, 3, 1) == 0
assert gamma_busy_fields(board, 6) == 4
assert gamma_golden_move(board, 6, 1, 6) == 0
assert gamma_move(board, 2, 4, 6) == 0
assert gamma_move(board, 2, 4, 0) == 0
assert gamma_move(board, 3, 1, 2) == 0
assert gamma_move(board, 3, 4, 1) == 0
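# gamma_board() renders the 8x5 board as text: one line per row, top row
# (y = 4) first, '.' for an empty field and the owner's digit otherwise.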
board540164325 = gamma_board(board)
assert board540164325 is not None
assert board540164325 == ("56..3..6\n"
"4......6\n"
".1.114..\n"
"3542..21\n"
"5...236.\n")
del board540164325
board540164325 = None
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_move(board, 4, 2, 1) == 0
assert gamma_move(board, 5, 0, 7) == 0
assert gamma_move(board, 6, 4, 2) == 0
assert gamma_move(board, 6, 5, 4) == 0
assert gamma_golden_possible(board, 6) == 1
assert gamma_move(board, 1, 3, 2) == 0
assert gamma_move(board, 1, 7, 4) == 0
assert gamma_free_fields(board, 1) == 8
board921837182 = gamma_board(board)
assert board921837182 is not None
assert board921837182 == ("56..3..6\n"
"4......6\n"
".1.114..\n"
"3542..21\n"
"5...236.\n")
del board921837182
board921837182 = None
assert gamma_golden_move(board, 4, 0, 5) == 0
assert gamma_move(board, 5, 4, 2) == 0
assert gamma_move(board, 5, 5, 3) == 0
assert gamma_free_fields(board, 5) == 1
assert gamma_move(board, 6, 0, 2) == 0
assert gamma_move(board, 6, 5, 4) == 0
assert gamma_move(board, 1, 0, 2) == 1
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_busy_fields(board, 3) == 3
assert gamma_move(board, 4, 4, 1) == 0
assert gamma_move(board, 5, 3, 3) == 0
assert gamma_move(board, 5, 4, 4) == 0
assert gamma_busy_fields(board, 5) == 3
assert gamma_move(board, 6, 0, 2) == 0
assert gamma_golden_move(board, 6, 4, 1) == 0
assert gamma_move(board, 1, 2, 7) == 0
assert gamma_move(board, 1, 1, 2) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_move(board, 3, 2, 2) == 0
assert gamma_free_fields(board, 3) == 4
assert gamma_golden_move(board, 3, 3, 0) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_move(board, 4, 2, 4) == 0
board867705791 = gamma_board(board)
assert board867705791 is not None
assert board867705791 == ("56..3..6\n"
"4......6\n"
"11.114..\n"
"3542..21\n"
"5...236.\n")
del board867705791
board867705791 = None
assert gamma_move(board, 5, 3, 5) == 0
assert gamma_move(board, 6, 2, 2) == 0
assert gamma_move(board, 6, 3, 2) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 1, 4) == 0
assert gamma_move(board, 2, 2, 6) == 0
assert gamma_move(board, 3, 3, 4) == 1
assert gamma_move(board, 4, 2, 2) == 1
assert gamma_free_fields(board, 4) == 6
assert gamma_move(board, 5, 2, 6) == 0
assert gamma_move(board, 5, 0, 0) == 0
assert gamma_golden_move(board, 5, 4, 1) == 0
assert gamma_move(board, 6, 6, 2) == 0
assert gamma_move(board, 1, 2, 6) == 0
assert gamma_free_fields(board, 1) == 6
assert gamma_move(board, 2, 3, 1) == 0
assert gamma_busy_fields(board, 2) == 3
assert gamma_golden_move(board, 2, 1, 0) == 0
assert gamma_move(board, 3, 4, 0) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_move(board, 4, 7, 4) == 0
assert gamma_move(board, 5, 3, 3) == 0
assert gamma_busy_fields(board, 5) == 3
assert gamma_move(board, 6, 5, 2) == 0
assert gamma_move(board, 1, 6, 2) == 0
assert gamma_move(board, 1, 5, 0) == 0
assert gamma_move(board, 2, 4, 5) == 0
assert gamma_move(board, 2, 1, 4) == 0
gamma_delete(board)
| 33.477612 | 46 | 0.648982 | 1,266 | 6,729 | 3.302528 | 0.036335 | 0.373595 | 0.419756 | 0.559675 | 0.862473 | 0.842143 | 0.770151 | 0.41569 | 0.299211 | 0.282707 | 0 | 0.135436 | 0.184723 | 6,729 | 200 | 47 | 33.645 | 0.626686 | 0 | 0 | 0.186813 | 0 | 0 | 0.022587 | 0 | 0 | 0 | 0 | 0 | 0.818681 | 1 | 0 | false | 0 | 0.005495 | 0 | 0.005495 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2d9b8b6e5b98b48a1226b0f153bf06890a78bd18 | 3,583 | py | Python | gymnasiums/tests/tests_gymnasium_detail_view.py | hbuyse/dj-gymnasiums | 39f590dc703eec01c753ea54d7f4afd06f81a582 | [
"MIT"
] | null | null | null | gymnasiums/tests/tests_gymnasium_detail_view.py | hbuyse/dj-gymnasiums | 39f590dc703eec01c753ea54d7f4afd06f81a582 | [
"MIT"
] | null | null | null | gymnasiums/tests/tests_gymnasium_detail_view.py | hbuyse/dj-gymnasiums | 39f590dc703eec01c753ea54d7f4afd06f81a582 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# coding=utf-8
"""Tests the views."""
# Django
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
# Current django project
from gymnasiums.models import Gymnasium
class TestVcnAccountDetailViewAsAnonymous(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.gymnasium = Gymnasium.objects.create(
name='Watteau',
address='37 rue Lequesne',
city='Nogent-Sur-Marne',
zip_code=94130,
phone='0100000000',
surface=123,
capacity=456
)
def test_get(self):
"""Tests."""
r = self.client.get(reverse('gymnasiums:detail', kwargs={'pk': self.gymnasium.id}))
self.assertEqual(r.status_code, 200)
class TestVcnAccountDetailViewAsLogged(TestCase):
"""Tests."""
def setUp(self):
"""Setup for al the following tests."""
self.dict = {
'username': "hbuyse",
'password': "usermodel",
'first_name': "Henri",
'last_name': "Buyse"
}
self.user = get_user_model().objects.create_user(**self.dict)
self.gymnasium = Gymnasium.objects.create(
name='Watteau',
address='37 rue Lequesne',
city='Nogent-Sur-Marne',
zip_code=94130,
phone='0100000000',
surface=123,
capacity=456
)
    def test_get(self):
        """Logged-in users can access the gymnasium detail view."""
        self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
        r = self.client.get(reverse('gymnasiums:detail', kwargs={'pk': self.gymnasium.id}))
        self.assertEqual(r.status_code, 200)
class TestVcnAccountDetailViewAsStaff(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.dict = {
'username': "hbuyse",
'password': "usermodel",
'first_name': "Henri",
'last_name': "Buyse",
'is_staff': True
}
self.staff = get_user_model().objects.create_user(**self.dict)
self.gymnasium = Gymnasium.objects.create(
name='Watteau',
address='37 rue Lequesne',
city='Nogent-Sur-Marne',
zip_code=94130,
phone='0100000000',
surface=123,
capacity=456
)
def test_get(self):
"""Tests."""
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
r = self.client.get(reverse('gymnasiums:detail', kwargs={'pk': self.gymnasium.id}))
self.assertEqual(r.status_code, 200)
class TestVcnAccountDetailViewAsSuperuser(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.dict = {
'username': "hbuyse",
'password': "usermodel",
'first_name': "Henri",
'last_name': "Buyse",
'email': 'toto@example.com'
}
self.superuser = get_user_model().objects.create_superuser(**self.dict)
self.gymnasium = Gymnasium.objects.create(
name='Watteau',
address='37 rue Lequesne',
city='Nogent-Sur-Marne',
zip_code=94130,
phone='0100000000',
surface=123,
capacity=456
)
def test_get(self):
"""Tests."""
self.assertTrue(self.client.login(username=self.dict['username'], password=self.dict['password']))
r = self.client.get(reverse('gymnasiums:detail', kwargs={'pk': self.gymnasium.id}))
self.assertEqual(r.status_code, 200)
| 28.895161 | 106 | 0.559866 | 361 | 3,583 | 5.473684 | 0.240997 | 0.040486 | 0.032895 | 0.04251 | 0.786437 | 0.761134 | 0.761134 | 0.745951 | 0.745951 | 0.745951 | 0 | 0.041568 | 0.295004 | 3,583 | 123 | 107 | 29.130081 | 0.740697 | 0.053586 | 0 | 0.752941 | 0 | 0 | 0.153221 | 0 | 0 | 0 | 0 | 0 | 0.070588 | 1 | 0.094118 | false | 0.058824 | 0.047059 | 0 | 0.188235 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
2d9ebb174dc50e95e4a1ee1dd19f7449783361bf | 51,510 | py | Python | ent/python/antchain_sdk_ent/client.py | alipay/antchain-openapi-prod-sdk | f78549e5135d91756093bd88d191ca260b28e083 | [
"MIT"
] | 6 | 2020-06-28T06:40:50.000Z | 2022-02-25T11:02:18.000Z | ent/python/antchain_sdk_ent/client.py | alipay/antchain-openapi-prod-sdk | f78549e5135d91756093bd88d191ca260b28e083 | [
"MIT"
] | null | null | null | ent/python/antchain_sdk_ent/client.py | alipay/antchain-openapi-prod-sdk | f78549e5135d91756093bd88d191ca260b28e083 | [
"MIT"
] | 6 | 2020-06-30T09:29:03.000Z | 2022-01-07T10:42:22.000Z | # -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
import time
from Tea.exceptions import TeaException, UnretryableException
from Tea.request import TeaRequest
from Tea.core import TeaCore
from antchain_alipay_util.antchain_utils import AntchainUtils
from typing import Dict
from antchain_sdk_ent import models as ent_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_tea_util import models as util_models
from alibabacloud_rpc_util.client import Client as RPCUtilClient
class Client:
_endpoint: str = None
_region_id: str = None
_access_key_id: str = None
_access_key_secret: str = None
_protocol: str = None
_user_agent: str = None
_read_timeout: int = None
_connect_timeout: int = None
_http_proxy: str = None
_https_proxy: str = None
_socks_5proxy: str = None
_socks_5net_work: str = None
_no_proxy: str = None
_max_idle_conns: int = None
_security_token: str = None
_max_idle_time_millis: int = None
_keep_alive_duration_millis: int = None
_max_requests: int = None
_max_requests_per_host: int = None
def __init__(
self,
config: ent_models.Config,
):
"""
        Initialize the client with the given Config.
@param config: config contains the necessary information to create a client
"""
if UtilClient.is_unset(config):
raise TeaException({
'code': 'ParameterMissing',
'message': "'config' can not be unset"
})
self._access_key_id = config.access_key_id
self._access_key_secret = config.access_key_secret
self._security_token = config.security_token
self._endpoint = config.endpoint
self._protocol = config.protocol
self._user_agent = config.user_agent
self._read_timeout = UtilClient.default_number(config.read_timeout, 20000)
self._connect_timeout = UtilClient.default_number(config.connect_timeout, 20000)
self._http_proxy = config.http_proxy
self._https_proxy = config.https_proxy
self._no_proxy = config.no_proxy
self._socks_5proxy = config.socks_5proxy
self._socks_5net_work = config.socks_5net_work
self._max_idle_conns = UtilClient.default_number(config.max_idle_conns, 60000)
self._max_idle_time_millis = UtilClient.default_number(config.max_idle_time_millis, 5)
self._keep_alive_duration_millis = UtilClient.default_number(config.keep_alive_duration_millis, 5000)
self._max_requests = UtilClient.default_number(config.max_requests, 100)
self._max_requests_per_host = UtilClient.default_number(config.max_requests_per_host, 100)
def do_request(
self,
version: str,
action: str,
protocol: str,
method: str,
pathname: str,
request: dict,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> dict:
"""
Encapsulate the request and invoke the network call
@param version: API version
@param action: API name
@param protocol: http or https
@param method: HTTP method, e.g. GET
@param pathname: pathname of the API
@param request: the request parameters
@param runtime: options controlling details of the API call, such as retry count
@return: the response
"""
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'maxIdleTimeMillis': self._max_idle_time_millis,
'keepAliveDurationMillis': self._keep_alive_duration_millis,
'maxRequests': self._max_requests,
'maxRequestsPerHost': self._max_requests_per_host,
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl,
# revenue model
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
_request.protocol = UtilClient.default_string(self._protocol, protocol)
_request.method = method
_request.pathname = pathname
_request.query = {
'method': action,
'version': version,
'sign_type': 'HmacSHA1',
'req_time': AntchainUtils.get_timestamp(),
'req_msg_id': AntchainUtils.get_nonce(),
'access_key': self._access_key_id,
'base_sdk_version': 'TeaSDK-2.0',
'sdk_version': '1.4.17'
}
if not UtilClient.empty(self._security_token):
_request.query['security_token'] = self._security_token
_request.headers = TeaCore.merge({
'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),
'user-agent': UtilClient.get_user_agent(self._user_agent)
}, headers)
tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))
_request.body = UtilClient.to_form_string(tmp)
_request.headers['content-type'] = 'application/x-www-form-urlencoded'
signed_param = TeaCore.merge(_request.query,
RPCUtilClient.query(request))
_request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
raw = UtilClient.read_as_string(_response.body)
obj = UtilClient.parse_json(raw)
res = UtilClient.assert_as_map(obj)
resp = UtilClient.assert_as_map(res.get('response'))
if AntchainUtils.has_error(raw, self._access_key_secret):
raise TeaException({
'message': resp.get('result_msg'),
'data': resp,
'code': resp.get('result_code')
})
return resp
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
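# --- Hedged illustration (editor's addition, not part of the generated SDK) ---
# do_request above signs the merged query/request params with HmacSHA1 via
# AntchainUtils.get_signature, whose implementation is not shown in this file.
# The helper below is one plausible shape for such a signer (sorted key=value
# canonicalization, HMAC-SHA1, base64); every detail is an assumption.
@staticmethod
def _example_hmac_sha1_signature(params, secret):
    # Hypothetical helper: NOT the real AntchainUtils implementation.
    import base64
    import hashlib
    import hmac
    # Canonicalize: sort keys and join key=value pairs, skipping None values.
    canonical = '&'.join(
        '%s=%s' % (k, params[k]) for k in sorted(params) if params[k] is not None
    )
    digest = hmac.new(secret.encode('utf-8'), canonical.encode('utf-8'), hashlib.sha1).digest()
    return base64.b64encode(digest).decode('utf-8')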
async def do_request_async(
self,
version: str,
action: str,
protocol: str,
method: str,
pathname: str,
request: dict,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> dict:
"""
Encapsulate the request and invoke the network call
@param version: API version
@param action: API name
@param protocol: http or https
@param method: HTTP method, e.g. GET
@param pathname: pathname of the API
@param request: the request parameters
@param runtime: options controlling details of the API call, such as retry count
@return: the response
"""
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'maxIdleTimeMillis': self._max_idle_time_millis,
'keepAliveDurationMillis': self._keep_alive_duration_millis,
'maxRequests': self._max_requests,
'maxRequestsPerHost': self._max_requests_per_host,
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl,
# revenue model
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
_request.protocol = UtilClient.default_string(self._protocol, protocol)
_request.method = method
_request.pathname = pathname
_request.query = {
'method': action,
'version': version,
'sign_type': 'HmacSHA1',
'req_time': AntchainUtils.get_timestamp(),
'req_msg_id': AntchainUtils.get_nonce(),
'access_key': self._access_key_id,
'base_sdk_version': 'TeaSDK-2.0',
'sdk_version': '1.4.17'
}
if not UtilClient.empty(self._security_token):
_request.query['security_token'] = self._security_token
_request.headers = TeaCore.merge({
'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),
'user-agent': UtilClient.get_user_agent(self._user_agent)
}, headers)
tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))
_request.body = UtilClient.to_form_string(tmp)
_request.headers['content-type'] = 'application/x-www-form-urlencoded'
signed_param = TeaCore.merge(_request.query,
RPCUtilClient.query(request))
_request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)
_last_request = _request
_response = await TeaCore.async_do_action(_request, _runtime)
raw = await UtilClient.read_as_string_async(_response.body)
obj = UtilClient.parse_json(raw)
res = UtilClient.assert_as_map(obj)
resp = UtilClient.assert_as_map(res.get('response'))
if AntchainUtils.has_error(raw, self._access_key_secret):
raise TeaException({
'message': resp.get('result_msg'),
'data': resp,
'code': resp.get('result_code')
})
return resp
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
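# --- Hedged illustration (editor's addition, not part of the generated SDK) ---
# Both request methods above share one retry pattern: TeaCore.allow_retry gates
# the loop, get_backoff_time supplies the delay, and is_retryable decides
# whether an exception deserves another attempt. The sketch below distills that
# pattern under assumed semantics; it is not the Tea library's actual code.
@staticmethod
def _example_retry_loop(call, is_retryable, max_attempts=3, backoff_seconds=0):
    last_exception = None
    for attempt in range(max_attempts):
        if attempt > 0 and backoff_seconds > 0:
            time.sleep(backoff_seconds)  # `time` is imported at module level
        try:
            return call()
        except Exception as e:
            if not is_retryable(e):
                raise
            last_exception = e
    # Mirrors the UnretryableException raised above once attempts are exhausted.
    raise last_exception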
def query_customer_project(
self,
request: ent_models.QueryCustomerProjectRequest,
) -> ent_models.QueryCustomerProjectResponse:
"""
Description: Query all projects the user participates in
Summary: Query of the user's participating projects
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_customer_project_ex(request, headers, runtime)
async def query_customer_project_async(
self,
request: ent_models.QueryCustomerProjectRequest,
) -> ent_models.QueryCustomerProjectResponse:
"""
Description: Query all projects the user participates in
Summary: Query of the user's participating projects
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_customer_project_ex_async(request, headers, runtime)
def query_customer_project_ex(
self,
request: ent_models.QueryCustomerProjectRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryCustomerProjectResponse:
"""
Description: Query all projects the user participates in
Summary: Query of the user's participating projects
"""
UtilClient.validate_model(request)
return ent_models.QueryCustomerProjectResponse().from_map(
self.do_request('1.0', 'antchain.ent.customer.project.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_customer_project_ex_async(
self,
request: ent_models.QueryCustomerProjectRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryCustomerProjectResponse:
"""
Description: Query all projects the user participates in
Summary: Query of the user's participating projects
"""
UtilClient.validate_model(request)
return ent_models.QueryCustomerProjectResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.customer.project.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
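# Note: each API below follows the same generated four-method pattern
# (sync, async, sync-ex, async-ex); a usage sketch appears at the end of
# this file.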
def query_customer_data(
self,
request: ent_models.QueryCustomerDataRequest,
) -> ent_models.QueryCustomerDataResponse:
"""
Description: Interface for querying user data
Summary: User data query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_customer_data_ex(request, headers, runtime)
async def query_customer_data_async(
self,
request: ent_models.QueryCustomerDataRequest,
) -> ent_models.QueryCustomerDataResponse:
"""
Description: Interface for querying user data
Summary: User data query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_customer_data_ex_async(request, headers, runtime)
def query_customer_data_ex(
self,
request: ent_models.QueryCustomerDataRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryCustomerDataResponse:
"""
Description: Interface for querying user data
Summary: User data query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryCustomerDataResponse().from_map(
self.do_request('1.0', 'antchain.ent.customer.data.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_customer_data_ex_async(
self,
request: ent_models.QueryCustomerDataRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryCustomerDataResponse:
"""
Description: Interface for querying user data
Summary: User data query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryCustomerDataResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.customer.data.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def get_user_sharecode(
self,
request: ent_models.GetUserSharecodeRequest,
) -> ent_models.GetUserSharecodeResponse:
"""
Description: Create a share code for a user
Summary: User share-code creation interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_user_sharecode_ex(request, headers, runtime)
async def get_user_sharecode_async(
self,
request: ent_models.GetUserSharecodeRequest,
) -> ent_models.GetUserSharecodeResponse:
"""
Description: Create a share code for a user
Summary: User share-code creation interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_user_sharecode_ex_async(request, headers, runtime)
def get_user_sharecode_ex(
self,
request: ent_models.GetUserSharecodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.GetUserSharecodeResponse:
"""
Description: Create a share code for a user
Summary: User share-code creation interface
"""
UtilClient.validate_model(request)
return ent_models.GetUserSharecodeResponse().from_map(
self.do_request('1.0', 'antchain.ent.user.sharecode.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def get_user_sharecode_ex_async(
self,
request: ent_models.GetUserSharecodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.GetUserSharecodeResponse:
"""
Description: Create a share code for a user
Summary: User share-code creation interface
"""
UtilClient.validate_model(request)
return ent_models.GetUserSharecodeResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.user.sharecode.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def send_user_projectordermsg(
self,
request: ent_models.SendUserProjectordermsgRequest,
) -> ent_models.SendUserProjectordermsgResponse:
"""
Description: Send a user's project order data
Summary: User project-order message sending interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.send_user_projectordermsg_ex(request, headers, runtime)
async def send_user_projectordermsg_async(
self,
request: ent_models.SendUserProjectordermsgRequest,
) -> ent_models.SendUserProjectordermsgResponse:
"""
Description: Send a user's project order data
Summary: User project-order message sending interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.send_user_projectordermsg_ex_async(request, headers, runtime)
def send_user_projectordermsg_ex(
self,
request: ent_models.SendUserProjectordermsgRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.SendUserProjectordermsgResponse:
"""
Description: Send a user's project order data
Summary: User project-order message sending interface
"""
UtilClient.validate_model(request)
return ent_models.SendUserProjectordermsgResponse().from_map(
self.do_request('1.0', 'antchain.ent.user.projectordermsg.send', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def send_user_projectordermsg_ex_async(
self,
request: ent_models.SendUserProjectordermsgRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.SendUserProjectordermsgResponse:
"""
Description: Send a user's project order data
Summary: User project-order message sending interface
"""
UtilClient.validate_model(request)
return ent_models.SendUserProjectordermsgResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.user.projectordermsg.send', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_project_info(
self,
request: ent_models.QueryProjectInfoRequest,
) -> ent_models.QueryProjectInfoResponse:
"""
Description: Query project information
Summary: Project information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_project_info_ex(request, headers, runtime)
async def query_project_info_async(
self,
request: ent_models.QueryProjectInfoRequest,
) -> ent_models.QueryProjectInfoResponse:
"""
Description: Query project information
Summary: Project information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_project_info_ex_async(request, headers, runtime)
def query_project_info_ex(
self,
request: ent_models.QueryProjectInfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryProjectInfoResponse:
"""
Description: Query project information
Summary: Project information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryProjectInfoResponse().from_map(
self.do_request('1.0', 'antchain.ent.project.info.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_project_info_ex_async(
self,
request: ent_models.QueryProjectInfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryProjectInfoResponse:
"""
Description: Query project information
Summary: Project information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryProjectInfoResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.project.info.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_user_info(
self,
request: ent_models.QueryUserInfoRequest,
) -> ent_models.QueryUserInfoResponse:
"""
Description: Query user information
Summary: User information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_user_info_ex(request, headers, runtime)
async def query_user_info_async(
self,
request: ent_models.QueryUserInfoRequest,
) -> ent_models.QueryUserInfoResponse:
"""
Description: Query user information
Summary: User information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_user_info_ex_async(request, headers, runtime)
def query_user_info_ex(
self,
request: ent_models.QueryUserInfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryUserInfoResponse:
"""
Description: Query user information
Summary: User information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryUserInfoResponse().from_map(
self.do_request('1.0', 'antchain.ent.user.info.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_user_info_ex_async(
self,
request: ent_models.QueryUserInfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryUserInfoResponse:
"""
Description: Query user information
Summary: User information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryUserInfoResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.user.info.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_user_tokenallocationdetail(
self,
request: ent_models.QueryUserTokenallocationdetailRequest,
) -> ent_models.QueryUserTokenallocationdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) earned by a user
Summary: User fan-token earning detail query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_user_tokenallocationdetail_ex(request, headers, runtime)
async def query_user_tokenallocationdetail_async(
self,
request: ent_models.QueryUserTokenallocationdetailRequest,
) -> ent_models.QueryUserTokenallocationdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) earned by a user
Summary: User fan-token earning detail query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_user_tokenallocationdetail_ex_async(request, headers, runtime)
def query_user_tokenallocationdetail_ex(
self,
request: ent_models.QueryUserTokenallocationdetailRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryUserTokenallocationdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) earned by a user
Summary: User fan-token earning detail query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryUserTokenallocationdetailResponse().from_map(
self.do_request('1.0', 'antchain.ent.user.tokenallocationdetail.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_user_tokenallocationdetail_ex_async(
self,
request: ent_models.QueryUserTokenallocationdetailRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryUserTokenallocationdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) earned by a user
Summary: User fan-token earning detail query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryUserTokenallocationdetailResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.user.tokenallocationdetail.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_user_tokenredeemdetail(
self,
request: ent_models.QueryUserTokenredeemdetailRequest,
) -> ent_models.QueryUserTokenredeemdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) redeemed by a user
Summary: User fan-token redemption detail query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_user_tokenredeemdetail_ex(request, headers, runtime)
async def query_user_tokenredeemdetail_async(
self,
request: ent_models.QueryUserTokenredeemdetailRequest,
) -> ent_models.QueryUserTokenredeemdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) redeemed by a user
Summary: User fan-token redemption detail query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_user_tokenredeemdetail_ex_async(request, headers, runtime)
def query_user_tokenredeemdetail_ex(
self,
request: ent_models.QueryUserTokenredeemdetailRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryUserTokenredeemdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) redeemed by a user
Summary: User fan-token redemption detail query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryUserTokenredeemdetailResponse().from_map(
self.do_request('1.0', 'antchain.ent.user.tokenredeemdetail.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_user_tokenredeemdetail_ex_async(
self,
request: ent_models.QueryUserTokenredeemdetailRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryUserTokenredeemdetailResponse:
"""
Description: Query the details of fan tokens (粉丝粒) redeemed by a user
Summary: User fan-token redemption detail query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryUserTokenredeemdetailResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.user.tokenredeemdetail.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def exec_event_report(
self,
request: ent_models.ExecEventReportRequest,
) -> ent_models.ExecEventReportResponse:
"""
Description: Report an event
Summary: Event reporting
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.exec_event_report_ex(request, headers, runtime)
async def exec_event_report_async(
self,
request: ent_models.ExecEventReportRequest,
) -> ent_models.ExecEventReportResponse:
"""
Description: Report an event
Summary: Event reporting
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.exec_event_report_ex_async(request, headers, runtime)
def exec_event_report_ex(
self,
request: ent_models.ExecEventReportRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecEventReportResponse:
"""
Description: Report an event
Summary: Event reporting
"""
UtilClient.validate_model(request)
return ent_models.ExecEventReportResponse().from_map(
self.do_request('1.0', 'antchain.ent.event.report.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def exec_event_report_ex_async(
self,
request: ent_models.ExecEventReportRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecEventReportResponse:
"""
Description: Report an event
Summary: Event reporting
"""
UtilClient.validate_model(request)
return ent_models.ExecEventReportResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.event.report.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_event_info(
self,
request: ent_models.QueryEventInfoRequest,
) -> ent_models.QueryEventInfoResponse:
"""
Description: Query event information
Summary: Event information query
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_event_info_ex(request, headers, runtime)
async def query_event_info_async(
self,
request: ent_models.QueryEventInfoRequest,
) -> ent_models.QueryEventInfoResponse:
"""
Description: Query event information
Summary: Event information query
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_event_info_ex_async(request, headers, runtime)
def query_event_info_ex(
self,
request: ent_models.QueryEventInfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryEventInfoResponse:
"""
Description: Query event information
Summary: Event information query
"""
UtilClient.validate_model(request)
return ent_models.QueryEventInfoResponse().from_map(
self.do_request('1.0', 'antchain.ent.event.info.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_event_info_ex_async(
self,
request: ent_models.QueryEventInfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryEventInfoResponse:
"""
Description: Query event information
Summary: Event information query
"""
UtilClient.validate_model(request)
return ent_models.QueryEventInfoResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.event.info.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def import_nft_meta(
self,
request: ent_models.ImportNftMetaRequest,
) -> ent_models.ImportNftMetaResponse:
"""
Description: Import NFT asset metadata for Alibaba Auction (阿里拍卖)
Summary: Alibaba Auction NFT asset metadata import
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.import_nft_meta_ex(request, headers, runtime)
async def import_nft_meta_async(
self,
request: ent_models.ImportNftMetaRequest,
) -> ent_models.ImportNftMetaResponse:
"""
Description: Import NFT asset metadata for Alibaba Auction (阿里拍卖)
Summary: Alibaba Auction NFT asset metadata import
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.import_nft_meta_ex_async(request, headers, runtime)
def import_nft_meta_ex(
self,
request: ent_models.ImportNftMetaRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ImportNftMetaResponse:
"""
Description: Import NFT asset metadata for Alibaba Auction (阿里拍卖)
Summary: Alibaba Auction NFT asset metadata import
"""
UtilClient.validate_model(request)
return ent_models.ImportNftMetaResponse().from_map(
self.do_request('1.0', 'antchain.ent.nft.meta.import', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def import_nft_meta_ex_async(
self,
request: ent_models.ImportNftMetaRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ImportNftMetaResponse:
"""
Description: Import NFT asset metadata for Alibaba Auction (阿里拍卖)
Summary: Alibaba Auction NFT asset metadata import
"""
UtilClient.validate_model(request)
return ent_models.ImportNftMetaResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.nft.meta.import', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def exec_nft_transfer(
self,
request: ent_models.ExecNftTransferRequest,
) -> ent_models.ExecNftTransferResponse:
"""
Description: Persist the NFT asset order and transfer it on chain
Summary: NFT asset transfer
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.exec_nft_transfer_ex(request, headers, runtime)
async def exec_nft_transfer_async(
self,
request: ent_models.ExecNftTransferRequest,
) -> ent_models.ExecNftTransferResponse:
"""
Description: Persist the NFT asset order and transfer it on chain
Summary: NFT asset transfer
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.exec_nft_transfer_ex_async(request, headers, runtime)
def exec_nft_transfer_ex(
self,
request: ent_models.ExecNftTransferRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecNftTransferResponse:
"""
Description: Persist the NFT asset order and transfer it on chain
Summary: NFT asset transfer
"""
UtilClient.validate_model(request)
return ent_models.ExecNftTransferResponse().from_map(
self.do_request('1.0', 'antchain.ent.nft.transfer.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def exec_nft_transfer_ex_async(
self,
request: ent_models.ExecNftTransferRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecNftTransferResponse:
"""
Description: Persist the NFT asset order and transfer it on chain
Summary: NFT asset transfer
"""
UtilClient.validate_model(request)
return ent_models.ExecNftTransferResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.nft.transfer.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def get_isv_sharecode(
self,
request: ent_models.GetIsvSharecodeRequest,
) -> ent_models.GetIsvSharecodeResponse:
"""
Description: Interface for an external ISV to obtain a share code
Summary: Obtain an ISV share code
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_isv_sharecode_ex(request, headers, runtime)
async def get_isv_sharecode_async(
self,
request: ent_models.GetIsvSharecodeRequest,
) -> ent_models.GetIsvSharecodeResponse:
"""
Description: Interface for an external ISV to obtain a share code
Summary: Obtain an ISV share code
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_isv_sharecode_ex_async(request, headers, runtime)
def get_isv_sharecode_ex(
self,
request: ent_models.GetIsvSharecodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.GetIsvSharecodeResponse:
"""
Description: Interface for an external ISV to obtain a share code
Summary: Obtain an ISV share code
"""
UtilClient.validate_model(request)
return ent_models.GetIsvSharecodeResponse().from_map(
self.do_request('1.0', 'antchain.ent.isv.sharecode.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def get_isv_sharecode_ex_async(
self,
request: ent_models.GetIsvSharecodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.GetIsvSharecodeResponse:
"""
Description: Interface for an external ISV to obtain a share code
Summary: Obtain an ISV share code
"""
UtilClient.validate_model(request)
return ent_models.GetIsvSharecodeResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.isv.sharecode.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def add_provision(
self,
request: ent_models.AddProvisionRequest,
) -> ent_models.AddProvisionResponse:
"""
Description: Interface for adding provisions (reserve funds)
Summary: Provision top-up interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.add_provision_ex(request, headers, runtime)
async def add_provision_async(
self,
request: ent_models.AddProvisionRequest,
) -> ent_models.AddProvisionResponse:
"""
Description: Interface for adding provisions (reserve funds)
Summary: Provision top-up interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.add_provision_ex_async(request, headers, runtime)
def add_provision_ex(
self,
request: ent_models.AddProvisionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.AddProvisionResponse:
"""
Description: Interface for adding provisions (reserve funds)
Summary: Provision top-up interface
"""
UtilClient.validate_model(request)
return ent_models.AddProvisionResponse().from_map(
self.do_request('1.0', 'antchain.ent.provision.add', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def add_provision_ex_async(
self,
request: ent_models.AddProvisionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.AddProvisionResponse:
"""
Description: Interface for adding provisions (reserve funds)
Summary: Provision top-up interface
"""
UtilClient.validate_model(request)
return ent_models.AddProvisionResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.provision.add', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def reclaim_provision_extraprovision(
self,
request: ent_models.ReclaimProvisionExtraprovisionRequest,
) -> ent_models.ReclaimProvisionExtraprovisionResponse:
"""
Description: Reclaim excess on-chain provisions
Summary: Reclaim excess on-chain provisions
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.reclaim_provision_extraprovision_ex(request, headers, runtime)
async def reclaim_provision_extraprovision_async(
self,
request: ent_models.ReclaimProvisionExtraprovisionRequest,
) -> ent_models.ReclaimProvisionExtraprovisionResponse:
"""
Description: Reclaim excess on-chain provisions
Summary: Reclaim excess on-chain provisions
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.reclaim_provision_extraprovision_ex_async(request, headers, runtime)
def reclaim_provision_extraprovision_ex(
self,
request: ent_models.ReclaimProvisionExtraprovisionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ReclaimProvisionExtraprovisionResponse:
"""
Description: Reclaim excess on-chain provisions
Summary: Reclaim excess on-chain provisions
"""
UtilClient.validate_model(request)
return ent_models.ReclaimProvisionExtraprovisionResponse().from_map(
self.do_request('1.0', 'antchain.ent.provision.extraprovision.reclaim', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def reclaim_provision_extraprovision_ex_async(
self,
request: ent_models.ReclaimProvisionExtraprovisionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ReclaimProvisionExtraprovisionResponse:
"""
Description: Reclaim excess on-chain provisions
Summary: Reclaim excess on-chain provisions
"""
UtilClient.validate_model(request)
return ent_models.ReclaimProvisionExtraprovisionResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.provision.extraprovision.reclaim', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def exec_token_redeem(
self,
request: ent_models.ExecTokenRedeemRequest,
) -> ent_models.ExecTokenRedeemResponse:
"""
Description: On-chain token redemption interface
Summary: On-chain token redemption interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.exec_token_redeem_ex(request, headers, runtime)
async def exec_token_redeem_async(
self,
request: ent_models.ExecTokenRedeemRequest,
) -> ent_models.ExecTokenRedeemResponse:
"""
Description: On-chain token redemption interface
Summary: On-chain token redemption interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.exec_token_redeem_ex_async(request, headers, runtime)
def exec_token_redeem_ex(
self,
request: ent_models.ExecTokenRedeemRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecTokenRedeemResponse:
"""
Description: On-chain token redemption interface
Summary: On-chain token redemption interface
"""
UtilClient.validate_model(request)
return ent_models.ExecTokenRedeemResponse().from_map(
self.do_request('1.0', 'antchain.ent.token.redeem.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def exec_token_redeem_ex_async(
self,
request: ent_models.ExecTokenRedeemRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecTokenRedeemResponse:
"""
Description: On-chain token redemption interface
Summary: On-chain token redemption interface
"""
UtilClient.validate_model(request)
return ent_models.ExecTokenRedeemResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.token.redeem.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def get_tpp_sharecode(
self,
request: ent_models.GetTppSharecodeRequest,
) -> ent_models.GetTppSharecodeResponse:
"""
Description: Interface for obtaining a dedicated invitation code for Taopiaopiao (淘票票)
Summary: Taopiaopiao dedicated invitation-code retrieval interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_tpp_sharecode_ex(request, headers, runtime)
async def get_tpp_sharecode_async(
self,
request: ent_models.GetTppSharecodeRequest,
) -> ent_models.GetTppSharecodeResponse:
"""
Description: Interface for obtaining a dedicated invitation code for Taopiaopiao (淘票票)
Summary: Taopiaopiao dedicated invitation-code retrieval interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_tpp_sharecode_ex_async(request, headers, runtime)
def get_tpp_sharecode_ex(
self,
request: ent_models.GetTppSharecodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.GetTppSharecodeResponse:
"""
Description: Interface for obtaining a dedicated invitation code for Taopiaopiao (淘票票)
Summary: Taopiaopiao dedicated invitation-code retrieval interface
"""
UtilClient.validate_model(request)
return ent_models.GetTppSharecodeResponse().from_map(
self.do_request('1.0', 'antchain.ent.tpp.sharecode.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def get_tpp_sharecode_ex_async(
self,
request: ent_models.GetTppSharecodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.GetTppSharecodeResponse:
"""
Description: Interface for obtaining a dedicated invitation code for Taopiaopiao (淘票票)
Summary: Taopiaopiao dedicated invitation-code retrieval interface
"""
UtilClient.validate_model(request)
return ent_models.GetTppSharecodeResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.tpp.sharecode.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_tpp_allinfo(
self,
request: ent_models.QueryTppAllinfoRequest,
) -> ent_models.QueryTppAllinfoResponse:
"""
Description: Interface for querying all required information for Taopiaopiao (淘票票)
Summary: Taopiaopiao full required-information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_tpp_allinfo_ex(request, headers, runtime)
async def query_tpp_allinfo_async(
self,
request: ent_models.QueryTppAllinfoRequest,
) -> ent_models.QueryTppAllinfoResponse:
"""
Description: Interface for querying all required information for Taopiaopiao (淘票票)
Summary: Taopiaopiao full required-information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_tpp_allinfo_ex_async(request, headers, runtime)
def query_tpp_allinfo_ex(
self,
request: ent_models.QueryTppAllinfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryTppAllinfoResponse:
"""
Description: Interface for querying all required information for Taopiaopiao (淘票票)
Summary: Taopiaopiao full required-information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryTppAllinfoResponse().from_map(
self.do_request('1.0', 'antchain.ent.tpp.allinfo.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_tpp_allinfo_ex_async(
self,
request: ent_models.QueryTppAllinfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryTppAllinfoResponse:
"""
Description: Interface for querying all required information for Taopiaopiao (淘票票)
Summary: Taopiaopiao full required-information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryTppAllinfoResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.tpp.allinfo.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_tpp_participationinfo(
self,
request: ent_models.QueryTppParticipationinfoRequest,
) -> ent_models.QueryTppParticipationinfoResponse:
"""
Description: Participation-information query interface for Taopiaopiao (淘票票)
Summary: Taopiaopiao participation-information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_tpp_participationinfo_ex(request, headers, runtime)
async def query_tpp_participationinfo_async(
self,
request: ent_models.QueryTppParticipationinfoRequest,
) -> ent_models.QueryTppParticipationinfoResponse:
"""
Description: Participation-information query interface for Taopiaopiao (淘票票)
Summary: Taopiaopiao participation-information query interface
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_tpp_participationinfo_ex_async(request, headers, runtime)
def query_tpp_participationinfo_ex(
self,
request: ent_models.QueryTppParticipationinfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryTppParticipationinfoResponse:
"""
Description: Participation-information query interface for Taopiaopiao (淘票票)
Summary: Taopiaopiao participation-information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryTppParticipationinfoResponse().from_map(
self.do_request('1.0', 'antchain.ent.tpp.participationinfo.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_tpp_participationinfo_ex_async(
self,
request: ent_models.QueryTppParticipationinfoRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.QueryTppParticipationinfoResponse:
"""
Description: Participation-information query interface for Taopiaopiao (淘票票)
Summary: Taopiaopiao participation-information query interface
"""
UtilClient.validate_model(request)
return ent_models.QueryTppParticipationinfoResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.tpp.participationinfo.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def exec_withdraw_create(
self,
request: ent_models.ExecWithdrawCreateRequest,
) -> ent_models.ExecWithdrawCreateResponse:
"""
Description: Interface for submitting a redemption (withdrawal) request
Summary: Submit a redemption request
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.exec_withdraw_create_ex(request, headers, runtime)
async def exec_withdraw_create_async(
self,
request: ent_models.ExecWithdrawCreateRequest,
) -> ent_models.ExecWithdrawCreateResponse:
"""
Description: Interface for submitting a redemption (withdrawal) request
Summary: Submit a redemption request
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.exec_withdraw_create_ex_async(request, headers, runtime)
def exec_withdraw_create_ex(
self,
request: ent_models.ExecWithdrawCreateRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecWithdrawCreateResponse:
"""
Description: Interface for submitting a redemption (withdrawal) request
Summary: Submit a redemption request
"""
UtilClient.validate_model(request)
return ent_models.ExecWithdrawCreateResponse().from_map(
self.do_request('1.0', 'antchain.ent.withdraw.create.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def exec_withdraw_create_ex_async(
self,
request: ent_models.ExecWithdrawCreateRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ent_models.ExecWithdrawCreateResponse:
"""
Description: Interface for submitting a redemption (withdrawal) request
Summary: Submit a redemption request
"""
UtilClient.validate_model(request)
return ent_models.ExecWithdrawCreateResponse().from_map(
await self.do_request_async('1.0', 'antchain.ent.withdraw.create.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
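# --- Hedged usage sketch (editor's addition, not part of the generated SDK) ---
# Construct a client and invoke one API. The Config attributes match the ones
# read in __init__ above; the credential values are placeholders, and the
# fields of QueryCustomerProjectRequest are not shown in this file, so the
# request construction below is an assumption about the generated Tea models.
if __name__ == '__main__':
    config = ent_models.Config()
    config.access_key_id = 'your-access-key-id'          # placeholder
    config.access_key_secret = 'your-access-key-secret'  # placeholder
    config.endpoint = 'openapi.antchain.antgroup.com'    # default gateway host
    client = Client(config)
    request = ent_models.QueryCustomerProjectRequest()   # fill fields as needed
    print(client.query_customer_project(request))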
| 38.070953 | 171 | 0.637896 | 4,886 | 51,510 | 6.461523 | 0.066312 | 0.057584 | 0.044154 | 0.080517 | 0.950176 | 0.92623 | 0.912103 | 0.886161 | 0.828577 | 0.820183 | 0 | 0.003683 | 0.267346 | 51,510 | 1,352 | 172 | 38.099112 | 0.832878 | 0.043603 | 0 | 0.707743 | 1 | 0 | 0.072402 | 0.033551 | 0 | 0 | 0 | 0 | 0.004362 | 1 | 0.045802 | false | 0 | 0.030534 | 0 | 0.187568 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fad6e10cabbf9f4ff78a2e676102c3cfdae5d1e3 | 16,758 | py | Python | cloak/serverapi/tests/test_cli.py | encryptme/private-end-points | 4fcefa27d84407af284ea9c2340d1d98509d7f8b | [
"MIT"
] | 5 | 2018-01-30T20:18:14.000Z | 2021-06-27T13:37:09.000Z | cloak/serverapi/tests/test_cli.py | encryptme/private-end-points | 4fcefa27d84407af284ea9c2340d1d98509d7f8b | [
"MIT"
] | 2 | 2020-11-23T12:41:24.000Z | 2021-01-25T11:13:12.000Z | cloak/serverapi/tests/test_cli.py | encryptme/private-end-points | 4fcefa27d84407af284ea9c2340d1d98509d7f8b | [
"MIT"
] | 4 | 2019-06-14T17:36:03.000Z | 2022-02-01T06:09:19.000Z | from functools import partial
import json
import os.path
import shutil
import tempfile
from six.moves.configparser import NoOptionError
from cloak.serverapi.tests.base import TestCase
class RegisterTestCase(TestCase):
def test_register(self):
self.assertIsNone(self.session.target_id)
returncode = self.main([
'register',
'-k', 'secret_onetime_reg_key',
'-n', 'srv1.team.example.com',
])
self.assertEqual(returncode, 0)
self.assertEqual(self.session.target_id, self.def_target_id)
self.assertNotEqual(self.stdout.getvalue(), '')
def test_register_quiet(self):
returncode = self.main([
'--quiet',
'register',
'-k', 'secret_onetime_reg_key',
'-n', 'srv1.team.example.com',
])
self.assertEqual(returncode, 0)
self.assertEqual(self.session.target_id, self.def_target_id)
self.assertEqual(self.stdout.getvalue(), '')
def test_register_auto_name(self):
returncode = self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.assertEqual(returncode, 0)
self.assertIsNotNone(self.session.name)
def test_already_registered(self):
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
returncode = self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.assertNotEqual(returncode, 0)
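# --- Hedged sketch (editor's addition, not part of the original test suite) ---
# The TestCase base imported from cloak.serverapi.tests.base is not shown in
# this file; the tests assume it provides `main`, `session`, `stdout`, and
# `stderr`. The harness below is one plausible shape for such a base; the
# `entry_point` hook is a hypothetical name, not the project's actual API.
import io
import unittest
from contextlib import redirect_stdout, redirect_stderr

class ExampleCLIHarness(unittest.TestCase):
    # Subclasses would point this at the real CLI main(argv) -> returncode.
    entry_point = staticmethod(lambda argv: 0)

    def setUp(self):
        super().setUp()
        self.stdout = io.StringIO()
        self.stderr = io.StringIO()

    def main(self, argv):
        # Run the CLI entry point with stdout/stderr captured for assertions.
        with redirect_stdout(self.stdout), redirect_stderr(self.stderr):
            return self.entry_point(argv)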
class InfoTestCase(TestCase):
def test_not_registered(self):
returncode = self.main(['info'])
self.assertNotEqual(returncode, 0)
def test_auth_fail(self):
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.session.auth_token = 'bogus'
returncode = self.main(['info'])
self.assertNotEqual(returncode, 0)
def test_print_info(self):
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
returncode = self.main(['info'])
self.assertEqual(returncode, 0)
self.assertIn(self.session.server_id, self.stdout.getvalue())
def test_print_json(self):
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.stdout.seek(0)
self.stdout.truncate()
returncode = self.main(['info', '--json'])
self.assertEqual(returncode, 0)
json.loads(self.stdout.getvalue())
class UpdateTestCase(TestCase):
def test_update_noop(self):
self.main([
'register',
'-k', 'secret_onetime_reg_key',
'-n', 'server1.example.com',
])
returncode = self.main(['update'])
self.assertEqual(returncode, 0)
def test_update(self):
self.main([
'register',
'-k', 'secret_onetime_reg_key',
'-n', 'server1.example.com',
])
returncode = self.main([
'update',
'-n', 'server2.example.com',
'-a', '2050-01-01',
'-j',
])
self.assertEqual(returncode, 0)
self.assertIn('server2.example.com', self.stdout.getvalue())
self.assertIn('2050-01-01', self.stdout.getvalue())
class CSRTestCase(TestCase):
def test_existing_key(self):
with tempfile.NamedTemporaryFile('wb', 0) as key_file:
key_file.write(self.privkey_rsa_2048)
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
returncode = self.main([
'req', '-k', key_file.name,
])
self.assertEqual(returncode, 0)
self.assertIsNotNone(self.session.csr)
def test_new_key(self):
key_dir = tempfile.mkdtemp()
self.addCleanup(partial(shutil.rmtree, key_dir))
key_path = os.path.join(key_dir, 'privkey.pem')
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
returncode = self.main([
'req', '-k', key_path
])
self.assertEqual(returncode, 0)
self.assertIsNotNone(self.session.csr)
def test_bogus_key(self):
with tempfile.NamedTemporaryFile('wb', 0) as key_file:
key_file.write(b'bogus')
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
returncode = self.main([
'req', '-k', key_file.name,
])
self.assertNotEqual(returncode, 0)
self.assertIsNone(self.session.csr)
self.assertIn(key_file.name, self.stderr.getvalue())
def test_bogus_key_path(self):
key_dir = tempfile.mkdtemp()
self.addCleanup(partial(shutil.rmtree, key_dir))
key_path = os.path.join(key_dir, 'bogus', 'privkey.pem')
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
returncode = self.main([
'req', '-k', key_path
])
self.assertNotEqual(returncode, 0)
self.assertIsNone(self.session.csr)
self.assertIn(key_path, self.stderr.getvalue())
privkey_rsa_2048 = b'-----BEGIN PRIVATE KEY-----\nMIIEwAIBADANBgkqhkiG9w0BAQEFAASCBKowggSmAgEAAoIBAQC7eUdE6MAUMmpF\nku2W9MQnU6V+1q17stlITuNF8zhb4HbplX+Lx8soxnRvY6Hn/uP6IIIi3jNim7vv\nruG53VO/CTSiHgg4wf3rO9Lpy8wIIgQwoUBDrOqsGYlJYK8sc1gEsROA9YdAYSgJ\nrm9luF2bmRho92eFCqzq/2dIgNqT5I2WnwvZSW9cup+BzULfwvXF3QXAzTphGhf+\nsDTdgyd7v3dHRHiyVTva3FICuWgtklDBqcP7GrX/TofPal3/Q6asgHc3UxhWPznY\nFYf73SMpwk7SVFXybW0i8kh0oOk6VVODMThrQnHNpU3sfwqc+ZEFMgFWnOg5sh22\nWbQWZQjdAgMBAAECggEBALg610mlfFScsoiKecb14+lNrv21U6iSuinvtDJicIkB\nTXoAOuYPQdthIrzv6QSGHF0KIzjGqTKHHinM7u/qy0iZcEq8PpIgOTo4gOzWJDv9\nyaZMYE3hGIBlW99rDtocw2tg5Gy/W9ltYJ4a+Ee65OpqiW1layp3sjQBJus+DQ51\nRNAefYOo8UrQGFCzDUgH+QUOCTImbD6sVttDI0DojDM8slPOjdb3ZMRO+esQS0Q/\nzoO3f9dCe0VDBRbgJvRGMs+z/TzqqhSqbZwJDFs3e3ItZyXdz8eaGxfsH4et2PFd\nbAjSuBScbYXQWMTYCNdFgNVQ+5hGkAnolxduAFylnLUCgYEA7DBdp67Q4IpNtuKE\nh1OEQZ5pW7Qi/KpHyqXIsiscsBCgV6wdU38C+KW16gz3Sowc7Wry+cRjJQzF5Cqj\nxw8GcO/+OSqmjOTeHBcPKs2Pp4YnZ+0Bo0jfKSg8/gN/aHi607VavusOLIPzgv6V\nr62RViE5rQHK0waZBG6WQrXn+e8CgYEAyzLcoZcsMOLuk18wszQ77tcoPf9DTsIo\n5hM+NeVzm3fit7LG0TonRC7DZYoBAaQJuxujUXqcu6jTIPeIndRPc2FuWhPQPWzC\nJ/S2dy0WQ5bhvhh7Jw9Ko/2a5SsdP0yxuCwQwIUw1O8zawpWW6xeL1P6O9k01fGr\n6AS4osFg5fMCgYEAjuMzxZ4c/7qsCVhAlR4RhSEw3Cm+gN0DUbW6FQ+/60QjvOaD\nV2AfjA20YEQ31wGs/nUVScVltaRklAS30FVmsCyAwFTtLY/IT3Yj1uFFZzPh4x2f\nQAl1+JA/Ve0Hx0xCupGctKO/j27EgxtBs2Zt5o1zNxc+fSwgpm3AudsS3EECgYEA\nnA7zDhPJd75CFuMrxuYeBYAvQvYyHmHWAWXUCJaxpDx93jGqqnQsRhxYKzrDLRxr\n8Mz4MJKnnyS5Cf+yZ+zwHCA/HWVMMHC/6Onz3TG+gKh3tYSdyNDgtXQHq2viaYQg\nld8Z+pIQf+k6J0JoMr3+FAE+FQrrnkiei3Jcz3sPTWsCgYEAkSSunhic1isgO3xo\nX2G2WjWQwOEVlb2XqK5d7aCNwElAvwtKtU78qJxWVWIiStsRNNyq8pTba9DNH9hy\n+v8hSlVExYFjTm1HlpLqFOu3J60vh0A/76O8QT5Pn3gLs6H8OsIxiIK+edqxbO3K\nCberEki3Q3eUI5fua0HCyZrkP/A=\n-----END PRIVATE KEY-----\n'
class PKITestCase(TestCase):
def setUp(self):
super().setUp()
self.out_path = tempfile.mkdtemp()
self.server_cert_path = os.path.join(self.out_path, 'server.pem')
self.hook_path = os.path.join(self.out_path, 'changed.txt')
self.addCleanup(partial(shutil.rmtree, self.out_path))
def test_get_empty_pki(self):
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
returncode = self.main([
'pki', '-o', self.out_path
])
self.assertEqual(returncode, 0)
self.assertPKINotSaved()
with self.assertRaises(NoOptionError):
self.get_config().get('serverapi', 'pki_tag')
def test_get_pki(self):
with tempfile.NamedTemporaryFile('wb', 0) as key_file:
key_file.write(self.privkey_rsa_2048)
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.main([
'req', '-k', key_file.name,
])
returncode = self.main([
'pki', '-o', self.out_path
])
self.assertEqual(returncode, 0)
self.assertPKISaved()
self.assertIsNotNone(self.get_config().get('serverapi', 'pki_tag'))
def test_post_hook(self):
with tempfile.NamedTemporaryFile('wb', 0) as key_file:
key_file.write(self.privkey_rsa_2048)
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.main([
'req', '-k', key_file.name,
])
returncode = self.main([
'pki',
'--out', self.out_path,
'--post-hook', 'touch {}'.format(self.hook_path)
])
self.assertEqual(returncode, 0)
self.assertPKISaved()
self.assertTrue(os.path.exists(self.hook_path))
self.assertIsNotNone(self.get_config().get('serverapi', 'pki_tag'))
def test_post_hook_fail(self):
with tempfile.NamedTemporaryFile('wb', 0) as key_file:
key_file.write(self.privkey_rsa_2048)
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.main([
'req', '-k', key_file.name,
])
returncode = self.main([
'pki',
'--out', self.out_path,
'--post-hook', 'false',
])
self.assertNotEqual(returncode, 0)
self.assertPKISaved()
with self.assertRaises(NoOptionError):
self.get_config().get('serverapi', 'pki_tag')
def test_not_modified(self):
with tempfile.NamedTemporaryFile('wb', 0) as key_file:
key_file.write(self.privkey_rsa_2048)
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.main([
'req', '-k', key_file.name,
])
self.main([
'pki', '-o', self.out_path
])
os.unlink(self.server_cert_path)
# Should be a no-op because of the tag.
returncode = self.main([
'pki',
'--out', self.out_path,
'--post-hook', 'touch {}'.format(self.hook_path)
])
self.assertEqual(returncode, 0)
self.assertFalse(os.path.exists(self.server_cert_path))
self.assertFalse(os.path.exists(self.hook_path))
def test_force_download(self):
with tempfile.NamedTemporaryFile('wb', 0) as key_file:
key_file.write(self.privkey_rsa_2048)
self.main([
'register',
'-k', 'secret_onetime_reg_key',
])
self.main([
'req', '-k', key_file.name,
])
self.main([
'pki', '-o', self.out_path
])
os.unlink(self.server_cert_path)
# --force should re-download and run the hook even though the tag matches.
returncode = self.main([
'pki',
'--out', self.out_path,
'--force',
'--post-hook', 'touch {}'.format(self.hook_path)
])
self.assertEqual(returncode, 0)
self.assertTrue(os.path.exists(self.server_cert_path))
self.assertTrue(os.path.exists(self.hook_path))
#
# Utils
#
def assertPKISaved(self):
for filename in self.pki_filenames:
self.assertTrue(os.path.exists(os.path.join(self.out_path, filename)))
def assertPKINotSaved(self):
for filename in self.pki_filenames:
self.assertFalse(os.path.exists(os.path.join(self.out_path, filename)))
privkey_rsa_2048 = b'-----BEGIN PRIVATE KEY-----\nMIIEwAIBADANBgkqhkiG9w0BAQEFAASCBKowggSmAgEAAoIBAQC7eUdE6MAUMmpF\nku2W9MQnU6V+1q17stlITuNF8zhb4HbplX+Lx8soxnRvY6Hn/uP6IIIi3jNim7vv\nruG53VO/CTSiHgg4wf3rO9Lpy8wIIgQwoUBDrOqsGYlJYK8sc1gEsROA9YdAYSgJ\nrm9luF2bmRho92eFCqzq/2dIgNqT5I2WnwvZSW9cup+BzULfwvXF3QXAzTphGhf+\nsDTdgyd7v3dHRHiyVTva3FICuWgtklDBqcP7GrX/TofPal3/Q6asgHc3UxhWPznY\nFYf73SMpwk7SVFXybW0i8kh0oOk6VVODMThrQnHNpU3sfwqc+ZEFMgFWnOg5sh22\nWbQWZQjdAgMBAAECggEBALg610mlfFScsoiKecb14+lNrv21U6iSuinvtDJicIkB\nTXoAOuYPQdthIrzv6QSGHF0KIzjGqTKHHinM7u/qy0iZcEq8PpIgOTo4gOzWJDv9\nyaZMYE3hGIBlW99rDtocw2tg5Gy/W9ltYJ4a+Ee65OpqiW1layp3sjQBJus+DQ51\nRNAefYOo8UrQGFCzDUgH+QUOCTImbD6sVttDI0DojDM8slPOjdb3ZMRO+esQS0Q/\nzoO3f9dCe0VDBRbgJvRGMs+z/TzqqhSqbZwJDFs3e3ItZyXdz8eaGxfsH4et2PFd\nbAjSuBScbYXQWMTYCNdFgNVQ+5hGkAnolxduAFylnLUCgYEA7DBdp67Q4IpNtuKE\nh1OEQZ5pW7Qi/KpHyqXIsiscsBCgV6wdU38C+KW16gz3Sowc7Wry+cRjJQzF5Cqj\nxw8GcO/+OSqmjOTeHBcPKs2Pp4YnZ+0Bo0jfKSg8/gN/aHi607VavusOLIPzgv6V\nr62RViE5rQHK0waZBG6WQrXn+e8CgYEAyzLcoZcsMOLuk18wszQ77tcoPf9DTsIo\n5hM+NeVzm3fit7LG0TonRC7DZYoBAaQJuxujUXqcu6jTIPeIndRPc2FuWhPQPWzC\nJ/S2dy0WQ5bhvhh7Jw9Ko/2a5SsdP0yxuCwQwIUw1O8zawpWW6xeL1P6O9k01fGr\n6AS4osFg5fMCgYEAjuMzxZ4c/7qsCVhAlR4RhSEw3Cm+gN0DUbW6FQ+/60QjvOaD\nV2AfjA20YEQ31wGs/nUVScVltaRklAS30FVmsCyAwFTtLY/IT3Yj1uFFZzPh4x2f\nQAl1+JA/Ve0Hx0xCupGctKO/j27EgxtBs2Zt5o1zNxc+fSwgpm3AudsS3EECgYEA\nnA7zDhPJd75CFuMrxuYeBYAvQvYyHmHWAWXUCJaxpDx93jGqqnQsRhxYKzrDLRxr\n8Mz4MJKnnyS5Cf+yZ+zwHCA/HWVMMHC/6Onz3TG+gKh3tYSdyNDgtXQHq2viaYQg\nld8Z+pIQf+k6J0JoMr3+FAE+FQrrnkiei3Jcz3sPTWsCgYEAkSSunhic1isgO3xo\nX2G2WjWQwOEVlb2XqK5d7aCNwElAvwtKtU78qJxWVWIiStsRNNyq8pTba9DNH9hy\n+v8hSlVExYFjTm1HlpLqFOu3J60vh0A/76O8QT5Pn3gLs6H8OsIxiIK+edqxbO3K\nCberEki3Q3eUI5fua0HCyZrkP/A=\n-----END PRIVATE KEY-----\n'
pki_filenames = [
'anchor.pem', 'client_ca.pem', 'server.pem', 'crl_urls.txt'
]
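# Editor's note on the post-hook contract implied by the tests above: a failing
# --post-hook makes the command exit non-zero, but the PKI files are still
# written and no pki_tag is recorded, so the next run fetches and runs the hook
# again (see test_post_hook_fail and test_not_modified).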
class CRLsTestCase(TestCase):
"""
Breaking the rules and testing with live CRLs.
"""
def setUp(self):
super().setUp()
self.out_path = tempfile.mkdtemp()
self.crl_path = os.path.join(self.out_path, 'cloak-public-clients.crl')
self.pem_path = os.path.join(self.out_path, 'cloak-public-clients.pem')
self.hook_path = os.path.join(self.out_path, 'changed.txt')
self.addCleanup(partial(shutil.rmtree, self.out_path))
def test_fetch_crls(self):
returncode = self.main([
'crls',
'--out', self.out_path,
'--format', 'pem',
'--post-hook', 'touch {}'.format(self.hook_path),
'http://crl.getcloak.com/cloak-public-clients.crl',
'http://crl.getcloak.com/cloak-public-servers.crl',
])
self.assertEqual(returncode, 0)
self.assertTrue(os.path.exists(self.pem_path))
self.assertTrue(os.path.exists(self.hook_path))
def test_fetch_from_file(self):
with tempfile.NamedTemporaryFile('wb') as f:
f.write(b'http://crl.getcloak.com/cloak-public-clients.crl\n')
f.write(b'http://crl.getcloak.com/cloak-public-servers.crl\n')
f.write(b'\n')
f.flush()
returncode = self.main([
'crls',
'--infile', f.name,
'--out', self.out_path,
'--format', 'pem',
'--post-hook', 'touch {}'.format(self.hook_path),
])
self.assertEqual(returncode, 0)
self.assertTrue(os.path.exists(self.pem_path))
self.assertTrue(os.path.exists(self.hook_path))
def test_crls_noop(self):
self.main([
'crls',
'--out', self.out_path,
'--format', 'der',
'http://crl.getcloak.com/cloak-public-clients.crl',
'http://crl.getcloak.com/cloak-public-servers.crl',
])
returncode = self.main([
'crls',
'--out', self.out_path,
'--format', 'der',
'--post-hook', 'touch {}'.format(self.hook_path),
'http://crl.getcloak.com/cloak-public-clients.crl',
'http://crl.getcloak.com/cloak-public-servers.crl',
])
self.assertEqual(returncode, 0)
self.assertTrue(os.path.exists(self.crl_path))
self.assertFalse(os.path.exists(self.hook_path))
def test_hook_fail(self):
returncode = self.main([
'crls',
'--out', self.out_path,
'--post-hook', 'false',
'http://crl.getcloak.com/cloak-public-clients.crl',
'http://crl.getcloak.com/cloak-public-servers.crl',
])
self.assertNotEqual(returncode, 0)
def test_bad_url(self):
url = 'http://crl.getcloak.com/totally-bogus-crl.crl'
returncode = self.main([
'crls', '--out', self.out_path, url
])
self.assertEqual(returncode, 0)
self.assertIn(url, self.stderr.getvalue())
| 37.914027 | 1,762 | 0.623225 | 1,608 | 16,758 | 6.347015 | 0.148632 | 0.038409 | 0.044092 | 0.043112 | 0.868117 | 0.855575 | 0.833725 | 0.829512 | 0.805213 | 0.774152 | 0 | 0.044475 | 0.252655 | 16,758 | 441 | 1,763 | 38 | 0.770441 | 0.007698 | 0 | 0.758523 | 0 | 0.005682 | 0.327172 | 0.236378 | 0 | 0 | 0 | 0 | 0.181818 | 1 | 0.082386 | false | 0 | 0.019886 | 0 | 0.127841 | 0.005682 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
faefb70b8507c6ad3cd85add227f5fb1100e23cc | 643 | py | Python | cep/energies/energy_leaf.py | hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators | 9a8801e9c663174b753c4852b2313c5a3f302434 | [
"MIT"
] | null | null | null | cep/energies/energy_leaf.py | hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators | 9a8801e9c663174b753c4852b2313c5a3f302434 | [
"MIT"
] | null | null | null | cep/energies/energy_leaf.py | hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators | 9a8801e9c663174b753c4852b2313c5a3f302434 | [
"MIT"
] | null | null | null | import torch.nn as nn
class EnergyLeaf(nn.Module):
'''
An EnergyLeaf is an energy-based model that provides the unnormalized log prob(action|state)
'''
def __init__(self):
super(EnergyLeaf, self).__init__()
def set_context(self, state):
pass
def log_prob(self, action):
pass
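# --- Hedged example (editor's addition, not part of the original module) ---
# A minimal concrete EnergyLeaf with a quadratic (Gaussian-style) energy pulled
# toward a context-dependent target action. The "target equals the state" rule
# and the class name are illustrative assumptions only.
import torch

class ExampleQuadraticEnergyLeaf(EnergyLeaf):
    def __init__(self, dim):
        super(ExampleQuadraticEnergyLeaf, self).__init__()
        self.target = torch.zeros(dim)  # placeholder target action

    def set_context(self, state):
        # Toy rule: attract actions toward the (detached) current state.
        self.target = state.detach()

    def log_prob(self, action):
        # Unnormalized log prob(action|state): negative squared distance.
        return -((action - self.target) ** 2).sum(dim=-1)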
class EnergyLeaf_x(nn.Module):
'''
An EnergyLeaf_x is an energy-based model that provides the unnormalized log prob(action|state); here the state is passed explicitly to log_prob
'''
def __init__(self):
super(EnergyLeaf_x, self).__init__()
def set_context(self, state):
pass
def log_prob(self, action, state):
pass
| 21.433333 | 96 | 0.640747 | 86 | 643 | 4.534884 | 0.337209 | 0.082051 | 0.051282 | 0.082051 | 0.841026 | 0.841026 | 0.841026 | 0.841026 | 0.841026 | 0.841026 | 0 | 0 | 0.26283 | 643 | 30 | 97 | 21.433333 | 0.822785 | 0.287714 | 0 | 0.533333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0.266667 | 0.066667 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 9
8793f6c7bf9676b69e4e22bcf9f888b208614162 | 76,071 | py | Python | test/test_huge_query.py | pilate/cassandra-dbapi2 | bb4a638602f936ff05e2a8afec9ea08b72baf796 | [
"Apache-2.0"
] | null | null | null | test/test_huge_query.py | pilate/cassandra-dbapi2 | bb4a638602f936ff05e2a8afec9ea08b72baf796 | [
"Apache-2.0"
] | null | null | null | test/test_huge_query.py | pilate/cassandra-dbapi2 | bb4a638602f936ff05e2a8afec9ea08b72baf796 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import time
from cql import query
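# --- Hedged sketch (editor's addition, not part of the original test) ---
# The batch literal below is clearly machine generated. A helper of roughly
# this shape could rebuild an equivalent statement; the closing "APPLY BATCH;"
# is an assumption, since the literal's tail is not reproduced in this excerpt,
# and the row ordering in the original is not strictly sequential.
def build_rolling_cf_batch(row_names, col_order=(2, 3, 0, 1, 4)):
    cols = ', '.join("'col_%d'" % i for i in col_order)
    vals = ', '.join("'val_%d'" % i for i in col_order)
    stmts = '\n'.join(
        "  INSERT INTO rolling_cf_standard (KEY, %s) VALUES ('%s', %s);"
        % (cols, row, vals)
        for row in row_names
    )
    return 'BEGIN BATCH USING CONSISTENCY ONE\n%s\nAPPLY BATCH;' % stmts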
huge_query = """\
BEGIN BATCH USING CONSISTENCY ONE
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_459', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_458', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_451', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_450', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_453', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_452', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_455', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_454', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_457', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_456', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_208', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_209', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_204', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_205', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_206', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_207', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_200', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_201', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_202', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_203', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_49', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_48', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_41', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_40', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_43', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_42', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_45', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_44', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_47', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_46', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_367', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_366', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_365', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_338', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_339', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_364', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_330', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_331', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_332', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_333', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_334', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_335', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_336', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_337', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_361', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_363', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_360', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_362', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_149', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_148', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_143', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_142', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_141', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_140', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_147', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_146', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_145', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_144', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_419', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_418', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_415', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_414', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_417', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_416', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_411', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_410', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_413', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_412', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_358', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_359', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_240', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_241', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_242', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_243', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_244', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_245', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_246', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_247', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_248', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_249', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_482', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_483', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_480', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_481', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_486', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_487', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_484', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_485', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_488', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_489', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_350', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_351', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_30', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_31', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_32', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_33', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_34', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_35', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_36', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_37', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_38', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_39', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_189', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_188', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_187', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_186', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_185', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_184', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_183', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_182', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_181', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_180', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_341', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_340', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_343', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_342', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_89', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_88', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_347', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_346', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_85', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_84', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_87', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_86', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_81', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_80', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_83', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_82', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_114', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_115', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_116', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_117', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_110', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_111', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_112', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_113', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_118', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_119', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_446', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_447', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_444', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_445', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_442', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_443', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_440', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_441', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_448', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_449', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_398', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_399', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_390', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_239', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_238', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_391', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_231', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_230', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_233', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_232', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_235', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_234', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_237', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_236', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_305', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_304', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_307', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_306', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_301', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_300', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_303', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_302', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_309', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_308', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_7', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_74', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_75', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_8', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_9', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_158', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_159', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_76', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_77', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_70', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_71', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_72', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_73', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_150', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_151', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_152', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_153', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_78', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_79', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_156', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_157', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_408', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_409', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_402', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_403', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_400', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_401', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_406', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_407', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_404', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_405', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_154', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_155', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_279', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_278', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_275', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_274', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_277', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_276', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_271', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_270', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_273', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_272', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_479', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_478', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_477', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_476', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_475', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_474', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_473', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_472', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_471', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_470', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_345', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_344', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_349', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_348', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_29', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_28', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_27', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_26', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_25', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_24', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_23', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_22', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_21', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_20', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_198', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_199', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_194', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_195', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_196', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_197', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_190', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_191', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_192', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_193', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_297', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_296', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_295', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_294', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_293', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_292', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_291', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_290', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_356', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_357', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_354', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_355', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_352', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_353', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_299', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_298', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_121', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_120', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_123', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_122', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_125', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_124', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_127', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_126', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_129', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_128', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_433', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_432', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_431', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_430', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_437', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_436', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_435', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_434', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_439', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_438', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_226', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_227', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_224', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_225', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_222', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_223', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_220', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_221', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_385', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_384', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_387', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_386', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_381', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_380', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_228', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_229', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_389', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_388', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_312', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_313', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_310', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_311', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_316', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_317', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_314', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_315', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_318', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_319', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_383', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_382', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_16', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_17', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_14', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_15', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_12', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_13', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_10', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_11', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_18', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_19', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_165', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_164', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_167', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_166', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_161', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_160', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_163', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_162', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_169', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_168', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_63', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_62', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_61', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_60', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_67', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_66', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_65', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_64', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_69', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_68', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_369', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_368', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_4', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_5', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_268', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_269', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_0', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_1', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_2', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_3', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_262', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_263', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_260', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_261', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_266', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_267', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_264', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_265', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_468', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_469', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_464', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_465', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_466', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_467', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_460', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_461', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_462', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_463', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_219', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_218', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_217', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_216', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_215', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_214', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_213', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_212', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_211', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_210', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_58', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_59', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_372', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_52', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_53', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_50', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_51', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_56', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_57', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_54', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_55', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_378', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_379', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_284', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_285', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_286', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_328', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_280', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_281', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_282', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_283', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_323', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_322', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_321', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_320', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_288', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_289', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_325', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_324', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_6', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_329', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_287', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_373', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_138', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_139', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_136', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_137', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_134', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_135', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_132', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_133', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_130', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_131', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_420', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_421', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_422', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_423', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_424', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_425', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_426', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_427', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_428', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_429', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_327', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_326', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_253', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_252', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_251', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_250', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_257', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_256', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_255', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_254', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_392', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_393', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_259', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_258', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_396', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_397', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_394', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_395', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_495', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_494', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_497', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_496', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_491', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_490', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_493', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_492', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_499', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_498', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_172', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_173', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_170', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_171', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_176', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_177', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_174', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_175', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_178', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_179', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_374', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_375', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_376', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_377', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_370', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_371', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_98', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_99', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_96', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_97', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_94', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_95', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_92', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_93', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_90', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_91', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_107', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_106', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_105', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_104', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_103', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_102', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_101', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_100', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_109', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
INSERT INTO rolling_cf_standard (KEY, 'col_2', 'col_3', 'col_0', 'col_1', 'col_4') VALUES ('row_108', 'val_2', 'val_3', 'val_0', 'val_1', 'val_4');
APPLY BATCH;
"""
class TestHugeQuery(unittest.TestCase):
    MAX_TIME = 1.0  # seconds. this gives a ton of room.

    def test_huge_query_noparams(self):
        t1 = time.time()
        expanded = query.prepare_inline(huge_query, {})
        t2 = time.time()
        self.assertEqual(huge_query, expanded)
        self.assertTrue((t2 - t1) < self.MAX_TIME)

        t1 = time.time()
        prepared, names = query.prepare_query(huge_query)
        t2 = time.time()
        self.assertEqual(huge_query, prepared)
        self.assertEqual(names, [])
        self.assertTrue((t2 - t1) < self.MAX_TIME)

    def test_huge_query_params(self):
        huge_query_2 = huge_query + ':boo'
        t1 = time.time()
        expanded = query.prepare_inline(huge_query_2, {'boo': 'hoo'})
        t2 = time.time()
        self.assertEqual(huge_query, expanded[:len(huge_query)])
        self.assertTrue(expanded.endswith("\n'hoo'"))
        self.assertTrue((t2 - t1) < self.MAX_TIME)

        t1 = time.time()
        prepared, names = query.prepare_query(huge_query_2)
        t2 = time.time()
        self.assertEqual(huge_query, prepared[:len(huge_query)])
        self.assertEqual(names, ['boo'])
        self.assertTrue(prepared.endswith('\n?'))
        self.assertTrue((t2 - t1) < self.MAX_TIME)
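
# --- Hedged illustration (added commentary, not part of the original test) ---
# The substitution behavior these timing tests rely on, shown on a tiny query.
# ``query`` is the module imported by this test suite; the exact semantics
# below are inferred from the assertions above, not confirmed documentation.
#
#   expanded = query.prepare_inline("SELECT v FROM cf WHERE KEY = :k", {'k': 'row_1'})
#   # -> "SELECT v FROM cf WHERE KEY = 'row_1'"
#   prepared, names = query.prepare_query("SELECT v FROM cf WHERE KEY = :k")
#   # -> ("SELECT v FROM cf WHERE KEY = ?", ['k'])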
| 135.841071 | 147 | 0.642492 | 14,796 | 76,071 | 2.861855 | 0.041836 | 0.047327 | 0.200737 | 0.224353 | 0.943983 | 0.943983 | 0.943983 | 0.942613 | 0.940251 | 0.93652 | 0 | 0.094895 | 0.111343 | 76,071 | 559 | 148 | 136.084079 | 0.531486 | 0.010359 | 0 | 0.022388 | 0 | 0.932836 | 0.982675 | 0 | 0 | 0 | 0 | 0 | 0.022388 | 1 | 0.003731 | false | 0 | 0.005597 | 0 | 0.01306 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
87d4d6d35ede411f5cc0cd923683d49eb65aef54 | 18,134 | py | Python | pyedgeconnect/orch/_tunnels_configuration.py | SPOpenSource/edgeconnect-python | 158aad220f8cacfa029df41b0ac2a37f7dac943f | [
"MIT"
] | 15 | 2021-07-02T17:09:13.000Z | 2022-02-08T17:06:51.000Z | pyedgeconnect/orch/_tunnels_configuration.py | SPOpenSource/edgeconnect-python | 158aad220f8cacfa029df41b0ac2a37f7dac943f | [
"MIT"
] | null | null | null | pyedgeconnect/orch/_tunnels_configuration.py | SPOpenSource/edgeconnect-python | 158aad220f8cacfa029df41b0ac2a37f7dac943f | [
"MIT"
] | 4 | 2021-07-16T00:05:24.000Z | 2022-03-26T02:04:17.000Z | # MIT License
# (C) Copyright 2021 Hewlett Packard Enterprise Development LP.
#
# tunnelsConfiguration : ECOS tunnel configuration


def get_total_tunnel_count(
    self,
    metadata: bool = True,
) -> dict:
    """Get total tunnel count across all appliances

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - GET
          - /tunnels2

    :param metadata: Includes the tunnel count, false returns an empty
        body, defaults to True
    :type metadata: bool
    :return: Returns dictionary of tunnel count with single key
        "totalTunnelCount"
    :rtype: dict
    """
    return self._get("/tunnels2?metaData={}".format(metadata))


def get_tunnel_count_for_appliances(
    self,
    ne_pk_list: list[str],
) -> dict:
    """Get total tunnel count organized by appliance and overlay for
    specified appliances

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - POST
          - /tunnels2/tunnelCounts

    :param ne_pk_list: List of one or more appliance Network Primary
        Keys (nePk), e.g. ``["3.NE","5.NE"]``
    :type ne_pk_list: list[str]
    :return: Returns nested dictionary of tunnel counts; each top-level
        key is an appliance nePk, and each sub-dictionary holds tunnel
        counts per overlay name plus a total
    :rtype: dict
    """
    data = {"ids": ne_pk_list}

    return self._post(
        "/tunnels2/tunnelCounts",
        data=data,
    )


def get_physical_tunnel_details(
    self,
    limit: int,
    matching_alias: str = None,
    state: str = None,
    tunnel_id: bool = None,
    alias: bool = None,
    tag: bool = None,
    source_ne_pk: bool = None,
    dest_ne_pk: bool = None,
    dest_tunnel_id: bool = None,
    dest_tunnel_alias: bool = None,
    operational_status: bool = None,
    admin_status: bool = None,
    remote_id_state: bool = None,
    fec_status: bool = None,
    fec_ratio: bool = None,
) -> dict:
    """Get physical tunnel details across all appliances

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - GET
          - /tunnels2/physical

    :param limit: Max number of tunnels to return in response
    :type limit: int
    :param matching_alias: Match tunnel alias on text string provided,
        defaults to None
    :type matching_alias: str, optional
    :param state: Regular expression to match tunnel state,
        e.g. ``Up`` ``Down``, defaults to None
    :type state: str, optional
    :param tunnel_id: Include tunnel id in response, defaults to None
    :type tunnel_id: bool, optional
    :param alias: Include alias name of tunnel in UI in response,
        defaults to None
    :type alias: bool, optional
    :param tag: Include overlay name for bonded tunnel in response,
        defaults to None
    :type tag: bool, optional
    :param source_ne_pk: Include nePk of appliance that the tunnel
        belongs to in response, defaults to None
    :type source_ne_pk: bool, optional
    :param dest_ne_pk: Include nePk of destination appliance for the
        tunnel in response, defaults to None
    :type dest_ne_pk: bool, optional
    :param dest_tunnel_id: Include tunnel id of opposite tunnel on the
        destination appliance in response, defaults to None
    :type dest_tunnel_id: bool, optional
    :param dest_tunnel_alias: Include tunnel alias of opposite tunnel on
        the destination appliance in response, defaults to None
    :type dest_tunnel_alias: bool, optional
    :param operational_status: Include current status of tunnel in
        response, defaults to None
    :type operational_status: bool, optional
    :param admin_status: Include admin status of tunnel in response,
        defaults to None
    :type admin_status: bool, optional
    :param remote_id_state: Include remote tunnel id state in response,
        defaults to None
    :type remote_id_state: bool, optional
    :param fec_status: Include FEC status of the tunnel in response,
        defaults to None
    :type fec_status: bool, optional
    :param fec_ratio: Include current FEC ratio of the tunnel in
        response, defaults to None
    :type fec_ratio: bool, optional
    :return: Returns dictionary of tunnel details based on supplied
        query details
    :rtype: dict
    """
    path = "/tunnels2/physical?limit={}".format(limit)

    if matching_alias is not None:
        path = path + "&matchingAlias={}".format(matching_alias)
    if state is not None:
        path = path + "&state={}".format(state)
    if tunnel_id is not None:
        path = path + "&id={}".format(tunnel_id)
    if alias is not None:
        path = path + "&alias={}".format(alias)
    if tag is not None:
        path = path + "&tag={}".format(tag)
    if source_ne_pk is not None:
        path = path + "&srcNePk={}".format(source_ne_pk)
    if dest_ne_pk is not None:
        path = path + "&destNePk={}".format(dest_ne_pk)
    if dest_tunnel_id is not None:
        path = path + "&destTunnelId={}".format(dest_tunnel_id)
    if dest_tunnel_alias is not None:
        path = path + "&destTunnelAlias={}".format(dest_tunnel_alias)
    if operational_status is not None:
        path = path + "&operStatus={}".format(operational_status)
    if admin_status is not None:
        path = path + "&adminStatus={}".format(admin_status)
    if remote_id_state is not None:
        path = path + "&remoteIdState={}".format(remote_id_state)
    if fec_status is not None:
        path = path + "&fecStatus={}".format(fec_status)
    if fec_ratio is not None:
        path = path + "&fecRatio={}".format(fec_ratio)

    return self._get(path)


def get_physical_tunnel_details_for_appliance(
    self,
    ne_pk: str,
    limit: int,
    matching_alias: str = None,
    state: str = None,
    tunnel_id: bool = None,
    alias: bool = None,
    tag: bool = None,
    source_ne_pk: bool = None,
    dest_ne_pk: bool = None,
    dest_tunnel_id: bool = None,
    dest_tunnel_alias: bool = None,
    operational_status: bool = None,
    admin_status: bool = None,
    remote_id_state: bool = None,
    fec_status: bool = None,
    fec_ratio: bool = None,
) -> dict:
    """Get physical tunnel details for specific appliance

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - GET
          - /tunnels2/physical/{nePk}

    :param ne_pk: Network Primary Key (nePk) of appliance, e.g. ``3.NE``
    :type ne_pk: str
    :param limit: Max number of tunnels to return in response
    :type limit: int
    :param matching_alias: Match tunnel alias on text string provided,
        defaults to None
    :type matching_alias: str, optional
    :param state: Regular expression to match tunnel state,
        e.g. ``Up`` ``Down``, defaults to None
    :type state: str, optional
    :param tunnel_id: Include tunnel id in response, defaults to None
    :type tunnel_id: bool, optional
    :param alias: Include alias name of tunnel in UI in response,
        defaults to None
    :type alias: bool, optional
    :param tag: Include overlay name for bonded tunnel in response,
        defaults to None
    :type tag: bool, optional
    :param source_ne_pk: Include nePk of appliance that the tunnel
        belongs to in response, defaults to None
    :type source_ne_pk: bool, optional
    :param dest_ne_pk: Include nePk of destination appliance for the
        tunnel in response, defaults to None
    :type dest_ne_pk: bool, optional
    :param dest_tunnel_id: Include tunnel id of opposite tunnel on the
        destination appliance in response, defaults to None
    :type dest_tunnel_id: bool, optional
    :param dest_tunnel_alias: Include tunnel alias of opposite tunnel on
        the destination appliance in response, defaults to None
    :type dest_tunnel_alias: bool, optional
    :param operational_status: Include current status of tunnel in
        response, defaults to None
    :type operational_status: bool, optional
    :param admin_status: Include admin status of tunnel in response,
        defaults to None
    :type admin_status: bool, optional
    :param remote_id_state: Include remote tunnel id state in response,
        defaults to None
    :type remote_id_state: bool, optional
    :param fec_status: Include FEC status of the tunnel in response,
        defaults to None
    :type fec_status: bool, optional
    :param fec_ratio: Include current FEC ratio of the tunnel in
        response, defaults to None
    :type fec_ratio: bool, optional
    :return: Returns dictionary of tunnel details based on supplied
        query details
    :rtype: dict
    """
    path = "/tunnels2/physical/{}?limit={}".format(ne_pk, limit)

    if matching_alias is not None:
        path = path + "&matchingAlias={}".format(matching_alias)
    if state is not None:
        path = path + "&state={}".format(state)
    if tunnel_id is not None:
        path = path + "&id={}".format(tunnel_id)
    if alias is not None:
        path = path + "&alias={}".format(alias)
    if tag is not None:
        path = path + "&tag={}".format(tag)
    if source_ne_pk is not None:
        path = path + "&srcNePk={}".format(source_ne_pk)
    if dest_ne_pk is not None:
        path = path + "&destNePk={}".format(dest_ne_pk)
    if dest_tunnel_id is not None:
        path = path + "&destTunnelId={}".format(dest_tunnel_id)
    if dest_tunnel_alias is not None:
        path = path + "&destTunnelAlias={}".format(dest_tunnel_alias)
    if operational_status is not None:
        path = path + "&operStatus={}".format(operational_status)
    if admin_status is not None:
        path = path + "&adminStatus={}".format(admin_status)
    if remote_id_state is not None:
        path = path + "&remoteIdState={}".format(remote_id_state)
    if fec_status is not None:
        path = path + "&fecStatus={}".format(fec_status)
    if fec_ratio is not None:
        path = path + "&fecRatio={}".format(fec_ratio)

    return self._get(path)


def get_physical_tunnel_details_for_appliance_tunnel(
    self,
    ne_pk: str,
    tunnel_id: str,
) -> dict:
    """Get physical tunnel details for specific tunnel on appliance

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - GET
          - /tunnels2/physical/{nePk}/{tunnelId}

    :param ne_pk: Network Primary Key (nePk) of appliance, e.g. ``3.NE``
    :type ne_pk: str
    :param tunnel_id: Tunnel id, e.g. ``tunnel_12``
    :type tunnel_id: str
    :return: Returns dictionary of tunnel details based on supplied
        query details
    :rtype: dict
    """
    return self._get("/tunnels2/physical/{}/{}".format(ne_pk, tunnel_id))


def get_tunnels_between_appliances(
    self,
    ne_pk_list: list[str],
    limit: int,
    matching_alias: str = None,
    overlay_id: str = None,
    state: str = None,
) -> list:
    """Get tunnels between the specified appliances

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - POST
          - /tunnels2/getTunnelsBetweenAppliances

    :param ne_pk_list: List of one or more appliance Network Primary
        Keys (nePk), e.g. ``["3.NE","5.NE"]``
    :type ne_pk_list: list[str]
    :param limit: Max number of tunnels to return in response
    :type limit: int
    :param matching_alias: Match tunnel alias on text string provided,
        defaults to None
    :type matching_alias: str, optional
    :param overlay_id: The overlay ID to match tunnels on. Value of
        ``0`` for all physical tunnels, "all" for all bonded tunnels,
        defaults to None
    :type overlay_id: str, optional
    :param state: Regular expression to match tunnel state,
        e.g. ``Up`` ``Down``, defaults to None
    :type state: str, optional
    :return: Returns list of dictionaries of tunnel details between
        provided appliances
    :rtype: list
    """
    path = "/tunnels2/getTunnelsBetweenAppliances?limit={}".format(limit)

    if matching_alias is not None:
        path = path + "&matchingAlias={}".format(matching_alias)
    if overlay_id is not None:
        path = path + "&overlayId={}".format(overlay_id)
    if state is not None:
        path = path + "&state={}".format(state)

    data = {"ids": ne_pk_list}

    return self._post(path, data=data)


def get_tunnels_between_appliances_config_data(
    self,
    ne_pk_list: list[str],
    state: str = None,
) -> dict:
    """Get tunnel configuration/state data for tunnels between the
    specified appliances

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - POST
          - /tunnels2/physical/state

    :param ne_pk_list: List of one or more appliance Network Primary
        Keys (nePk), e.g. ``["3.NE","5.NE"]``
    :type ne_pk_list: list[str]
    :param state: Regular expression to match tunnel state,
        e.g. ``Up`` ``Down``, defaults to None
    :type state: str, optional
    :return: Returns dictionary of tunnel configuration details between
        provided appliances
    :rtype: dict
    """
    path = "/tunnels2/physical/state"

    # ``state`` is the first (and only) query parameter on this path, so
    # it must be introduced with ``?`` rather than ``&``.
    if state is not None:
        path = path + "?state={}".format(state)

    data = {"ids": ne_pk_list}

    return self._post(path, data=data)


def initiate_tunnel_traceroute(
    self,
    ne_pk: str,
    tunnel_id: str,
) -> bool:
    """Initiate a traceroute over a specified tunnel on an appliance

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - POST
          - /tunnels2/physical/traceroute/{id}

    :param ne_pk: Network Primary Key (nePk) of appliance, e.g. ``3.NE``
    :type ne_pk: str
    :param tunnel_id: Tunnel id, e.g. ``tunnel_12``
    :type tunnel_id: str
    :return: Returns True/False based on successful call
    :rtype: bool
    """
    data = {"nePk": ne_pk}

    return self._post(
        "/tunnels/physical/traceroute/{}".format(tunnel_id),
        data=data,
        expected_status=[204],
        return_type="bool",
    )


def get_appliance_tunnel_ids(
    self,
    ne_pk: str,
    state: str = None,
) -> dict:
    """Get tunnel ids on an appliance, optionally filtered by state

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - GET
          - /tunnels2/physical/tunnelIds/{nePk}

    :param ne_pk: Network Primary Key (nePk) of appliance, e.g. ``3.NE``
    :type ne_pk: str
    :param state: Regular expression to match tunnel state,
        e.g. ``Up`` ``Down``, defaults to None
    :type state: str, optional
    :return: Returns tunnel ids on the appliance, filtered by state when
        a state expression is supplied
    :rtype: dict
    """
    if state is not None:
        return self._get(
            "/tunnels/physical/tunnelIds/{}?state={}".format(ne_pk, state)
        )
    else:
        return self._get("/tunnels/physical/tunnelIds/{}".format(ne_pk))


def get_tunnel_traceroute(
    self,
    ne_pk: str,
    tunnel_id: str,
) -> dict:
    """Get status of a traceroute over a specified tunnel on an
    appliance

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - tunnelsConfiguration
          - POST
          - /tunnels2/physical/tracerouteState/{id}

    :param ne_pk: Network Primary Key (nePk) of appliance, e.g. ``3.NE``
    :type ne_pk: str
    :param tunnel_id: Tunnel id, e.g. ``tunnel_12``
    :type tunnel_id: str
    :return: Returns dictionary of traceroute hops and related details
        (index, ip, min/max/avg rtt, etc.)
    :rtype: dict
    """
    data = {"nePk": ne_pk}

    return self._post(
        "/tunnels/physical/tracerouteState/{}".format(tunnel_id),
        data=data,
    )


def get_batch_appliance_tunnels_config(
    self,
    ne_pk: str,
    tunnel_id_list: list,
) -> dict:
    """Get appliance tunnel configuration for specified tunnels

    .. note::
        This API call is not in the current Swagger as of Orch 9.0.3

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - n/a
          - POST
          - /tunnels/physical/config/getBatch/{nePk}

    :param ne_pk: Network Primary Key (nePk) of appliance, e.g. ``3.NE``
    :type ne_pk: str
    :param tunnel_id_list: List of tunnel ids to retrieve config
        details for, e.g. ``["tunnel_12", "tunnel_13"]``
    :type tunnel_id_list: list
    :return: Returns dictionary of tunnel configuration details from
        specified tunnels
    :rtype: dict
    """
    data = tunnel_id_list

    return self._post(
        "/tunnels/physical/config/getBatch/{}".format(ne_pk),
        data=data,
    )


def get_batch_appliance_tunnels_state(
    self,
    ne_pk: str,
    tunnel_id_list: list,
) -> dict:
    """Get appliance tunnel state for specified tunnels

    .. note::
        This API call is not in the current Swagger as of Orch 9.0.3

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - n/a
          - POST
          - /tunnels/physical/state/getBatch/{nePk}

    :param ne_pk: Network Primary Key (nePk) of appliance, e.g. ``3.NE``
    :type ne_pk: str
    :param tunnel_id_list: List of tunnel ids to retrieve state
        details for, e.g. ``["tunnel_12", "tunnel_13"]``
    :type tunnel_id_list: list
    :return: Returns dictionary of tunnel state details from specified
        tunnels
    :rtype: dict
    """
    data = tunnel_id_list

    return self._post(
        "/tunnels/physical/state/getBatch/{}".format(ne_pk),
        data=data,
    )
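
# --- Hedged usage sketch (added commentary, not part of the original module) ---
# These module-level functions take ``self`` because pyedgeconnect binds them
# onto its Orchestrator client class, so a caller would use them roughly as
# below. The import path is the library's documented entry point; the
# hostname, api_key, and nePk values are illustrative placeholders only.
#
#   from pyedgeconnect import Orchestrator
#   orch = Orchestrator("orch.example.com", api_key="PLACEHOLDER")
#   total = orch.get_total_tunnel_count()
#   per_appliance = orch.get_tunnel_count_for_appliances(["3.NE", "5.NE"])
#   up_tunnels = orch.get_physical_tunnel_details(limit=100, state="Up")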
| 31.104631 | 74 | 0.632955 | 2,347 | 18,134 | 4.756285 | 0.082659 | 0.021858 | 0.041387 | 0.053212 | 0.894025 | 0.867867 | 0.848159 | 0.828989 | 0.825405 | 0.81806 | 0 | 0.005282 | 0.269163 | 18,134 | 582 | 75 | 31.158076 | 0.837018 | 0.573288 | 0 | 0.80203 | 0 | 0 | 0.126886 | 0.061749 | 0 | 0 | 0 | 0 | 0 | 1 | 0.060914 | false | 0 | 0 | 0 | 0.126904 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
87ec9b4cd1b2ef96e79725204c5afc3087037691 | 68 | py | Python | tests/test_pattern/__init__.py | joshmeranda/undo | f54581223c0c157702dda6124691bb40fa2e2b31 | [
"MIT"
] | null | null | null | tests/test_pattern/__init__.py | joshmeranda/undo | f54581223c0c157702dda6124691bb40fa2e2b31 | [
"MIT"
] | null | null | null | tests/test_pattern/__init__.py | joshmeranda/undo | f54581223c0c157702dda6124691bb40fa2e2b31 | [
"MIT"
] | null | null | null | from .test_pattern import *
from .test_pattern_to_argparse import *
| 22.666667 | 39 | 0.823529 | 10 | 68 | 5.2 | 0.6 | 0.307692 | 0.576923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 68 | 2 | 40 | 34 | 0.866667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
87f30f58e845604dd1bdb72b504249ff5453e3ec | 8,968 | py | Python | layers.py | YuanshengZhao/adiabaticbinary | 2db98957e3d570a3d4fa94d25aed65810576b898 | [
"MIT"
] | null | null | null | layers.py | YuanshengZhao/adiabaticbinary | 2db98957e3d570a3d4fa94d25aed65810576b898 | [
"MIT"
] | null | null | null | layers.py | YuanshengZhao/adiabaticbinary | 2db98957e3d570a3d4fa94d25aed65810576b898 | [
"MIT"
] | null | null | null | import tensorflow as tf


class BinaryConv2D(tf.keras.layers.Layer):
    """2D convolution with an adiabatically binarized kernel: weights pass
    through tanh(kk * w) while the sharpness kk is small, and are hardened
    to sign(w) once kk becomes large."""

    def __init__(self, num_chanel, ker_size=3, num_stride=1, ker_bias=False):
        super(BinaryConv2D, self).__init__()
        self.num_chanel = num_chanel
        self.ker_size = ker_size
        self.num_stride = num_stride
        self.ker_bias = ker_bias

    def build(self, input_shape):
        self.kernel = self.add_weight(
            shape=[self.ker_size, self.ker_size, int(input_shape[-1]), self.num_chanel],
            initializer=tf.keras.initializers.RandomUniform(minval=-.1, maxval=.1))
        # nmk: learnable scalar scale applied to the binarized convolution.
        self.nmk = self.add_weight(initializer=tf.keras.initializers.Constant(1.))
        # bias is added to the (binarized) kernel itself, per output channel.
        self.bias = self.add_weight(trainable=self.ker_bias, shape=[self.num_chanel],
                                    initializer=tf.keras.initializers.Constant(0.))
        # kk: non-trainable sharpness of the tanh surrogate.
        self.kk = self.add_weight(trainable=False, initializer=tf.keras.initializers.Constant(1.))

    def set_kk(self, kknew):
        self.kk.assign(kknew)

    def call(self, inputs):
        if (self.kk < 1e3):
            return self.nmk * tf.nn.conv2d(inputs, tf.math.tanh(self.kernel * self.kk) + self.bias,
                                           self.num_stride, "SAME")
        else:
            return self.nmk * tf.nn.conv2d(inputs, tf.math.sign(self.kernel) + self.bias,
                                           self.num_stride, "SAME")


class BinaryConv1D(tf.keras.layers.Layer):
    """1D analogue of BinaryConv2D."""

    def __init__(self, num_chanel, ker_size=3, num_stride=1, ker_bias=False):
        super(BinaryConv1D, self).__init__()
        self.num_chanel = num_chanel
        self.ker_size = ker_size
        self.num_stride = num_stride
        self.ker_bias = ker_bias

    def build(self, input_shape):
        self.kernel = self.add_weight(
            shape=[self.ker_size, int(input_shape[-1]), self.num_chanel],
            initializer=tf.keras.initializers.RandomUniform(minval=-.1, maxval=.1))
        self.nmk = self.add_weight(initializer=tf.keras.initializers.Constant(1.))
        self.bias = self.add_weight(trainable=self.ker_bias, shape=[self.num_chanel],
                                    initializer=tf.keras.initializers.Constant(0.))
        self.kk = self.add_weight(trainable=False, initializer=tf.keras.initializers.Constant(1.))

    def set_kk(self, kknew):
        self.kk.assign(kknew)

    def call(self, inputs):
        if (self.kk < 1e3):
            return self.nmk * tf.nn.conv1d(inputs, tf.math.tanh(self.kernel * self.kk) + self.bias,
                                           self.num_stride, "SAME")
        else:
            return self.nmk * tf.nn.conv1d(inputs, tf.math.sign(self.kernel) + self.bias,
                                           self.num_stride, "SAME")


class BinaryConv2DCL(BinaryConv2D):
    """BinaryConv2D with an extra regularization loss penalizing kernel
    magnitudes above 0.2."""

    def call(self, inputs):
        self.add_loss(2e-1 * (tf.math.reduce_sum(tf.nn.relu(tf.math.abs(self.kernel) - .2) ** 2)))
        if (self.kk < 1e3):
            return self.nmk * tf.nn.conv2d(inputs, tf.math.tanh(self.kernel * self.kk) + self.bias,
                                           self.num_stride, "SAME")
        else:
            return self.nmk * tf.nn.conv2d(inputs, tf.math.sign(self.kernel) + self.bias,
                                           self.num_stride, "SAME")


class BinaryDense(tf.keras.layers.Layer):
    """Fully connected layer with an adiabatically binarized weight matrix."""

    def __init__(self, num_outputs):
        super(BinaryDense, self).__init__()
        self.num_outputs = num_outputs

    def build(self, input_shape):
        self.kernel = self.add_weight(shape=[int(input_shape[-1]), self.num_outputs],
                                      initializer=tf.keras.initializers.RandomUniform(minval=-.1, maxval=.1))
        self.bias = self.add_weight(shape=[self.num_outputs],
                                    initializer=tf.keras.initializers.Constant(0.))
        self.nmk = self.add_weight(initializer=tf.keras.initializers.Constant(1.))
        self.kk = self.add_weight(trainable=False, initializer=tf.keras.initializers.Constant(1.))

    def set_kk(self, kknew):
        self.kk.assign(kknew)

    def call(self, inputs):
        if (self.kk < 1e3):
            return self.nmk * tf.matmul(inputs, tf.math.tanh(self.kernel * self.kk)) + self.bias
        else:
            return self.nmk * tf.matmul(inputs, tf.math.sign(self.kernel)) + self.bias


class BinaryActivation(tf.keras.layers.Layer):
    """Binary activation: tanh(kk * x) while kk is small, hardened to
    sign(x) once kk is large."""

    def __init__(self, ker_bias=False):
        super(BinaryActivation, self).__init__()
        self.ker_bias = ker_bias

    def build(self, input_shape):
        self.bias = self.add_weight(trainable=self.ker_bias,
                                    initializer=tf.keras.initializers.Constant(1.0))
        self.kk = self.add_weight(trainable=False,
                                  initializer=tf.keras.initializers.Constant(1.0))

    def set_kk(self, kkx):
        self.kk.assign(kkx)

    def call(self, inputs):
        if (self.kk < 1e3):
            return tf.math.tanh(inputs * self.kk) + self.bias
        else:
            return tf.math.sign(inputs) + self.bias


class BinaryActivationH(BinaryActivation):
    """Sigmoid-based variant that hardens to a {0, 1}-valued step."""

    def build(self, input_shape):
        self.bias = self.add_weight(trainable=self.ker_bias,
                                    initializer=tf.keras.initializers.Constant(0.0))
        self.kk = self.add_weight(trainable=False,
                                  initializer=tf.keras.initializers.Constant(1.0))

    def call(self, inputs):
        if (self.kk < 1e3):
            return tf.nn.sigmoid(inputs * self.kk) + self.bias
        else:
            return tf.math.sign(inputs) * .5 + (.5 + self.bias)


class BinaryActivationCLU(BinaryActivation):
    """Clipped-linear variant that hardens to a {0, 1}-valued step."""

    def build(self, input_shape):
        self.bias = self.add_weight(trainable=self.ker_bias,
                                    initializer=tf.keras.initializers.Constant(0.0))
        self.kk = self.add_weight(trainable=False,
                                  initializer=tf.keras.initializers.Constant(1.0))

    def call(self, inputs):
        if (self.kk < 1e3):
            return tf.clip_by_value(inputs * self.kk, 0, 1) + self.bias
        else:
            return tf.math.sign(inputs) * .5 + (.5 + self.bias)


class BinaryActivationHT(BinaryActivation):
    """Hard-tanh variant that hardens to sign(x)."""

    def call(self, inputs):
        if (self.kk < 1e3):
            return tf.clip_by_value(inputs * self.kk, -1, 1) + self.bias
        else:
            return tf.math.sign(inputs) + self.bias


class BinaryActivationRL(BinaryActivation):
    """ReLU-gated variant that hardens to a {0, 1}-valued step."""

    def build(self, input_shape):
        self.bias = self.add_weight(trainable=self.ker_bias,
                                    initializer=tf.keras.initializers.Constant(0.0))
        self.kk = self.add_weight(trainable=False,
                                  initializer=tf.keras.initializers.Constant(1.0))

    def call(self, inputs):
        if (self.kk < 1e3):
            return tf.math.tanh(tf.nn.relu(inputs) * self.kk) + self.bias
        else:
            return tf.math.sign(inputs) / 2 + (.5 + self.bias)


class BinaryActivationBS(tf.keras.layers.Layer):
    """Binary step with a per-unit threshold (bias) tracked from the running
    min/max of activations observed during training."""

    def __init__(self):
        super(BinaryActivationBS, self).__init__()

    def build(self, input_shape):
        self.bis2 = self.add_weight(initializer=tf.keras.initializers.Constant(0.0))
        self.kk = self.add_weight(trainable=False,
                                  initializer=tf.keras.initializers.Constant(1.0))
        self.bias = self.add_weight(trainable=False, shape=input_shape[1:],
                                    initializer=tf.keras.initializers.Constant(0.0))
        self.maxs = self.add_weight(trainable=False, shape=input_shape[1:],
                                    initializer=tf.keras.initializers.Constant(0.0))
        self.mins = self.add_weight(trainable=False, shape=input_shape[1:],
                                    initializer=tf.keras.initializers.Constant(0.0))

    def set_bias(self):
        # Nudge the threshold toward the observed activation range, then
        # reset the running min/max trackers.
        self.bias.assign(tf.clip_by_value(self.bias, self.mins, self.maxs) * .1 + self.bias * .9)
        # self.bias.assign(tf.clip_by_value(self.bias,
        #                                   self.mins+(self.maxs-self.mins)*5e-3*tf.random.uniform(shape=self.mins.shape),
        #                                   self.maxs-(self.maxs-self.mins)*5e-3*tf.random.uniform(shape=self.mins.shape))
        #                  )
        self.mins.assign(self.bias + 1e5)
        self.maxs.assign(self.bias - 1e5)

    def set_kk(self, kknew):
        self.kk.assign(kknew)

    def call(self, inputs, training=False):
        if (training):
            self.maxs.assign(tf.reduce_max([tf.reduce_max(inputs, axis=0), self.maxs], axis=0))
            self.mins.assign(tf.reduce_min([tf.reduce_min(inputs, axis=0), self.mins], axis=0))
        if (self.kk < 1e3):
            return tf.math.sigmoid(self.kk * (inputs - self.bias)) + self.bis2
        else:
            return tf.math.sign(inputs - self.bias) * .5 + .5 + self.bis2


class BinaryActivationP(tf.keras.layers.Layer):
    """Leaky-ReLU + sigmoid variant that hardens to a {0, 1}-valued step."""

    def __init__(self, ker_bias=False):
        super(BinaryActivationP, self).__init__()
        self.ker_bias = ker_bias

    def build(self, input_shape):
        self.bias = self.add_weight(trainable=self.ker_bias,
                                    initializer=tf.keras.initializers.Constant(0.0))
        self.kk = self.add_weight(trainable=False,
                                  initializer=tf.keras.initializers.Constant(1.0))

    def set_kk(self, kkx):
        self.kk.assign(kkx)

    def call(self, inputs):
        if (self.kk < 1e3):
            return tf.nn.sigmoid(tf.nn.leaky_relu(inputs, .5) * self.kk) + self.bias
        else:
return tf.math.sign(inputs)/2+(.5+self.bias) | 47.449735 | 155 | 0.636151 | 1,219 | 8,968 | 4.543068 | 0.076292 | 0.054893 | 0.06338 | 0.146262 | 0.862405 | 0.861502 | 0.849765 | 0.828277 | 0.802095 | 0.799567 | 0 | 0.018025 | 0.226695 | 8,968 | 189 | 156 | 47.449735 | 0.780534 | 0.032114 | 0 | 0.662252 | 0 | 0 | 0.00283 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.218543 | false | 0 | 0.006623 | 0 | 0.443709 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
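# --- Hedged usage note for the layers above (added commentary) ---
# In this adiabatic binarization scheme, the non-trainable sharpness kk is
# expected to be annealed upward during training so tanh(kk * w) approaches
# sign(w); once kk >= 1e3 the layers switch to the hard sign() path. A
# typical (assumed) training-loop pattern would be:
#
#   for layer in model.layers:
#       if hasattr(layer, "set_kk"):
#           layer.set_kk(new_kk)  # the schedule for new_kk is not defined here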
ea041348d06f123164e52982294774ec1184d14b | 16,664 | py | Python | scikitplot/tests/test_classifiers.py | leozhoujf/scikit-plot | 2dd3e6a76df77edcbd724c4db25575f70abb57cb | [
"MIT"
] | 2,360 | 2017-02-12T01:43:09.000Z | 2022-03-31T10:06:31.000Z | scikitplot/tests/test_classifiers.py | leozhoujf/scikit-plot | 2dd3e6a76df77edcbd724c4db25575f70abb57cb | [
"MIT"
] | 79 | 2017-02-12T21:42:08.000Z | 2022-02-28T03:00:44.000Z | scikitplot/tests/test_classifiers.py | leozhoujf/scikit-plot | 2dd3e6a76df77edcbd724c4db25575f70abb57cb | [
"MIT"
] | 302 | 2017-02-17T19:36:33.000Z | 2022-01-28T16:22:06.000Z | from __future__ import absolute_import
import unittest

import scikitplot
import warnings

from sklearn.datasets import load_iris as load_data
from sklearn.datasets import load_breast_cancer
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.exceptions import NotFittedError

import numpy as np
import matplotlib.pyplot as plt

import scikitplot.plotters as skplt


def convert_labels_into_string(y_true):
    return ["A" if x == 0 else x for x in y_true]


class TestClassifierFactory(unittest.TestCase):

    def setUp(self):
        class Classifier:
            def __init__(self):
                pass

            def fit(self):
                pass

            def predict(self):
                pass

            def score(self):
                pass

            def predict_proba(self):
                pass

        class PartialClassifier:
            def __init__(self):
                pass

            def fit(self):
                pass

            def predict(self):
                pass

            def score(self):
                pass

        class NotClassifier:
            def __init__(self):
                pass

        self.Classifier = Classifier
        self.PartialClassifier = PartialClassifier
        self.NotClassifier = NotClassifier

    def test_instance_validation(self):
        clf = self.Classifier()
        scikitplot.classifier_factory(clf)

        not_clf = self.NotClassifier()
        self.assertRaises(TypeError, scikitplot.classifier_factory, not_clf)

        partial_clf = self.PartialClassifier()
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            scikitplot.classifier_factory(partial_clf)
            assert len(w) == 2
            assert issubclass(w[-1].category, UserWarning)
            assert " not in clf. Some plots may not be possible to generate." in str(w[-1].message)

    def test_method_insertion(self):
        clf = self.Classifier()
        scikitplot.classifier_factory(clf)
        assert hasattr(clf, 'plot_learning_curve')
        assert hasattr(clf, 'plot_confusion_matrix')
        assert hasattr(clf, 'plot_roc_curve')
        assert hasattr(clf, 'plot_ks_statistic')
        assert hasattr(clf, 'plot_precision_recall_curve')
        assert hasattr(clf, 'plot_feature_importances')

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            scikitplot.classifier_factory(clf)
            assert len(w) == 7
            for warning in w[1:]:
                assert issubclass(warning.category, UserWarning)
                assert ' method already in clf. ' \
                       'Overriding anyway. This may ' \
                       'result in unintended behavior.' in str(warning.message)


class TestPlotLearningCurve(unittest.TestCase):

    def setUp(self):
        np.random.seed(0)
        self.X, self.y = load_data(return_X_y=True)
        p = np.random.permutation(len(self.X))
        self.X, self.y = self.X[p], self.y[p]

    def tearDown(self):
        plt.close("all")

    def test_string_classes(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_learning_curve(self.X, convert_labels_into_string(self.y))

    def test_cv(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_learning_curve(self.X, self.y)
        ax = clf.plot_learning_curve(self.X, self.y, cv=5)

    def test_train_sizes(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_learning_curve(self.X, self.y, train_sizes=np.linspace(0.1, 1.0, 8))

    def test_n_jobs(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_learning_curve(self.X, self.y, n_jobs=-1)

    def test_ax(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        fig, ax = plt.subplots(1, 1)
        out_ax = clf.plot_learning_curve(self.X, self.y)
        assert ax is not out_ax
        out_ax = clf.plot_learning_curve(self.X, self.y, ax=ax)
        assert ax is out_ax


class TestPlotConfusionMatrix(unittest.TestCase):

    def setUp(self):
        np.random.seed(0)
        self.X, self.y = load_data(return_X_y=True)
        p = np.random.permutation(len(self.X))
        self.X, self.y = self.X[p], self.y[p]

    def tearDown(self):
        plt.close("all")

    def test_string_classes(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_confusion_matrix(self.X, convert_labels_into_string(self.y))

    def test_cv(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_confusion_matrix(self.X, self.y)
        ax = clf.plot_confusion_matrix(self.X, self.y, cv=5)

    def test_normalize(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_confusion_matrix(self.X, self.y, normalize=True)
        ax = clf.plot_confusion_matrix(self.X, self.y, normalize=False)

    def test_labels(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_confusion_matrix(self.X, self.y, labels=[0, 1, 2])

    def test_true_pred_labels(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)

        true_labels = [0, 1]
        pred_labels = [0, 2]

        ax = clf.plot_confusion_matrix(self.X, self.y, true_labels=true_labels,
                                       pred_labels=pred_labels)

    def test_cmap(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_confusion_matrix(self.X, self.y, cmap='nipy_spectral')
        ax = clf.plot_confusion_matrix(self.X, self.y, cmap=plt.cm.nipy_spectral)

    def test_do_cv(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_confusion_matrix(self.X, self.y)
        self.assertRaises(NotFittedError, clf.plot_confusion_matrix, self.X,
                          self.y, do_cv=False)

    def test_shuffle(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_confusion_matrix(self.X, self.y, shuffle=True)
        ax = clf.plot_confusion_matrix(self.X, self.y, shuffle=False)

    def test_ax(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        fig, ax = plt.subplots(1, 1)
        out_ax = clf.plot_confusion_matrix(self.X, self.y)
        assert ax is not out_ax
        out_ax = clf.plot_confusion_matrix(self.X, self.y, ax=ax)
        assert ax is out_ax

    def test_array_like(self):
        ax = skplt.plot_confusion_matrix([0, 1], [1, 0])


class TestPlotROCCurve(unittest.TestCase):

    def setUp(self):
        np.random.seed(0)
        self.X, self.y = load_data(return_X_y=True)
        p = np.random.permutation(len(self.X))
        self.X, self.y = self.X[p], self.y[p]

    def tearDown(self):
        plt.close("all")

    def test_string_classes(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_roc_curve(self.X, convert_labels_into_string(self.y))

    def test_predict_proba(self):
        np.random.seed(0)

        class DummyClassifier:
            def __init__(self):
                pass

            def fit(self):
                pass

            def predict(self):
                pass

            def score(self):
                pass

        clf = DummyClassifier()
        scikitplot.classifier_factory(clf)
        self.assertRaises(TypeError, clf.plot_roc_curve, self.X, self.y)

    def test_do_cv(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_roc_curve(self.X, self.y)
        self.assertRaises(AttributeError, clf.plot_roc_curve, self.X, self.y,
                          do_cv=False)

    def test_ax(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        fig, ax = plt.subplots(1, 1)
        out_ax = clf.plot_roc_curve(self.X, self.y)
        assert ax is not out_ax
        out_ax = clf.plot_roc_curve(self.X, self.y, ax=ax)
        assert ax is out_ax

    def test_cmap(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_roc_curve(self.X, self.y, cmap='nipy_spectral')
        ax = clf.plot_roc_curve(self.X, self.y, cmap=plt.cm.nipy_spectral)

    def test_curve_diffs(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax_macro = clf.plot_roc_curve(self.X, self.y, curves='macro')
        ax_micro = clf.plot_roc_curve(self.X, self.y, curves='micro')
        ax_class = clf.plot_roc_curve(self.X, self.y, curves='each_class')
        self.assertNotEqual(ax_macro, ax_micro, ax_class)

    def test_invalid_curve_arg(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        self.assertRaises(ValueError, clf.plot_roc_curve, self.X, self.y,
                          curves='zzz')

    def test_array_like(self):
        ax = skplt.plot_roc_curve([0, 1], [[0.8, 0.2], [0.2, 0.8]])


class TestPlotKSStatistic(unittest.TestCase):

    def setUp(self):
        np.random.seed(0)
        self.X, self.y = load_breast_cancer(return_X_y=True)
        p = np.random.permutation(len(self.X))
        self.X, self.y = self.X[p], self.y[p]

    def tearDown(self):
        plt.close("all")

    def test_string_classes(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_ks_statistic(self.X, convert_labels_into_string(self.y))

    def test_predict_proba(self):
        np.random.seed(0)

        class DummyClassifier:
            def __init__(self):
                pass

            def fit(self):
                pass

            def predict(self):
                pass

            def score(self):
                pass

        clf = DummyClassifier()
        scikitplot.classifier_factory(clf)
        self.assertRaises(TypeError, clf.plot_ks_statistic, self.X, self.y)

    def test_two_classes(self):
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        X, y = load_data(return_X_y=True)
        self.assertRaises(ValueError, clf.plot_ks_statistic, X, y)

    def test_do_cv(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_ks_statistic(self.X, self.y)
        self.assertRaises(AttributeError, clf.plot_ks_statistic, self.X, self.y,
                          do_cv=False)

    def test_ax(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        fig, ax = plt.subplots(1, 1)
        out_ax = clf.plot_ks_statistic(self.X, self.y)
        assert ax is not out_ax
        out_ax = clf.plot_ks_statistic(self.X, self.y, ax=ax)
        assert ax is out_ax

    def test_array_like(self):
        ax = skplt.plot_ks_statistic([0, 1], [[0.8, 0.2], [0.2, 0.8]])


class TestPlotPrecisionRecall(unittest.TestCase):

    def setUp(self):
        np.random.seed(0)
        self.X, self.y = load_data(return_X_y=True)
        p = np.random.permutation(len(self.X))
        self.X, self.y = self.X[p], self.y[p]

    def tearDown(self):
        plt.close("all")

    def test_string_classes(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_precision_recall_curve(self.X, convert_labels_into_string(self.y))

    def test_predict_proba(self):
        np.random.seed(0)

        class DummyClassifier:
            def __init__(self):
                pass

            def fit(self):
                pass

            def predict(self):
                pass

            def score(self):
                pass

        clf = DummyClassifier()
        scikitplot.classifier_factory(clf)
        self.assertRaises(TypeError, clf.plot_precision_recall_curve, self.X, self.y)

    def test_do_cv(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_precision_recall_curve(self.X, self.y)
        self.assertRaises(AttributeError, clf.plot_precision_recall_curve, self.X, self.y,
                          do_cv=False)

    def test_ax(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        fig, ax = plt.subplots(1, 1)
        out_ax = clf.plot_precision_recall_curve(self.X, self.y)
        assert ax is not out_ax
        out_ax = clf.plot_precision_recall_curve(self.X, self.y, ax=ax)
        assert ax is out_ax

    def test_curve_diffs(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax_micro = clf.plot_precision_recall_curve(self.X, self.y, curves='micro')
        ax_class = clf.plot_precision_recall_curve(self.X, self.y, curves='each_class')
        self.assertNotEqual(ax_micro, ax_class)

    def test_cmap(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        ax = clf.plot_precision_recall_curve(self.X, self.y, cmap='nipy_spectral')
        ax = clf.plot_precision_recall_curve(self.X, self.y, cmap=plt.cm.nipy_spectral)

    def test_invalid_curve_arg(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        self.assertRaises(ValueError, clf.plot_precision_recall_curve, self.X, self.y,
                          curves='zzz')

    def test_array_like(self):
        ax = skplt.plot_precision_recall_curve([0, 1], [[0.8, 0.2], [0.2, 0.8]])


class TestFeatureImportances(unittest.TestCase):

    def setUp(self):
        np.random.seed(0)
        self.X, self.y = load_data(return_X_y=True)
        p = np.random.permutation(len(self.X))
        self.X, self.y = self.X[p], self.y[p]

    def tearDown(self):
        plt.close("all")

    def test_string_classes(self):
        np.random.seed(0)
        clf = RandomForestClassifier()
        scikitplot.classifier_factory(clf)
        clf.fit(self.X, convert_labels_into_string(self.y))
        ax = clf.plot_feature_importances()

    def test_feature_importances_in_clf(self):
        np.random.seed(0)
        clf = LogisticRegression()
        scikitplot.classifier_factory(clf)
        clf.fit(self.X, self.y)
        self.assertRaises(TypeError, clf.plot_feature_importances)

    def test_feature_names(self):
        np.random.seed(0)
        clf = RandomForestClassifier()
        scikitplot.classifier_factory(clf)
        clf.fit(self.X, self.y)
        ax = clf.plot_feature_importances(feature_names=["a", "b", "c", "d"])

    def test_max_num_features(self):
        np.random.seed(0)
        clf = RandomForestClassifier()
        scikitplot.classifier_factory(clf)
        clf.fit(self.X, self.y)
        ax = clf.plot_feature_importances(max_num_features=2)
        ax = clf.plot_feature_importances(max_num_features=4)
        ax = clf.plot_feature_importances(max_num_features=6)

    def test_order(self):
        np.random.seed(0)
        clf = RandomForestClassifier()
        scikitplot.classifier_factory(clf)
        clf.fit(self.X, self.y)
        ax = clf.plot_feature_importances(order='ascending')
        ax = clf.plot_feature_importances(order='descending')
        ax = clf.plot_feature_importances(order=None)

    def test_ax(self):
        np.random.seed(0)
        clf = RandomForestClassifier()
        scikitplot.classifier_factory(clf)
        clf.fit(self.X, self.y)
        fig, ax = plt.subplots(1, 1)
        out_ax = clf.plot_feature_importances()
        assert ax is not out_ax
        out_ax = clf.plot_feature_importances(ax=ax)
        assert ax is out_ax


if __name__ == '__main__':
    unittest.main()
| 32.357282 | 99 | 0.62398 | 2,163 | 16,664 | 4.604253 | 0.082755 | 0.040667 | 0.062356 | 0.063259 | 0.832815 | 0.808716 | 0.786525 | 0.774475 | 0.743749 | 0.718646 | 0 | 0.009371 | 0.269983 | 16,664 | 514 | 100 | 32.420233 | 0.809289 | 0 | 0 | 0.670732 | 0 | 0 | 0.024124 | 0.004321 | 0 | 0 | 0 | 0 | 0.092683 | 1 | 0.197561 | false | 0.053659 | 0.063415 | 0.002439 | 0.295122 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
ea426702df45f7e9b4352de4c14db8b4bc449dd7 | 109 | py | Python | exemplo_de_test/test_basico.py | brenodocarmo/curso-flask | 7ca48cc636cfc4d8e88d5df8cc364047a8577669 | [
"Unlicense"
] | 1 | 2020-07-26T18:47:43.000Z | 2020-07-26T18:47:43.000Z | exemplo_de_test/test_basico.py | brenodocarmo/curso-flask | 7ca48cc636cfc4d8e88d5df8cc364047a8577669 | [
"Unlicense"
] | null | null | null | exemplo_de_test/test_basico.py | brenodocarmo/curso-flask | 7ca48cc636cfc4d8e88d5df8cc364047a8577669 | [
"Unlicense"
] | null | null | null | def test_one_plus_one_is_two():
assert 1 + 1 == 2
def test_negative_1_plus_is_3():
    # Intentionally failing assertion: the name marks this as a negative
    # example, so pytest should report this test as failed.
    assert 1 + 1 == 3
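# How these tests are typically run (hedged: assumes pytest is installed in
# the course environment; the path comes from this file's repo location):
#
#     pytest exemplo_de_test/test_basico.py -v
#
# The first test passes; the second is expected to fail, demonstrating
# pytest's failure report.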
| 10.9 | 32 | 0.669725 | 22 | 109 | 2.863636 | 0.5 | 0.222222 | 0.253968 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094118 | 0.220183 | 109 | 9 | 33 | 12.111111 | 0.647059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.5 | true | 0 | 0 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ea7532a7166c35a78e8d6ae91fedf30fe396d7aa | 1,866 | py | Python | tests/cycle_component.py | lzmchina/OnceML | f30d9037d2e492d8d45b858f2be3b27fc5258356 | [
"MIT"
] | 1 | 2022-01-01T07:15:03.000Z | 2022-01-01T07:15:03.000Z | tests/cycle_component.py | lzmchina/OnceML | f30d9037d2e492d8d45b858f2be3b27fc5258356 | [
"MIT"
] | null | null | null | tests/cycle_component.py | lzmchina/OnceML | f30d9037d2e492d8d45b858f2be3b27fc5258356 | [
"MIT"
] | null | null | null | import time
from onceml.components.base import BaseComponent, BaseExecutor
class myExecutor1(BaseExecutor):
    def Cycle(self, state, params, data_dir, input_channels=None, input_artifacts=None):
print('current component:', self.__class__)
print('params', params)
print('state', state)
print('input_channels', input_channels)
print('input_artifacts', input_artifacts)
        for key, value in (input_channels or {}).items():  # tolerate the None default
print(key)
print(value.__dict__)
print('input_artifacts', input_artifacts)
        for key, value in (input_artifacts or {}).items():  # tolerate the None default
print(key)
print(value.__dict__)
time.sleep(60)
return {'resulta': 'fdfdf', 'resultb': 25}
def pre_execute(self):
print('this is pre_execute')
class myComponent1(BaseComponent):
def __init__(self, executor, inputs=None, **args):
super().__init__(executor=executor, inputs=inputs, **args)
class myExecutor2(BaseExecutor):
    def Cycle(self, state, params, data_dir, input_channels=None, input_artifacts=None):
print('current component:', self.__class__)
print('params', params)
print('state', state)
print('input_channels', input_channels)
print('input_artifacts', input_artifacts)
        for key, value in (input_channels or {}).items():  # tolerate the None default
print(key)
print(value.__dict__)
print('input_artifacts', input_artifacts)
        for key, value in (input_artifacts or {}).items():  # tolerate the None default
print(key)
print(value.__dict__)
time.sleep(60)
return {'resulta': 'fdfdf', 'resultb': 25}
def pre_execute(self):
print('this is pre_execute')
class myComponent2(BaseComponent):
def __init__(self, executor, inputs=None, **args):
super().__init__(executor=executor, inputs=inputs, **args) | 37.32 | 87 | 0.64791 | 209 | 1,866 | 5.4689 | 0.23445 | 0.146982 | 0.066492 | 0.08399 | 0.894138 | 0.894138 | 0.894138 | 0.894138 | 0.894138 | 0.894138 | 0 | 0.008392 | 0.233655 | 1,866 | 50 | 88 | 37.32 | 0.790909 | 0 | 0 | 0.863636 | 0 | 0 | 0.118907 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.136364 | false | 0 | 0.045455 | 0 | 0.318182 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
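# Illustrative wiring (hedged: OnceML's BaseComponent contract is not shown
# in this file, so this sketch is inferred only from the constructor
# signatures above and from Cycle() receiving channels/artifacts upstream):
#
#     c1 = myComponent1(executor=myExecutor1)
#     c2 = myComponent2(executor=myExecutor2, inputs=[c1])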
ea76be71c9f5af959ba65ff9c8894cd253c8e6cd | 2,797 | py | Python | isiscb/isisdata/migrations/0054_auto_20170205_2248.py | bgopalachary/IsisCB | c28e3f504eea60ebeff38318d8bb2071abb28ebb | [
"MIT"
] | 4 | 2016-01-25T20:35:33.000Z | 2020-04-07T15:39:52.000Z | isiscb/isisdata/migrations/0054_auto_20170205_2248.py | bgopalachary/IsisCB | c28e3f504eea60ebeff38318d8bb2071abb28ebb | [
"MIT"
] | 41 | 2015-08-19T17:34:41.000Z | 2022-03-11T23:19:01.000Z | isiscb/isisdata/migrations/0054_auto_20170205_2248.py | bgopalachary/IsisCB | c28e3f504eea60ebeff38318d8bb2071abb28ebb | [
"MIT"
] | 2 | 2020-11-25T20:18:18.000Z | 2021-06-24T15:15:41.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('isisdata', '0053_auto_20170205_2125'),
]
operations = [
migrations.AlterField(
model_name='authority',
name='classification_code',
            field=models.CharField(help_text=b'Alphanumeric code used in previous classification systems to describe classification terms. Primarily of historical interest only. Used primarily for codes for the classificationTerms; however, it can be used for other kinds of terms as appropriate.', max_length=255, null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='authority',
name='classification_hierarchy',
field=models.CharField(help_text=b'Used for Classification Terms to describe where they fall in the hierarchy.', max_length=255, null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='datasetrule',
name='dataset',
field=models.CharField(default=None, max_length=255, null=True, blank=True),
),
migrations.AlterField(
model_name='historicalauthority',
name='classification_code',
            field=models.CharField(help_text=b'Alphanumeric code used in previous classification systems to describe classification terms. Primarily of historical interest only. Used primarily for codes for the classificationTerms; however, it can be used for other kinds of terms as appropriate.', max_length=255, null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='historicalauthority',
name='classification_hierarchy',
field=models.CharField(help_text=b'Used for Classification Terms to describe where they fall in the hierarchy.', max_length=255, null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='historicalperson',
name='classification_code',
            field=models.CharField(help_text=b'Alphanumeric code used in previous classification systems to describe classification terms. Primarily of historical interest only. Used primarily for codes for the classificationTerms; however, it can be used for other kinds of terms as appropriate.', max_length=255, null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='historicalperson',
name='classification_hierarchy',
field=models.CharField(help_text=b'Used for Classification Terms to describe where they fall in the hierarchy.', max_length=255, null=True, db_index=True, blank=True),
),
]
| 55.94 | 350 | 0.695388 | 326 | 2,797 | 5.843558 | 0.223926 | 0.073491 | 0.091864 | 0.106562 | 0.889239 | 0.88084 | 0.88084 | 0.856168 | 0.856168 | 0.856168 | 0 | 0.017399 | 0.219163 | 2,797 | 49 | 351 | 57.081633 | 0.854853 | 0.007508 | 0 | 0.744186 | 0 | 0.069767 | 0.443043 | 0.034247 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.046512 | 0 | 0.116279 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ea8ee5713095fdba38da7bcf287501b6b8d90d98 | 159,850 | py | Python | pyboto3/imagebuilder.py | gehad-shaat/pyboto3 | 4a0c2851a8bc04fb1c71c36086f7bb257e48181d | [
"MIT"
] | 91 | 2016-12-31T11:38:37.000Z | 2021-09-16T19:33:23.000Z | pyboto3/imagebuilder.py | gehad-shaat/pyboto3 | 4a0c2851a8bc04fb1c71c36086f7bb257e48181d | [
"MIT"
] | 7 | 2017-01-02T18:54:23.000Z | 2020-08-11T13:54:02.000Z | pyboto3/imagebuilder.py | gehad-shaat/pyboto3 | 4a0c2851a8bc04fb1c71c36086f7bb257e48181d | [
"MIT"
] | 26 | 2016-12-31T13:11:00.000Z | 2022-03-03T21:01:12.000Z | '''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
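
    A minimal usage sketch (hedged: assumes a boto3 'imagebuilder' client and
    that the operation you check, e.g. 'list_images', is paginated on it):

    :example: if client.can_paginate('list_images'):
        paginator = client.get_paginator('list_images')
        for page in paginator.paginate():
            print(page)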
"""
pass
def cancel_image_creation(imageBuildVersionArn=None, clientToken=None):
"""
    CancelImageCreation cancels the creation of an image. This operation can only be used on images in a non-terminal state.
See also: AWS API Documentation
Exceptions
:example: response = client.cancel_image_creation(
imageBuildVersionArn='string',
clientToken='string'
)
:type imageBuildVersionArn: string
:param imageBuildVersionArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image whose creation you want to cancel.\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
    Returns
    Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'imageBuildVersionArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
imageBuildVersionArn (string) --
The Amazon Resource Name (ARN) of the image whose creation has been cancelled.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
:return: {
'requestId': 'string',
'clientToken': 'string',
'imageBuildVersionArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
"""
pass
def create_component(name=None, semanticVersion=None, description=None, changeDescription=None, platform=None, supportedOsVersions=None, data=None, uri=None, kmsKeyId=None, tags=None, clientToken=None):
"""
Creates a new component that can be used to build, validate, test, and assess your image.
See also: AWS API Documentation
Exceptions
:example: response = client.create_component(
name='string',
semanticVersion='string',
description='string',
changeDescription='string',
platform='Windows'|'Linux',
supportedOsVersions=[
'string',
],
data='string',
uri='string',
kmsKeyId='string',
tags={
'string': 'string'
},
clientToken='string'
)
:type name: string
:param name: [REQUIRED]\nThe name of the component.\n
:type semanticVersion: string
:param semanticVersion: [REQUIRED]\nThe semantic version of the component. This version follows the semantic version syntax. For example, major.minor.patch. This could be versioned like software (2.0.1) or like a date (2019.12.01).\n
:type description: string
:param description: The description of the component. Describes the contents of the component.
:type changeDescription: string
:param changeDescription: The change description of the component. Describes what change has been made in this version, or what makes this version different from other versions of this component.
:type platform: string
:param platform: [REQUIRED]\nThe platform of the component.\n
:type supportedOsVersions: list
:param supportedOsVersions: The operating system (OS) version supported by the component. If the OS information is available, a prefix match is performed against the parent image OS version during image recipe creation.\n\n(string) --\n\n
:type data: string
:param data: The data of the component. Used to specify the data inline. Either data or uri can be used to specify the data within the component.
:type uri: string
:param uri: The uri of the component. Must be an S3 URL and the requester must have permission to access the S3 bucket. If you use S3, you can specify component content up to your service quota. Either data or uri can be used to specify the data within the component.
:type kmsKeyId: string
:param kmsKeyId: The ID of the KMS key that should be used to encrypt this component.
:type tags: dict
:param tags: The tags of the component.\n\n(string) --\n(string) --\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token of the component.\nThis field is autopopulated if not provided.\n
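
    A minimal inline-data sketch (hedged: illustrates the either/or between
    the 'data' and 'uri' parameters described above; the YAML document text
    is illustrative only, not a validated component definition):

    :example: response = client.create_component(
        name='hello-world',
        semanticVersion='1.0.0',
        platform='Linux',
        data='name: hello-world\nschemaVersion: 1.0\nphases: []',
        clientToken='unique-token-123'
    )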
:rtype: dict
    Returns
    Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'componentBuildVersionArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
componentBuildVersionArn (string) --
The Amazon Resource Name (ARN) of the component that was created by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.InvalidVersionNumberException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.InvalidParameterCombinationException
:return: {
'requestId': 'string',
'clientToken': 'string',
'componentBuildVersionArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.InvalidVersionNumberException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.InvalidParameterCombinationException
"""
pass
def create_distribution_configuration(name=None, description=None, distributions=None, tags=None, clientToken=None):
"""
Creates a new distribution configuration. Distribution configurations define and configure the outputs of your pipeline.
See also: AWS API Documentation
Exceptions
:example: response = client.create_distribution_configuration(
name='string',
description='string',
distributions=[
{
'region': 'string',
'amiDistributionConfiguration': {
'name': 'string',
'description': 'string',
'amiTags': {
'string': 'string'
},
'launchPermission': {
'userIds': [
'string',
],
'userGroups': [
'string',
]
}
},
'licenseConfigurationArns': [
'string',
]
},
],
tags={
'string': 'string'
},
clientToken='string'
)
:type name: string
:param name: [REQUIRED]\nThe name of the distribution configuration.\n
:type description: string
:param description: The description of the distribution configuration.
:type distributions: list
:param distributions: [REQUIRED]\nThe distributions of the distribution configuration.\n\n(dict) --Defines the settings for a specific Region.\n\nregion (string) -- [REQUIRED]The target Region.\n\namiDistributionConfiguration (dict) --The specific AMI settings (for example, launch permissions, AMI tags).\n\nname (string) --The name of the distribution configuration.\n\ndescription (string) --The description of the distribution configuration.\n\namiTags (dict) --The tags to apply to AMIs distributed to this Region.\n\n(string) --\n(string) --\n\n\n\n\nlaunchPermission (dict) --Launch permissions can be used to configure which AWS accounts can use the AMI to launch instances.\n\nuserIds (list) --The AWS account ID.\n\n(string) --\n\n\nuserGroups (list) --The name of the group.\n\n(string) --\n\n\n\n\n\n\nlicenseConfigurationArns (list) --The License Manager Configuration to associate with the AMI in the specified Region.\n\n(string) --\n\n\n\n\n\n
:type tags: dict
:param tags: The tags of the distribution configuration.\n\n(string) --\n(string) --\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token of the distribution configuration.\nThis field is autopopulated if not provided.\n
:rtype: dict
    Returns
    Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'distributionConfigurationArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
distributionConfigurationArn (string) --
The Amazon Resource Name (ARN) of the distribution configuration that was created by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
imagebuilder.Client.exceptions.InvalidParameterCombinationException
:return: {
'requestId': 'string',
'clientToken': 'string',
'distributionConfigurationArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
imagebuilder.Client.exceptions.InvalidParameterCombinationException
"""
pass
def create_image(imageRecipeArn=None, distributionConfigurationArn=None, infrastructureConfigurationArn=None, imageTestsConfiguration=None, enhancedImageMetadataEnabled=None, tags=None, clientToken=None):
"""
Creates a new image. This request will create a new image along with all of the configured output resources defined in the distribution configuration.
See also: AWS API Documentation
Exceptions
:example: response = client.create_image(
imageRecipeArn='string',
distributionConfigurationArn='string',
infrastructureConfigurationArn='string',
imageTestsConfiguration={
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
enhancedImageMetadataEnabled=True|False,
tags={
'string': 'string'
},
clientToken='string'
)
:type imageRecipeArn: string
:param imageRecipeArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image recipe that defines how images are configured, tested, and assessed.\n
:type distributionConfigurationArn: string
:param distributionConfigurationArn: The Amazon Resource Name (ARN) of the distribution configuration that defines and configures the outputs of your pipeline.
:type infrastructureConfigurationArn: string
:param infrastructureConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the infrastructure configuration that defines the environment in which your image will be built and tested.\n
:type imageTestsConfiguration: dict
:param imageTestsConfiguration: The image tests configuration of the image.\n\nimageTestsEnabled (boolean) --Defines if tests should be executed when building this image.\n\ntimeoutMinutes (integer) --The maximum time in minutes that tests are permitted to run.\n\n\n
:type enhancedImageMetadataEnabled: boolean
:param enhancedImageMetadataEnabled: Collects additional information about the image being created, including the operating system (OS) version and package list. This information is used to enhance the overall experience of using EC2 Image Builder. Enabled by default.
:type tags: dict
:param tags: The tags of the image.\n\n(string) --\n(string) --\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
    Returns
    Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'imageBuildVersionArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
imageBuildVersionArn (string) --
The Amazon Resource Name (ARN) of the image that was created by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
:return: {
'requestId': 'string',
'clientToken': 'string',
'imageBuildVersionArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
"""
pass
def create_image_pipeline(name=None, description=None, imageRecipeArn=None, infrastructureConfigurationArn=None, distributionConfigurationArn=None, imageTestsConfiguration=None, enhancedImageMetadataEnabled=None, schedule=None, status=None, tags=None, clientToken=None):
"""
Creates a new image pipeline. Image pipelines enable you to automate the creation and distribution of images.
See also: AWS API Documentation
Exceptions
:example: response = client.create_image_pipeline(
name='string',
description='string',
imageRecipeArn='string',
infrastructureConfigurationArn='string',
distributionConfigurationArn='string',
imageTestsConfiguration={
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
enhancedImageMetadataEnabled=True|False,
schedule={
'scheduleExpression': 'string',
'pipelineExecutionStartCondition': 'EXPRESSION_MATCH_ONLY'|'EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE'
},
status='DISABLED'|'ENABLED',
tags={
'string': 'string'
},
clientToken='string'
)
:type name: string
:param name: [REQUIRED]\nThe name of the image pipeline.\n
:type description: string
:param description: The description of the image pipeline.
:type imageRecipeArn: string
:param imageRecipeArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image recipe that will be used to configure images created by this image pipeline.\n
:type infrastructureConfigurationArn: string
:param infrastructureConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the infrastructure configuration that will be used to build images created by this image pipeline.\n
:type distributionConfigurationArn: string
:param distributionConfigurationArn: The Amazon Resource Name (ARN) of the distribution configuration that will be used to configure and distribute images created by this image pipeline.
:type imageTestsConfiguration: dict
:param imageTestsConfiguration: The image test configuration of the image pipeline.\n\nimageTestsEnabled (boolean) --Defines if tests should be executed when building this image.\n\ntimeoutMinutes (integer) --The maximum time in minutes that tests are permitted to run.\n\n\n
:type enhancedImageMetadataEnabled: boolean
:param enhancedImageMetadataEnabled: Collects additional information about the image being created, including the operating system (OS) version and package list. This information is used to enhance the overall experience of using EC2 Image Builder. Enabled by default.
:type schedule: dict
:param schedule: The schedule of the image pipeline.\n\nscheduleExpression (string) --The expression determines how often EC2 Image Builder evaluates your pipelineExecutionStartCondition .\n\npipelineExecutionStartCondition (string) --The condition configures when the pipeline should trigger a new image build. When the pipelineExecutionStartCondition is set to EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE , EC2 Image Builder will build a new image only when there are known changes pending. When it is set to EXPRESSION_MATCH_ONLY , it will build a new image every time the CRON expression matches the current time.\n\n\n
:type status: string
:param status: The status of the image pipeline.
:type tags: dict
:param tags: The tags of the image pipeline.\n\n(string) --\n(string) --\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
    Returns
    Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'imagePipelineArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
imagePipelineArn (string) --
The Amazon Resource Name (ARN) of the image pipeline that was created by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
:return: {
'requestId': 'string',
'clientToken': 'string',
'imagePipelineArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
"""
pass
def create_image_recipe(name=None, description=None, semanticVersion=None, components=None, parentImage=None, blockDeviceMappings=None, tags=None, clientToken=None):
"""
Creates a new image recipe. Image recipes define how images are configured, tested, and assessed.
See also: AWS API Documentation
Exceptions
:example: response = client.create_image_recipe(
name='string',
description='string',
semanticVersion='string',
components=[
{
'componentArn': 'string'
},
],
parentImage='string',
blockDeviceMappings=[
{
'deviceName': 'string',
'ebs': {
'encrypted': True|False,
'deleteOnTermination': True|False,
'iops': 123,
'kmsKeyId': 'string',
'snapshotId': 'string',
'volumeSize': 123,
'volumeType': 'standard'|'io1'|'gp2'|'sc1'|'st1'
},
'virtualName': 'string',
'noDevice': 'string'
},
],
tags={
'string': 'string'
},
clientToken='string'
)
:type name: string
:param name: [REQUIRED]\nThe name of the image recipe.\n
:type description: string
:param description: The description of the image recipe.
:type semanticVersion: string
:param semanticVersion: [REQUIRED]\nThe semantic version of the image recipe.\n
:type components: list
:param components: [REQUIRED]\nThe components of the image recipe.\n\n(dict) --Configuration details of the component.\n\ncomponentArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the component.\n\n\n\n\n
:type parentImage: string
:param parentImage: [REQUIRED]\nThe parent image of the image recipe. The value of the string can be the ARN of the parent image or an AMI ID. The format for the ARN follows this example: arn:aws:imagebuilder:us-west-2:aws:image/windows-server-2016-english-full-base-x86/2019.x.x . The ARN ends with /20xx.x.x , which communicates to EC2 Image Builder that you want to use the latest AMI created in 20xx (year). You can provide the specific version that you want to use, or you can use a wildcard in all of the fields. If you enter an AMI ID for the string value, you must have access to the AMI, and the AMI must be in the same Region in which you are using Image Builder.\n
:type blockDeviceMappings: list
:param blockDeviceMappings: The block device mappings of the image recipe.\n\n(dict) --Defines block device mappings for the instance used to configure your image.\n\ndeviceName (string) --The device to which these mappings apply.\n\nebs (dict) --Use to manage Amazon EBS-specific configuration for this mapping.\n\nencrypted (boolean) --Use to configure device encryption.\n\ndeleteOnTermination (boolean) --Use to configure delete on termination of the associated device.\n\niops (integer) --Use to configure device IOPS.\n\nkmsKeyId (string) --Use to configure the KMS key to use when encrypting the device.\n\nsnapshotId (string) --The snapshot that defines the device contents.\n\nvolumeSize (integer) --Use to override the device\'s volume size.\n\nvolumeType (string) --Use to override the device\'s volume type.\n\n\n\nvirtualName (string) --Use to manage instance ephemeral devices.\n\nnoDevice (string) --Use to remove a mapping from the parent image.\n\n\n\n\n
:type tags: dict
:param tags: The tags of the image recipe.\n\n(string) --\n(string) --\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
    Returns
    Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'imageRecipeArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
imageRecipeArn (string) --
The Amazon Resource Name (ARN) of the image recipe that was created by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.InvalidVersionNumberException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
:return: {
'requestId': 'string',
'clientToken': 'string',
'imageRecipeArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.InvalidVersionNumberException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
"""
pass
def create_infrastructure_configuration(name=None, description=None, instanceTypes=None, instanceProfileName=None, securityGroupIds=None, subnetId=None, logging=None, keyPair=None, terminateInstanceOnFailure=None, snsTopicArn=None, tags=None, clientToken=None):
"""
Creates a new infrastructure configuration. An infrastructure configuration defines the environment in which your image will be built and tested.
See also: AWS API Documentation
Exceptions
:example: response = client.create_infrastructure_configuration(
name='string',
description='string',
instanceTypes=[
'string',
],
instanceProfileName='string',
securityGroupIds=[
'string',
],
subnetId='string',
logging={
's3Logs': {
's3BucketName': 'string',
's3KeyPrefix': 'string'
}
},
keyPair='string',
terminateInstanceOnFailure=True|False,
snsTopicArn='string',
tags={
'string': 'string'
},
clientToken='string'
)
:type name: string
:param name: [REQUIRED]\nThe name of the infrastructure configuration.\n
:type description: string
:param description: The description of the infrastructure configuration.
:type instanceTypes: list
:param instanceTypes: The instance types of the infrastructure configuration. You can specify one or more instance types to use for this build. The service will pick one of these instance types based on availability.\n\n(string) --\n\n
:type instanceProfileName: string
:param instanceProfileName: [REQUIRED]\nThe instance profile to associate with the instance used to customize your EC2 AMI.\n
:type securityGroupIds: list
:param securityGroupIds: The security group IDs to associate with the instance used to customize your EC2 AMI.\n\n(string) --\n\n
:type subnetId: string
:param subnetId: The subnet ID in which to place the instance used to customize your EC2 AMI.
:type logging: dict
:param logging: The logging configuration of the infrastructure configuration.\n\ns3Logs (dict) --The Amazon S3 logging configuration.\n\ns3BucketName (string) --The Amazon S3 bucket in which to store the logs.\n\ns3KeyPrefix (string) --The Amazon S3 path in which to store the logs.\n\n\n\n\n
:type keyPair: string
:param keyPair: The key pair of the infrastructure configuration. This can be used to log on to and debug the instance used to create your image.
:type terminateInstanceOnFailure: boolean
:param terminateInstanceOnFailure: The terminate instance on failure setting of the infrastructure configuration. Set to false if you want Image Builder to retain the instance used to configure your AMI if the build or test phase of your workflow fails.
:type snsTopicArn: string
:param snsTopicArn: The SNS topic on which to send image build events.
:type tags: dict
:param tags: The tags of the infrastructure configuration.\n\n(string) --\n(string) --\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
    Returns
    Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'infrastructureConfigurationArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
infrastructureConfigurationArn (string) --
The Amazon Resource Name (ARN) of the infrastructure configuration that was created by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
:return: {
'requestId': 'string',
'clientToken': 'string',
'infrastructureConfigurationArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.ResourceAlreadyExistsException
"""
pass
def delete_component(componentBuildVersionArn=None):
"""
Deletes a component build version.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_component(
componentBuildVersionArn='string'
)
:type componentBuildVersionArn: string
:param componentBuildVersionArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the component build version to delete.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'componentBuildVersionArn': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
componentBuildVersionArn (string) --The Amazon Resource Name (ARN) of the component build version that was deleted.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceDependencyException
:return: {
'requestId': 'string',
'componentBuildVersionArn': 'string'
}
"""
pass
def delete_distribution_configuration(distributionConfigurationArn=None):
"""
Deletes a distribution configuration.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_distribution_configuration(
distributionConfigurationArn='string'
)
:type distributionConfigurationArn: string
:param distributionConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the distribution configuration to delete.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'distributionConfigurationArn': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
distributionConfigurationArn (string) --The Amazon Resource Name (ARN) of the distribution configuration that was deleted.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceDependencyException
:return: {
'requestId': 'string',
'distributionConfigurationArn': 'string'
}
"""
pass
def delete_image(imageBuildVersionArn=None):
"""
Deletes an image.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_image(
imageBuildVersionArn='string'
)
:type imageBuildVersionArn: string
:param imageBuildVersionArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image to delete.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'imageBuildVersionArn': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
imageBuildVersionArn (string) --The Amazon Resource Name (ARN) of the image that was deleted.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceDependencyException
:return: {
'requestId': 'string',
'imageBuildVersionArn': 'string'
}
"""
pass
def delete_image_pipeline(imagePipelineArn=None):
"""
Deletes an image pipeline.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_image_pipeline(
imagePipelineArn='string'
)
:type imagePipelineArn: string
:param imagePipelineArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image pipeline to delete.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'imagePipelineArn': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
imagePipelineArn (string) --The Amazon Resource Name (ARN) of the image pipeline that was deleted.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceDependencyException
:return: {
'requestId': 'string',
'imagePipelineArn': 'string'
}
"""
pass
def delete_image_recipe(imageRecipeArn=None):
"""
Deletes an image recipe.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_image_recipe(
imageRecipeArn='string'
)
:type imageRecipeArn: string
:param imageRecipeArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image recipe to delete.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'imageRecipeArn': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
imageRecipeArn (string) --The Amazon Resource Name (ARN) of the image recipe that was deleted.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceDependencyException
:return: {
'requestId': 'string',
'imageRecipeArn': 'string'
}
"""
pass
def delete_infrastructure_configuration(infrastructureConfigurationArn=None):
"""
Deletes an infrastructure configuration.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_infrastructure_configuration(
infrastructureConfigurationArn='string'
)
:type infrastructureConfigurationArn: string
:param infrastructureConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the infrastructure configuration to delete.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'infrastructureConfigurationArn': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
infrastructureConfigurationArn (string) --The Amazon Resource Name (ARN) of the infrastructure configuration that was deleted.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceDependencyException
:return: {
'requestId': 'string',
'infrastructureConfigurationArn': 'string'
}
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to\nClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid\nfor. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By\ndefault, the http method is whatever is used in the method\'s model.
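
    A minimal usage sketch (hedged: generate_presigned_url exists on every
    boto3 client but is most commonly used with Amazon S3; the method name
    and parameters below are illustrative only):

    :example: url = client.generate_presigned_url(
        ClientMethod='get_component',
        Params={'componentBuildVersionArn': 'arn:aws:imagebuilder:...'},
        ExpiresIn=3600
    )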
"""
pass
def get_component(componentBuildVersionArn=None):
"""
Gets a component object.
See also: AWS API Documentation
Exceptions
:example: response = client.get_component(
componentBuildVersionArn='string'
)
:type componentBuildVersionArn: string
    :param componentBuildVersionArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the component that you want to retrieve. Regex requires '/\d+$' suffix.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'component': {
'arn': 'string',
'name': 'string',
'version': 'string',
'description': 'string',
'changeDescription': 'string',
'type': 'BUILD'|'TEST',
'platform': 'Windows'|'Linux',
'supportedOsVersions': [
'string',
],
'owner': 'string',
'data': 'string',
'kmsKeyId': 'string',
'encrypted': True|False,
'dateCreated': 'string',
'tags': {
'string': 'string'
}
}
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
component (dict) --The component object associated with the specified ARN.
arn (string) --The Amazon Resource Name (ARN) of the component.
name (string) --The name of the component.
version (string) --The version of the component.
description (string) --The description of the component.
changeDescription (string) --The change description of the component.
type (string) --The type of the component denotes whether the component is used to build the image or only to test it.
platform (string) --The platform of the component.
supportedOsVersions (list) --The operating system (OS) version supported by the component. If the OS information is available, a prefix match is performed against the parent image OS version during image recipe creation.
(string) --
owner (string) --The owner of the component.
data (string) --The data of the component.
kmsKeyId (string) --The KMS key identifier used to encrypt the component.
encrypted (boolean) --The encryption status of the component.
dateCreated (string) --The date that the component was created.
tags (dict) --The tags associated with the component.
(string) --
(string) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'component': {
'arn': 'string',
'name': 'string',
'version': 'string',
'description': 'string',
'changeDescription': 'string',
'type': 'BUILD'|'TEST',
'platform': 'Windows'|'Linux',
'supportedOsVersions': [
'string',
],
'owner': 'string',
'data': 'string',
'kmsKeyId': 'string',
'encrypted': True|False,
'dateCreated': 'string',
'tags': {
'string': 'string'
}
}
}
:returns:
(string) --
(string) --
"""
pass
def get_component_policy(componentArn=None):
"""
Gets a component policy.
See also: AWS API Documentation
Exceptions
:example: response = client.get_component_policy(
componentArn='string'
)
:type componentArn: string
:param componentArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the component whose policy you want to retrieve.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'policy': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
policy (string) --The component policy.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'policy': 'string'
}
"""
pass
def get_distribution_configuration(distributionConfigurationArn=None):
"""
Gets a distribution configuration.
See also: AWS API Documentation
Exceptions
:example: response = client.get_distribution_configuration(
distributionConfigurationArn='string'
)
:type distributionConfigurationArn: string
:param distributionConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the distribution configuration that you want to retrieve.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'distributionConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'distributions': [
{
'region': 'string',
'amiDistributionConfiguration': {
'name': 'string',
'description': 'string',
'amiTags': {
'string': 'string'
},
'launchPermission': {
'userIds': [
'string',
],
'userGroups': [
'string',
]
}
},
'licenseConfigurationArns': [
'string',
]
},
],
'timeoutMinutes': 123,
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
}
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
distributionConfiguration (dict) --The distribution configuration object.
arn (string) --The Amazon Resource Name (ARN) of the distribution configuration.
name (string) --The name of the distribution configuration.
description (string) --The description of the distribution configuration.
distributions (list) --The distributions of the distribution configuration.
(dict) --Defines the settings for a specific Region.
region (string) --The target Region.
amiDistributionConfiguration (dict) --The specific AMI settings (for example, launch permissions, AMI tags).
name (string) --The name of the distribution configuration.
description (string) --The description of the distribution configuration.
amiTags (dict) --The tags to apply to AMIs distributed to this Region.
(string) --
(string) --
launchPermission (dict) --Launch permissions can be used to configure which AWS accounts can use the AMI to launch instances.
userIds (list) --The AWS account ID.
(string) --
userGroups (list) --The name of the group.
(string) --
licenseConfigurationArns (list) --The License Manager Configuration to associate with the AMI in the specified Region.
(string) --
timeoutMinutes (integer) --The maximum duration in minutes for this distribution configuration.
dateCreated (string) --The date on which this distribution configuration was created.
dateUpdated (string) --The date on which this distribution configuration was last updated.
tags (dict) --The tags of the distribution configuration.
(string) --
(string) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'distributionConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'distributions': [
{
'region': 'string',
'amiDistributionConfiguration': {
'name': 'string',
'description': 'string',
'amiTags': {
'string': 'string'
},
'launchPermission': {
'userIds': [
'string',
],
'userGroups': [
'string',
]
}
},
'licenseConfigurationArns': [
'string',
]
},
],
'timeoutMinutes': 123,
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
}
}
:returns:
(string) --
"""
pass
def get_image(imageBuildVersionArn=None):
"""
Gets an image.
See also: AWS API Documentation
Exceptions
:example: response = client.get_image(
imageBuildVersionArn='string'
)
:type imageBuildVersionArn: string
:param imageBuildVersionArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image that you want to retrieve.\n
:rtype: dict
    Returns
    Response Syntax
    {
'requestId': 'string',
'image': {
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'enhancedImageMetadataEnabled': True|False,
'osVersion': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
},
'imageRecipe': {
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'owner': 'string',
'version': 'string',
'components': [
{
'componentArn': 'string'
},
],
'parentImage': 'string',
'blockDeviceMappings': [
{
'deviceName': 'string',
'ebs': {
'encrypted': True|False,
'deleteOnTermination': True|False,
'iops': 123,
'kmsKeyId': 'string',
'snapshotId': 'string',
'volumeSize': 123,
'volumeType': 'standard'|'io1'|'gp2'|'sc1'|'st1'
},
'virtualName': 'string',
'noDevice': 'string'
},
],
'dateCreated': 'string',
'tags': {
'string': 'string'
}
},
'sourcePipelineName': 'string',
'sourcePipelineArn': 'string',
'infrastructureConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'instanceTypes': [
'string',
],
'instanceProfileName': 'string',
'securityGroupIds': [
'string',
],
'subnetId': 'string',
'logging': {
's3Logs': {
's3BucketName': 'string',
's3KeyPrefix': 'string'
}
},
'keyPair': 'string',
'terminateInstanceOnFailure': True|False,
'snsTopicArn': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
'distributionConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'distributions': [
{
'region': 'string',
'amiDistributionConfiguration': {
'name': 'string',
'description': 'string',
'amiTags': {
'string': 'string'
},
'launchPermission': {
'userIds': [
'string',
],
'userGroups': [
'string',
]
}
},
'licenseConfigurationArns': [
'string',
]
},
],
'timeoutMinutes': 123,
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
'imageTestsConfiguration': {
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
'dateCreated': 'string',
'outputResources': {
'amis': [
{
'region': 'string',
'image': 'string',
'name': 'string',
'description': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
}
},
]
},
'tags': {
'string': 'string'
}
}
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
image (dict) --The image object.
arn (string) --The Amazon Resource Name (ARN) of the image.
name (string) --The name of the image.
version (string) --The semantic version of the image.
platform (string) --The platform of the image.
enhancedImageMetadataEnabled (boolean) --Collects additional information about the image being created, including the operating system (OS) version and package list. This information is used to enhance the overall experience of using EC2 Image Builder. Enabled by default.
osVersion (string) --The operating system version of the instance. For example, Amazon Linux 2, Ubuntu 18, or Microsoft Windows Server 2019.
state (dict) --The state of the image.
status (string) --The status of the image.
reason (string) --The reason for the image\'s status.
imageRecipe (dict) --The image recipe used when creating the image.
arn (string) --The Amazon Resource Name (ARN) of the image recipe.
name (string) --The name of the image recipe.
description (string) --The description of the image recipe.
platform (string) --The platform of the image recipe.
owner (string) --The owner of the image recipe.
version (string) --The version of the image recipe.
components (list) --The components of the image recipe.
(dict) --Configuration details of the component.
componentArn (string) --The Amazon Resource Name (ARN) of the component.
parentImage (string) --The parent image of the image recipe.
blockDeviceMappings (list) --The block device mappings to apply when creating images from this recipe.
(dict) --Defines block device mappings for the instance used to configure your image.
deviceName (string) --The device to which these mappings apply.
ebs (dict) --Use to manage Amazon EBS-specific configuration for this mapping.
encrypted (boolean) --Use to configure device encryption.
deleteOnTermination (boolean) --Use to configure delete on termination of the associated device.
iops (integer) --Use to configure device IOPS.
kmsKeyId (string) --Use to configure the KMS key to use when encrypting the device.
snapshotId (string) --The snapshot that defines the device contents.
volumeSize (integer) --Use to override the device\'s volume size.
volumeType (string) --Use to override the device\'s volume type.
virtualName (string) --Use to manage instance ephemeral devices.
noDevice (string) --Use to remove a mapping from the parent image.
dateCreated (string) --The date on which this image recipe was created.
tags (dict) --The tags of the image recipe.
(string) --
(string) --
sourcePipelineName (string) --The name of the image pipeline that created this image.
sourcePipelineArn (string) --The Amazon Resource Name (ARN) of the image pipeline that created this image.
infrastructureConfiguration (dict) --The infrastructure used when creating this image.
arn (string) --The Amazon Resource Name (ARN) of the infrastructure configuration.
name (string) --The name of the infrastructure configuration.
description (string) --The description of the infrastructure configuration.
instanceTypes (list) --The instance types of the infrastructure configuration.
(string) --
instanceProfileName (string) --The instance profile of the infrastructure configuration.
securityGroupIds (list) --The security group IDs of the infrastructure configuration.
(string) --
subnetId (string) --The subnet ID of the infrastructure configuration.
logging (dict) --The logging configuration of the infrastructure configuration.
s3Logs (dict) --The Amazon S3 logging configuration.
s3BucketName (string) --The Amazon S3 bucket in which to store the logs.
s3KeyPrefix (string) --The Amazon S3 path in which to store the logs.
keyPair (string) --The EC2 key pair of the infrastructure configuration.
terminateInstanceOnFailure (boolean) --The terminate instance on failure configuration of the infrastructure configuration.
snsTopicArn (string) --The SNS topic Amazon Resource Name (ARN) of the infrastructure configuration.
dateCreated (string) --The date on which the infrastructure configuration was created.
dateUpdated (string) --The date on which the infrastructure configuration was last updated.
tags (dict) --The tags of the infrastructure configuration.
(string) --
(string) --
distributionConfiguration (dict) --The distribution configuration used when creating this image.
arn (string) --The Amazon Resource Name (ARN) of the distribution configuration.
name (string) --The name of the distribution configuration.
description (string) --The description of the distribution configuration.
distributions (list) --The distributions of the distribution configuration.
(dict) --Defines the settings for a specific Region.
region (string) --The target Region.
amiDistributionConfiguration (dict) --The specific AMI settings (for example, launch permissions, AMI tags).
name (string) --The name of the distribution configuration.
description (string) --The description of the distribution configuration.
amiTags (dict) --The tags to apply to AMIs distributed to this Region.
(string) --
(string) --
launchPermission (dict) --Launch permissions can be used to configure which AWS accounts can use the AMI to launch instances.
userIds (list) --The AWS account ID.
(string) --
userGroups (list) --The name of the group.
(string) --
licenseConfigurationArns (list) --The License Manager Configuration to associate with the AMI in the specified Region.
(string) --
timeoutMinutes (integer) --The maximum duration in minutes for this distribution configuration.
dateCreated (string) --The date on which this distribution configuration was created.
dateUpdated (string) --The date on which this distribution configuration was last updated.
tags (dict) --The tags of the distribution configuration.
(string) --
(string) --
imageTestsConfiguration (dict) --The image tests configuration used when creating this image.
imageTestsEnabled (boolean) --Defines if tests should be executed when building this image.
timeoutMinutes (integer) --The maximum time in minutes that tests are permitted to run.
dateCreated (string) --The date on which this image was created.
outputResources (dict) --The output resources produced when creating this image.
amis (list) --The EC2 AMIs created by this image.
(dict) --Details of an EC2 AMI.
region (string) --The AWS Region of the EC2 AMI.
image (string) --The AMI ID of the EC2 AMI.
name (string) --The name of the EC2 AMI.
description (string) --The description of the EC2 AMI.
state (dict) --Image state shows the image status and the reason for that status.
status (string) --The status of the image.
reason (string) --The reason for the image\'s status.
tags (dict) --The tags of the image.
(string) --
(string) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'image': {
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'enhancedImageMetadataEnabled': True|False,
'osVersion': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
},
'imageRecipe': {
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'owner': 'string',
'version': 'string',
'components': [
{
'componentArn': 'string'
},
],
'parentImage': 'string',
'blockDeviceMappings': [
{
'deviceName': 'string',
'ebs': {
'encrypted': True|False,
'deleteOnTermination': True|False,
'iops': 123,
'kmsKeyId': 'string',
'snapshotId': 'string',
'volumeSize': 123,
'volumeType': 'standard'|'io1'|'gp2'|'sc1'|'st1'
},
'virtualName': 'string',
'noDevice': 'string'
},
],
'dateCreated': 'string',
'tags': {
'string': 'string'
}
},
'sourcePipelineName': 'string',
'sourcePipelineArn': 'string',
'infrastructureConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'instanceTypes': [
'string',
],
'instanceProfileName': 'string',
'securityGroupIds': [
'string',
],
'subnetId': 'string',
'logging': {
's3Logs': {
's3BucketName': 'string',
's3KeyPrefix': 'string'
}
},
'keyPair': 'string',
'terminateInstanceOnFailure': True|False,
'snsTopicArn': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
'distributionConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'distributions': [
{
'region': 'string',
'amiDistributionConfiguration': {
'name': 'string',
'description': 'string',
'amiTags': {
'string': 'string'
},
'launchPermission': {
'userIds': [
'string',
],
'userGroups': [
'string',
]
}
},
'licenseConfigurationArns': [
'string',
]
},
],
'timeoutMinutes': 123,
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
'imageTestsConfiguration': {
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
'dateCreated': 'string',
'outputResources': {
'amis': [
{
'region': 'string',
'image': 'string',
'name': 'string',
'description': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
}
},
]
},
'tags': {
'string': 'string'
}
}
}
:returns:
(string) --
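Usage sketch (not part of the generated reference; it assumes working AWS credentials, and the ARN below is hypothetical):
import boto3

client = boto3.client('imagebuilder')
response = client.get_image(
    imageBuildVersionArn='arn:aws:imagebuilder:us-east-1:123456789012:image/example-image/1.0.0/1'  # hypothetical ARN
)
# The nested state dict reports whether the build is still running or finished.
print(response['image']['state']['status'])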
"""
pass
def get_image_pipeline(imagePipelineArn=None):
"""
Gets an image pipeline.
See also: AWS API Documentation
Exceptions
:example: response = client.get_image_pipeline(
imagePipelineArn='string'
)
:type imagePipelineArn: string
:param imagePipelineArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image pipeline that you want to retrieve.\n
:rtype: dict
ReturnsResponse Syntax{
'requestId': 'string',
'imagePipeline': {
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'enhancedImageMetadataEnabled': True|False,
'imageRecipeArn': 'string',
'infrastructureConfigurationArn': 'string',
'distributionConfigurationArn': 'string',
'imageTestsConfiguration': {
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
'schedule': {
'scheduleExpression': 'string',
'pipelineExecutionStartCondition': 'EXPRESSION_MATCH_ONLY'|'EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE'
},
'status': 'DISABLED'|'ENABLED',
'dateCreated': 'string',
'dateUpdated': 'string',
'dateLastRun': 'string',
'dateNextRun': 'string',
'tags': {
'string': 'string'
}
}
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
imagePipeline (dict) --The image pipeline object.
arn (string) --The Amazon Resource Name (ARN) of the image pipeline.
name (string) --The name of the image pipeline.
description (string) --The description of the image pipeline.
platform (string) --The platform of the image pipeline.
enhancedImageMetadataEnabled (boolean) --Collects additional information about the image being created, including the operating system (OS) version and package list. This information is used to enhance the overall experience of using EC2 Image Builder. Enabled by default.
imageRecipeArn (string) --The Amazon Resource Name (ARN) of the image recipe associated with this image pipeline.
infrastructureConfigurationArn (string) --The Amazon Resource Name (ARN) of the infrastructure configuration associated with this image pipeline.
distributionConfigurationArn (string) --The Amazon Resource Name (ARN) of the distribution configuration associated with this image pipeline.
imageTestsConfiguration (dict) --The image tests configuration of the image pipeline.
imageTestsEnabled (boolean) --Defines if tests should be executed when building this image.
timeoutMinutes (integer) --The maximum time in minutes that tests are permitted to run.
schedule (dict) --The schedule of the image pipeline.
scheduleExpression (string) --The expression determines how often EC2 Image Builder evaluates your pipelineExecutionStartCondition.
pipelineExecutionStartCondition (string) --The condition configures when the pipeline should trigger a new image build. When the pipelineExecutionStartCondition is set to EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE, EC2 Image Builder will build a new image only when there are known changes pending. When it is set to EXPRESSION_MATCH_ONLY, it will build a new image every time the CRON expression matches the current time.
status (string) --The status of the image pipeline.
dateCreated (string) --The date on which this image pipeline was created.
dateUpdated (string) --The date on which this image pipeline was last updated.
dateLastRun (string) --The date on which this image pipeline was last run.
dateNextRun (string) --The date on which this image pipeline will next be run.
tags (dict) --The tags of this image pipeline.
(string) --
(string) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imagePipeline': {
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'enhancedImageMetadataEnabled': True|False,
'imageRecipeArn': 'string',
'infrastructureConfigurationArn': 'string',
'distributionConfigurationArn': 'string',
'imageTestsConfiguration': {
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
'schedule': {
'scheduleExpression': 'string',
'pipelineExecutionStartCondition': 'EXPRESSION_MATCH_ONLY'|'EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE'
},
'status': 'DISABLED'|'ENABLED',
'dateCreated': 'string',
'dateUpdated': 'string',
'dateLastRun': 'string',
'dateNextRun': 'string',
'tags': {
'string': 'string'
}
}
}
:returns:
(string) --
(string) --
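Usage sketch, assuming credentials are configured and using a hypothetical pipeline ARN:
import boto3

client = boto3.client('imagebuilder')
response = client.get_image_pipeline(
    imagePipelineArn='arn:aws:imagebuilder:us-east-1:123456789012:image-pipeline/example-pipeline'  # hypothetical ARN
)
pipeline = response['imagePipeline']
# A pipeline only runs on its schedule while its status is ENABLED.
print(pipeline['status'], pipeline.get('dateNextRun'))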
"""
pass
def get_image_policy(imageArn=None):
"""
Gets an image policy.
See also: AWS API Documentation
Exceptions
:example: response = client.get_image_policy(
imageArn='string'
)
:type imageArn: string
:param imageArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image whose policy you want to retrieve.\n
:rtype: dict
ReturnsResponse Syntax{
'requestId': 'string',
'policy': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
policy (string) --The image policy object.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'policy': 'string'
}
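The returned policy is a JSON document serialized as a string, so decode it before inspecting it. A minimal sketch (the ARN is hypothetical):
import json
import boto3

client = boto3.client('imagebuilder')
response = client.get_image_policy(
    imageArn='arn:aws:imagebuilder:us-east-1:123456789012:image/example-image/1.0.0'  # hypothetical ARN
)
policy = json.loads(response['policy'])  # parse the JSON string into a dict
print(policy.get('Statement', []))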
"""
pass
def get_image_recipe(imageRecipeArn=None):
"""
Gets an image recipe.
See also: AWS API Documentation
Exceptions
:example: response = client.get_image_recipe(
imageRecipeArn='string'
)
:type imageRecipeArn: string
:param imageRecipeArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image recipe that you want to retrieve.\n
:rtype: dict
ReturnsResponse Syntax{
'requestId': 'string',
'imageRecipe': {
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'owner': 'string',
'version': 'string',
'components': [
{
'componentArn': 'string'
},
],
'parentImage': 'string',
'blockDeviceMappings': [
{
'deviceName': 'string',
'ebs': {
'encrypted': True|False,
'deleteOnTermination': True|False,
'iops': 123,
'kmsKeyId': 'string',
'snapshotId': 'string',
'volumeSize': 123,
'volumeType': 'standard'|'io1'|'gp2'|'sc1'|'st1'
},
'virtualName': 'string',
'noDevice': 'string'
},
],
'dateCreated': 'string',
'tags': {
'string': 'string'
}
}
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
imageRecipe (dict) --The image recipe object.
arn (string) --The Amazon Resource Name (ARN) of the image recipe.
name (string) --The name of the image recipe.
description (string) --The description of the image recipe.
platform (string) --The platform of the image recipe.
owner (string) --The owner of the image recipe.
version (string) --The version of the image recipe.
components (list) --The components of the image recipe.
(dict) --Configuration details of the component.
componentArn (string) --The Amazon Resource Name (ARN) of the component.
parentImage (string) --The parent image of the image recipe.
blockDeviceMappings (list) --The block device mappings to apply when creating images from this recipe.
(dict) --Defines block device mappings for the instance used to configure your image.
deviceName (string) --The device to which these mappings apply.
ebs (dict) --Use to manage Amazon EBS-specific configuration for this mapping.
encrypted (boolean) --Use to configure device encryption.
deleteOnTermination (boolean) --Use to configure delete on termination of the associated device.
iops (integer) --Use to configure device IOPS.
kmsKeyId (string) --Use to configure the KMS key to use when encrypting the device.
snapshotId (string) --The snapshot that defines the device contents.
volumeSize (integer) --Use to override the device\'s volume size.
volumeType (string) --Use to override the device\'s volume type.
virtualName (string) --Use to manage instance ephemeral devices.
noDevice (string) --Use to remove a mapping from the parent image.
dateCreated (string) --The date on which this image recipe was created.
tags (dict) --The tags of the image recipe.
(string) --
(string) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imageRecipe': {
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'owner': 'string',
'version': 'string',
'components': [
{
'componentArn': 'string'
},
],
'parentImage': 'string',
'blockDeviceMappings': [
{
'deviceName': 'string',
'ebs': {
'encrypted': True|False,
'deleteOnTermination': True|False,
'iops': 123,
'kmsKeyId': 'string',
'snapshotId': 'string',
'volumeSize': 123,
'volumeType': 'standard'|'io1'|'gp2'|'sc1'|'st1'
},
'virtualName': 'string',
'noDevice': 'string'
},
],
'dateCreated': 'string',
'tags': {
'string': 'string'
}
}
}
:returns:
(string) --
(string) --
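Usage sketch that walks the recipe's component list (hypothetical ARN; fetch full component details with get_component if needed):
import boto3

client = boto3.client('imagebuilder')
response = client.get_image_recipe(
    imageRecipeArn='arn:aws:imagebuilder:us-east-1:123456789012:image-recipe/example-recipe/1.0.0'  # hypothetical ARN
)
recipe = response['imageRecipe']
print(recipe['parentImage'])
for component in recipe['components']:
    # Each entry carries only the component ARN.
    print(component['componentArn'])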
"""
pass
def get_image_recipe_policy(imageRecipeArn=None):
"""
Gets an image recipe policy.
See also: AWS API Documentation
Exceptions
:example: response = client.get_image_recipe_policy(
imageRecipeArn='string'
)
:type imageRecipeArn: string
:param imageRecipeArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image recipe whose policy you want to retrieve.\n
:rtype: dict
ReturnsResponse Syntax{
'requestId': 'string',
'policy': 'string'
}
Response Structure
(dict) --
requestId (string) --The request ID that uniquely identifies this request.
policy (string) --The image recipe policy object.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'policy': 'string'
}
"""
pass
def get_infrastructure_configuration(infrastructureConfigurationArn=None):
"""
Gets an infrastructure configuration.
See also: AWS API Documentation
Exceptions
:example: response = client.get_infrastructure_configuration(
infrastructureConfigurationArn='string'
)
:type infrastructureConfigurationArn: string
:param infrastructureConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the infrastructure configuration that you want to retrieve.\n
:rtype: dict
ReturnsResponse Syntax{
'requestId': 'string',
'infrastructureConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'instanceTypes': [
'string',
],
'instanceProfileName': 'string',
'securityGroupIds': [
'string',
],
'subnetId': 'string',
'logging': {
's3Logs': {
's3BucketName': 'string',
's3KeyPrefix': 'string'
}
},
'keyPair': 'string',
'terminateInstanceOnFailure': True|False,
'snsTopicArn': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
}
}
Response Structure
(dict) --GetInfrastructureConfiguration response object.
requestId (string) --The request ID that uniquely identifies this request.
infrastructureConfiguration (dict) --The infrastructure configuration object.
arn (string) --The Amazon Resource Name (ARN) of the infrastructure configuration.
name (string) --The name of the infrastructure configuration.
description (string) --The description of the infrastructure configuration.
instanceTypes (list) --The instance types of the infrastructure configuration.
(string) --
instanceProfileName (string) --The instance profile of the infrastructure configuration.
securityGroupIds (list) --The security group IDs of the infrastructure configuration.
(string) --
subnetId (string) --The subnet ID of the infrastructure configuration.
logging (dict) --The logging configuration of the infrastructure configuration.
s3Logs (dict) --The Amazon S3 logging configuration.
s3BucketName (string) --The Amazon S3 bucket in which to store the logs.
s3KeyPrefix (string) --The Amazon S3 path in which to store the logs.
keyPair (string) --The EC2 key pair of the infrastructure configuration.
terminateInstanceOnFailure (boolean) --The terminate instance on failure configuration of the infrastructure configuration.
snsTopicArn (string) --The SNS topic Amazon Resource Name (ARN) of the infrastructure configuration.
dateCreated (string) --The date on which the infrastructure configuration was created.
dateUpdated (string) --The date on which the infrastructure configuration was last updated.
tags (dict) --The tags of the infrastructure configuration.
(string) --
(string) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'infrastructureConfiguration': {
'arn': 'string',
'name': 'string',
'description': 'string',
'instanceTypes': [
'string',
],
'instanceProfileName': 'string',
'securityGroupIds': [
'string',
],
'subnetId': 'string',
'logging': {
's3Logs': {
's3BucketName': 'string',
's3KeyPrefix': 'string'
}
},
'keyPair': 'string',
'terminateInstanceOnFailure': True|False,
'snsTopicArn': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
}
}
:returns:
(string) --
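Usage sketch showing how the S3 logging settings are nested in the response; optional keys are read with .get() so configurations without logging do not raise KeyError (the ARN is hypothetical):
import boto3

client = boto3.client('imagebuilder')
response = client.get_infrastructure_configuration(
    infrastructureConfigurationArn='arn:aws:imagebuilder:us-east-1:123456789012:infrastructure-configuration/example-config'  # hypothetical ARN
)
config = response['infrastructureConfiguration']
s3_logs = config.get('logging', {}).get('s3Logs', {})
print(s3_logs.get('s3BucketName'), s3_logs.get('s3KeyPrefix'))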
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), then if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
ReturnsA paginator object.
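Usage sketch. Which operations are paginated depends on the installed botocore version, so the sketch checks with can_paginate first; list_images is used purely as an illustration:
import boto3

client = boto3.client('imagebuilder')
if client.can_paginate('list_images'):
    paginator = client.get_paginator('list_images')
    for page in paginator.paginate():  # each page is one API response dict
        for image in page['imageVersionList']:
            print(image['arn'])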
"""
pass
def get_waiter(waiter_name=None):
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters\nsection of the service docs for a list of available waiters.
:rtype: botocore.waiter.Waiter
"""
pass
def import_component(name=None, semanticVersion=None, description=None, changeDescription=None, type=None, format=None, platform=None, data=None, uri=None, kmsKeyId=None, tags=None, clientToken=None):
"""
Imports a component and transforms its data into a component document.
See also: AWS API Documentation
Exceptions
:example: response = client.import_component(
name='string',
semanticVersion='string',
description='string',
changeDescription='string',
type='BUILD'|'TEST',
format='SHELL',
platform='Windows'|'Linux',
data='string',
uri='string',
kmsKeyId='string',
tags={
'string': 'string'
},
clientToken='string'
)
:type name: string
:param name: [REQUIRED]\nThe name of the component.\n
:type semanticVersion: string
:param semanticVersion: [REQUIRED]\nThe semantic version of the component. This version follows the semantic version syntax. For example, major.minor.patch. This could be versioned like software (2.0.1) or like a date (2019.12.01).\n
:type description: string
:param description: The description of the component. Describes the contents of the component.
:type changeDescription: string
:param changeDescription: The change description of the component. Describes what change has been made in this version, or what makes this version different from other versions of this component.
:type type: string
:param type: [REQUIRED]\nThe type of the component denotes whether the component is used to build the image or only to test it.\n
:type format: string
:param format: [REQUIRED]\nThe format of the resource that you want to import as a component.\n
:type platform: string
:param platform: [REQUIRED]\nThe platform of the component.\n
:type data: string
:param data: The data of the component. Used to specify the data inline. Either data or uri can be used to specify the data within the component.
:type uri: string
:param uri: The uri of the component. Must be an S3 URL and the requester must have permission to access the S3 bucket. If you use S3, you can specify component content up to your service quota. Either data or uri can be used to specify the data within the component.
:type kmsKeyId: string
:param kmsKeyId: The ID of the KMS key that should be used to encrypt this component.
:type tags: dict
:param tags: The tags of the component.\n\n(string) --\n(string) --\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token of the component.\nThis field is autopopulated if not provided.\n
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'clientToken': 'string',
'componentBuildVersionArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
componentBuildVersionArn (string) --
The Amazon Resource Name (ARN) of the imported component.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.InvalidVersionNumberException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.InvalidParameterCombinationException
:return: {
'requestId': 'string',
'clientToken': 'string',
'componentBuildVersionArn': 'string'
}
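Usage sketch importing an inline shell component; data and uri are the two mutually exclusive ways to supply the content, and every value below is hypothetical:
import boto3

client = boto3.client('imagebuilder')
response = client.import_component(
    name='example-imported-component',  # hypothetical
    semanticVersion='1.0.0',
    type='BUILD',
    format='SHELL',
    platform='Linux',
    data='#!/bin/bash\necho hello',  # inline content; pass uri= for an S3 object instead
    clientToken='example-unique-token-123'  # any unique string; enforces idempotency
)
print(response['componentBuildVersionArn'])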
"""
pass
def list_component_build_versions(componentVersionArn=None, maxResults=None, nextToken=None):
"""
Returns the list of component build versions for the specified semantic version.
See also: AWS API Documentation
Exceptions
:example: response = client.list_component_build_versions(
componentVersionArn='string',
maxResults=123,
nextToken='string'
)
:type componentVersionArn: string
:param componentVersionArn: [REQUIRED]\nThe component version Amazon Resource Name (ARN) whose versions you want to list.\n
:type maxResults: integer
:param maxResults: The maximum number of items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'componentSummaryList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'supportedOsVersions': [
'string',
],
'type': 'BUILD'|'TEST',
'owner': 'string',
'description': 'string',
'changeDescription': 'string',
'dateCreated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
componentSummaryList (list) --
The list of component summaries for the specified semantic version.
(dict) --
A high-level summary of a component.
arn (string) --
The Amazon Resource Name (ARN) of the component.
name (string) --
The name of the component.
version (string) --
The version of the component.
platform (string) --
The platform of the component.
supportedOsVersions (list) --
The operating system (OS) version supported by the component. If the OS information is available, a prefix match is performed against the parent image OS version during image recipe creation.
(string) --
type (string) --
The type of the component denotes whether the component is used to build the image or only to test it.
owner (string) --
The owner of the component.
description (string) --
The description of the component.
changeDescription (string) --
The change description of the component.
dateCreated (string) --
The date that the component was created.
tags (dict) --
The tags associated with the component.
(string) --
(string) --
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'componentSummaryList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'supportedOsVersions': [
'string',
],
'type': 'BUILD'|'TEST',
'owner': 'string',
'description': 'string',
'changeDescription': 'string',
'dateCreated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
:returns:
(string) --
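Manual pagination sketch using the documented nextToken field (the component version ARN is hypothetical):
import boto3

client = boto3.client('imagebuilder')
kwargs = {'componentVersionArn': 'arn:aws:imagebuilder:us-east-1:123456789012:component/example-component/1.0.0'}  # hypothetical ARN
while True:
    response = client.list_component_build_versions(**kwargs)
    for summary in response['componentSummaryList']:
        print(summary['arn'], summary['dateCreated'])
    token = response.get('nextToken')
    if not token:  # an absent or empty token means the last page was reached
        break
    kwargs['nextToken'] = token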
"""
pass
def list_components(owner=None, filters=None, maxResults=None, nextToken=None):
"""
Returns the list of components that match the specified criteria.
See also: AWS API Documentation
Exceptions
:example: response = client.list_components(
owner='Self'|'Shared'|'Amazon',
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type owner: string
:param owner: The owner defines which components you want to list. By default, this request will only show components owned by your account. You can use this field to specify if you want to view components owned by yourself, by Amazon, or those components that have been shared with you by other customers.
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum number of items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'componentVersionList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'supportedOsVersions': [
'string',
],
'type': 'BUILD'|'TEST',
'owner': 'string',
'dateCreated': 'string'
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
componentVersionList (list) --
The list of component semantic versions.
(dict) --
A high-level overview of a component semantic version.
arn (string) --
The Amazon Resource Name (ARN) of the component.
name (string) --
The name of the component.
version (string) --
The semantic version of the component.
description (string) --
The description of the component.
platform (string) --
The platform of the component.
supportedOsVersions (list) --
The operating system (OS) version supported by the component. If the OS information is available, a prefix match is performed against the parent image OS version during image recipe creation.
(string) --
type (string) --
The type of the component denotes whether the component is used to build the image or only to test it.
owner (string) --
The owner of the component.
dateCreated (string) --
The date that the component was created.
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'componentVersionList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'supportedOsVersions': [
'string',
],
'type': 'BUILD'|'TEST',
'owner': 'string',
'dateCreated': 'string'
},
],
'nextToken': 'string'
}
:returns:
(string) --
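Usage sketch narrowing results with owner and a platform filter (filter names and values are case-sensitive, per the parameter docs above):
import boto3

client = boto3.client('imagebuilder')
response = client.list_components(
    owner='Self',
    filters=[{'name': 'platform', 'values': ['Linux']}]
)
for version in response['componentVersionList']:
    print(version['name'], version['version'])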
"""
pass
def list_distribution_configurations(filters=None, maxResults=None, nextToken=None):
"""
Returns a list of distribution configurations.
See also: AWS API Documentation
Exceptions
:example: response = client.list_distribution_configurations(
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum number of items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'distributionConfigurationSummaryList': [
{
'arn': 'string',
'name': 'string',
'description': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
distributionConfigurationSummaryList (list) --
The list of distribution configurations.
(dict) --
A high-level overview of a distribution configuration.
arn (string) --
The Amazon Resource Name (ARN) of the distribution configuration.
name (string) --
The name of the distribution configuration.
description (string) --
The description of the distribution configuration.
dateCreated (string) --
The date on which the distribution configuration was created.
dateUpdated (string) --
The date on which the distribution configuration was updated.
tags (dict) --
The tags associated with the distribution configuration.
(string) --
(string) --
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'distributionConfigurationSummaryList': [
{
'arn': 'string',
'name': 'string',
'description': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
:returns:
(string) --
(string) --
"""
pass
def list_image_build_versions(imageVersionArn=None, filters=None, maxResults=None, nextToken=None):
"""
Returns a list of image build versions.
See also: AWS API Documentation
Exceptions
:example: response = client.list_image_build_versions(
imageVersionArn='string',
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type imageVersionArn: string
:param imageVersionArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image whose build versions you want to retrieve.\n
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum number of items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'imageSummaryList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'osVersion': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
},
'owner': 'string',
'dateCreated': 'string',
'outputResources': {
'amis': [
{
'region': 'string',
'image': 'string',
'name': 'string',
'description': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
}
},
]
},
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
imageSummaryList (list) --
The list of image build versions.
(dict) --
An image summary.
arn (string) --
The Amazon Resource Name (ARN) of the image.
name (string) --
The name of the image.
version (string) --
The version of the image.
platform (string) --
The platform of the image.
osVersion (string) --
The operating system version of the instance. For example, Amazon Linux 2, Ubuntu 18, or Microsoft Windows Server 2019.
state (dict) --
The state of the image.
status (string) --
The status of the image.
reason (string) --
The reason for the image\'s status.
owner (string) --
The owner of the image.
dateCreated (string) --
The date on which this image was created.
outputResources (dict) --
The output resources produced when creating this image.
amis (list) --
The EC2 AMIs created by this image.
(dict) --
Details of an EC2 AMI.
region (string) --
The AWS Region of the EC2 AMI.
image (string) --
The AMI ID of the EC2 AMI.
name (string) --
The name of the EC2 AMI.
description (string) --
The description of the EC2 AMI.
state (dict) --
Image state shows the image status and the reason for that status.
status (string) --
The status of the image.
reason (string) --
The reason for the image\'s status.
tags (dict) --
The tags of the image.
(string) --
(string) --
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imageSummaryList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'osVersion': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
},
'owner': 'string',
'dateCreated': 'string',
'outputResources': {
'amis': [
{
'region': 'string',
'image': 'string',
'name': 'string',
'description': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
}
},
]
},
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
:returns:
(string) --
(string) --
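Usage sketch listing build versions and the AMIs each build produced (the image version ARN is hypothetical):
import boto3

client = boto3.client('imagebuilder')
response = client.list_image_build_versions(
    imageVersionArn='arn:aws:imagebuilder:us-east-1:123456789012:image/example-image/1.0.0'  # hypothetical ARN
)
for summary in response['imageSummaryList']:
    print(summary['arn'], summary['state']['status'])
    for ami in summary.get('outputResources', {}).get('amis', []):
        # outputResources.amis holds the EC2 AMIs produced by this build.
        print(' ', ami['region'], ami['image'])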
"""
pass
def list_image_pipeline_images(imagePipelineArn=None, filters=None, maxResults=None, nextToken=None):
"""
Returns a list of images created by the specified pipeline.
See also: AWS API Documentation
Exceptions
:example: response = client.list_image_pipeline_images(
imagePipelineArn='string',
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type imagePipelineArn: string
:param imagePipelineArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image pipeline whose images you want to view.\n
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum number of items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'imageSummaryList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'osVersion': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
},
'owner': 'string',
'dateCreated': 'string',
'outputResources': {
'amis': [
{
'region': 'string',
'image': 'string',
'name': 'string',
'description': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
}
},
]
},
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
imageSummaryList (list) --
The list of images built by this pipeline.
(dict) --
An image summary.
arn (string) --
The Amazon Resource Name (ARN) of the image.
name (string) --
The name of the image.
version (string) --
The version of the image.
platform (string) --
The platform of the image.
osVersion (string) --
The operating system version of the instance. For example, Amazon Linux 2, Ubuntu 18, or Microsoft Windows Server 2019.
state (dict) --
The state of the image.
status (string) --
The status of the image.
reason (string) --
The reason for the image\'s status.
owner (string) --
The owner of the image.
dateCreated (string) --
The date on which this image was created.
outputResources (dict) --
The output resources produced when creating this image.
amis (list) --
The EC2 AMIs created by this image.
(dict) --
Details of an EC2 AMI.
region (string) --
The AWS Region of the EC2 AMI.
image (string) --
The AMI ID of the EC2 AMI.
name (string) --
The name of the EC2 AMI.
description (string) --
The description of the EC2 AMI.
state (dict) --
Image state shows the image status and the reason for that status.
status (string) --
The status of the image.
reason (string) --
The reason for the image\'s status.
tags (dict) --
The tags of the image.
(string) --
(string) --
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imageSummaryList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'osVersion': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
},
'owner': 'string',
'dateCreated': 'string',
'outputResources': {
'amis': [
{
'region': 'string',
'image': 'string',
'name': 'string',
'description': 'string',
'state': {
'status': 'PENDING'|'CREATING'|'BUILDING'|'TESTING'|'DISTRIBUTING'|'INTEGRATING'|'AVAILABLE'|'CANCELLED'|'FAILED'|'DEPRECATED'|'DELETED',
'reason': 'string'
}
},
]
},
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
:returns:
(string) --
(string) --
"""
pass
def list_image_pipelines(filters=None, maxResults=None, nextToken=None):
"""
Returns a list of image pipelines.
See also: AWS API Documentation
Exceptions
:example: response = client.list_image_pipelines(
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum number of items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'imagePipelineList': [
{
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'enhancedImageMetadataEnabled': True|False,
'imageRecipeArn': 'string',
'infrastructureConfigurationArn': 'string',
'distributionConfigurationArn': 'string',
'imageTestsConfiguration': {
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
'schedule': {
'scheduleExpression': 'string',
'pipelineExecutionStartCondition': 'EXPRESSION_MATCH_ONLY'|'EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE'
},
'status': 'DISABLED'|'ENABLED',
'dateCreated': 'string',
'dateUpdated': 'string',
'dateLastRun': 'string',
'dateNextRun': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
imagePipelineList (list) --
The list of image pipelines.
(dict) --
Details of an image pipeline.
arn (string) --
The Amazon Resource Name (ARN) of the image pipeline.
name (string) --
The name of the image pipeline.
description (string) --
The description of the image pipeline.
platform (string) --
The platform of the image pipeline.
enhancedImageMetadataEnabled (boolean) --
Collects additional information about the image being created, including the operating system (OS) version and package list. This information is used to enhance the overall experience of using EC2 Image Builder. Enabled by default.
imageRecipeArn (string) --
The Amazon Resource Name (ARN) of the image recipe associated with this image pipeline.
infrastructureConfigurationArn (string) --
The Amazon Resource Name (ARN) of the infrastructure configuration associated with this image pipeline.
distributionConfigurationArn (string) --
The Amazon Resource Name (ARN) of the distribution configuration associated with this image pipeline.
imageTestsConfiguration (dict) --
The image tests configuration of the image pipeline.
imageTestsEnabled (boolean) --
Defines if tests should be executed when building this image.
timeoutMinutes (integer) --
The maximum time in minutes that tests are permitted to run.
schedule (dict) --
The schedule of the image pipeline.
scheduleExpression (string) --
The expression determines how often EC2 Image Builder evaluates your pipelineExecutionStartCondition.
pipelineExecutionStartCondition (string) --
The condition configures when the pipeline should trigger a new image build. When the pipelineExecutionStartCondition is set to EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE, EC2 Image Builder will build a new image only when there are known changes pending. When it is set to EXPRESSION_MATCH_ONLY, it will build a new image every time the CRON expression matches the current time.
status (string) --
The status of the image pipeline.
dateCreated (string) --
The date on which this image pipeline was created.
dateUpdated (string) --
The date on which this image pipeline was last updated.
dateLastRun (string) --
The date on which this image pipeline was last run.
dateNextRun (string) --
The date on which this image pipeline will next be run.
tags (dict) --
The tags of this image pipeline.
(string) --
(string) --
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imagePipelineList': [
{
'arn': 'string',
'name': 'string',
'description': 'string',
'platform': 'Windows'|'Linux',
'enhancedImageMetadataEnabled': True|False,
'imageRecipeArn': 'string',
'infrastructureConfigurationArn': 'string',
'distributionConfigurationArn': 'string',
'imageTestsConfiguration': {
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
'schedule': {
'scheduleExpression': 'string',
'pipelineExecutionStartCondition': 'EXPRESSION_MATCH_ONLY'|'EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE'
},
'status': 'DISABLED'|'ENABLED',
'dateCreated': 'string',
'dateUpdated': 'string',
'dateLastRun': 'string',
'dateNextRun': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
:returns:
(string) --
(string) --
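Usage sketch locating a pipeline by name with the documented filter shape (the name value is hypothetical):
import boto3

client = boto3.client('imagebuilder')
response = client.list_image_pipelines(
    filters=[{'name': 'name', 'values': ['example-pipeline']}]  # hypothetical pipeline name
)
for pipeline in response['imagePipelineList']:
    print(pipeline['arn'], pipeline['status'])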
"""
pass
def list_image_recipes(owner=None, filters=None, maxResults=None, nextToken=None):
"""
Returns a list of image recipes.
See also: AWS API Documentation
Exceptions
:example: response = client.list_image_recipes(
owner='Self'|'Shared'|'Amazon',
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type owner: string
:param owner: The owner defines which image recipes you want to list. By default, this request will only show image recipes owned by your account. You can use this field to specify if you want to view image recipes owned by yourself, by Amazon, or those image recipes that have been shared with you by other customers.
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum number of items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
ReturnsResponse Syntax
{
'requestId': 'string',
'imageRecipeSummaryList': [
{
'arn': 'string',
'name': 'string',
'platform': 'Windows'|'Linux',
'owner': 'string',
'parentImage': 'string',
'dateCreated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
imageRecipeSummaryList (list) --
The list of image recipes.
(dict) --
A summary of an image recipe.
arn (string) --
The Amazon Resource Name (ARN) of the image recipe.
name (string) --
The name of the image recipe.
platform (string) --
The platform of the image recipe.
owner (string) --
The owner of the image recipe.
parentImage (string) --
The parent image of the image recipe.
dateCreated (string) --
The date on which this image recipe was created.
tags (dict) --
The tags of the image recipe.
(string) --
(string) --
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imageRecipeSummaryList': [
{
'arn': 'string',
'name': 'string',
'platform': 'Windows'|'Linux',
'owner': 'string',
'parentImage': 'string',
'dateCreated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
:returns:
(string) --
(string) --
"""
pass
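# Illustrative usage: requesting only self-owned Linux image recipes with the
# request syntax above. A hedged sketch; the 'platform' filter name follows
# the Image Builder documentation, and the client object is assumed to exist.
def _example_list_linux_image_recipes(client):
    """Return the first page of self-owned Linux image recipe summaries."""
    response = client.list_image_recipes(
        owner='Self',
        filters=[{'name': 'platform', 'values': ['Linux']}],
        maxResults=25
    )
    return response.get('imageRecipeSummaryList', [])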
def list_images(owner=None, filters=None, maxResults=None, nextToken=None):
"""
Returns the list of image build versions for the specified semantic version.
See also: AWS API Documentation
Exceptions
:example: response = client.list_images(
owner='Self'|'Shared'|'Amazon',
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type owner: string
:param owner: The owner defines which images you want to list. By default, this request will only show images owned by your account. You can use this field to specify if you want to view images owned by yourself, by Amazon, or those images that have been shared with you by other customers.
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'imageVersionList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'osVersion': 'string',
'owner': 'string',
'dateCreated': 'string'
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
imageVersionList (list) --
The list of image semantic versions.
(dict) --
An image semantic version.
arn (string) --
The Amazon Resource Name (ARN) of the image semantic version.
name (string) --
The name of the image semantic version.
version (string) --
The semantic version of the image.
platform (string) --
The platform of the image semantic version.
osVersion (string) --
The operating system version of the instance. For example, Amazon Linux 2, Ubuntu 18, or Microsoft Windows Server 2019.
owner (string) --
The owner of the image semantic version.
dateCreated (string) --
The date at which this image semantic version was created.
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imageVersionList': [
{
'arn': 'string',
'name': 'string',
'version': 'string',
'platform': 'Windows'|'Linux',
'osVersion': 'string',
'owner': 'string',
'dateCreated': 'string'
},
],
'nextToken': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
"""
pass
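# Illustrative usage: printing the Amazon-owned image semantic versions
# returned by the response syntax above. A minimal sketch with an assumed
# client object; the output format is illustrative only.
def _example_print_amazon_images(client):
    """Print name, version, and OS version of Amazon-owned images."""
    response = client.list_images(owner='Amazon', maxResults=25)
    for image in response.get('imageVersionList', []):
        print(image['name'], image['version'], image.get('osVersion'))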
def list_infrastructure_configurations(filters=None, maxResults=None, nextToken=None):
"""
Returns a list of infrastructure configurations.
See also: AWS API Documentation
Exceptions
:example: response = client.list_infrastructure_configurations(
filters=[
{
'name': 'string',
'values': [
'string',
]
},
],
maxResults=123,
nextToken='string'
)
:type filters: list
:param filters: The filters.\n\n(dict) --A filter name and value pair that is used to return a more specific list of results from a list operation. Filters can be used to match a set of resources by specific criteria, such as tags, attributes, or IDs.\n\nname (string) --The name of the filter. Filter names are case-sensitive.\n\nvalues (list) --The filter values. Filter values are case-sensitive.\n\n(string) --\n\n\n\n\n\n
:type maxResults: integer
:param maxResults: The maximum items to return in a request.
:type nextToken: string
:param nextToken: A token to specify where to start paginating. This is the NextToken from a previously truncated response.
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'infrastructureConfigurationSummaryList': [
{
'arn': 'string',
'name': 'string',
'description': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
infrastructureConfigurationSummaryList (list) --
The list of infrastructure configurations.
(dict) --
The infrastructure used when building EC2 AMIs.
arn (string) --
The Amazon Resource Name (ARN) of the infrastructure configuration.
name (string) --
The name of the infrastructure configuration.
description (string) --
The description of the infrastructure configuration.
dateCreated (string) --
The date on which the infrastructure configuration was created.
dateUpdated (string) --
The date on which the infrastructure configuration was last updated.
tags (dict) --
The tags of the infrastructure configuration.
(string) --
(string) --
nextToken (string) --
The next token used for paginated responses. When this is not empty, there are additional elements that the service has not included in this request. Use this token with the next request to retrieve additional objects.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidPaginationTokenException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'infrastructureConfigurationSummaryList': [
{
'arn': 'string',
'name': 'string',
'description': 'string',
'dateCreated': 'string',
'dateUpdated': 'string',
'tags': {
'string': 'string'
}
},
],
'nextToken': 'string'
}
:returns:
(string) --
(string) --
"""
pass
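# Illustrative usage: building a name -> ARN lookup table from the
# infrastructure configuration summaries documented above. A hedged sketch
# with an assumed client object.
def _example_infrastructure_configuration_arns(client):
    """Map each infrastructure configuration name to its ARN."""
    response = client.list_infrastructure_configurations(maxResults=25)
    summaries = response.get('infrastructureConfigurationSummaryList', [])
    return {summary['name']: summary['arn'] for summary in summaries}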
def list_tags_for_resource(resourceArn=None):
"""
Returns the list of tags for the specified resource.
See also: AWS API Documentation
Exceptions
:example: response = client.list_tags_for_resource(
resourceArn='string'
)
:type resourceArn: string
:param resourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource whose tags you want to retrieve.\n
:rtype: dict
Returns
Response Syntax
{
'tags': {
'string': 'string'
}
}
Response Structure
(dict) --
tags (dict) --The tags for the specified resource.
(string) --
(string) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.InvalidParameterException
imagebuilder.Client.exceptions.ResourceNotFoundException
:return: {
'tags': {
'string': 'string'
}
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.InvalidParameterException
imagebuilder.Client.exceptions.ResourceNotFoundException
"""
pass
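# Illustrative usage: reading the tags on a resource. The ARN below is a
# placeholder made up for the example (the account id and recipe name are not
# real); substitute the ARN of an actual resource.
def _example_read_tags(client):
    """Return the tag dictionary for a hypothetical image recipe ARN."""
    arn = 'arn:aws:imagebuilder:us-east-1:123456789012:image-recipe/my-recipe/1.0.0'
    response = client.list_tags_for_resource(resourceArn=arn)
    return response.get('tags', {})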
def put_component_policy(componentArn=None, policy=None):
"""
Applies a policy to a component. We recommend that you call the RAM API CreateResourceShare to share resources. If you call the Image Builder API PutComponentPolicy , you must also call the RAM API PromoteResourceShareCreatedFromPolicy in order for the resource to be visible to all principals with whom the resource is shared.
See also: AWS API Documentation
Exceptions
:example: response = client.put_component_policy(
componentArn='string',
policy='string'
)
:type componentArn: string
:param componentArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the component that this policy should be applied to.\n
:type policy: string
:param policy: [REQUIRED]\nThe policy to apply.\n
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'componentArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
componentArn (string) --
The Amazon Resource Name (ARN) of the component that this policy was applied to.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidParameterValueException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'componentArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidParameterValueException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
"""
pass
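# Illustrative usage: sharing a component with one other account. The policy
# document is a plausible sketch, not an official template, and the account id
# is a placeholder. As the docstring above notes, the RAM API
# PromoteResourceShareCreatedFromPolicy must also be called before the share
# becomes visible to the other principals.
def _example_share_component(client, component_arn, other_account_id):
    """Apply a resource policy letting another account read the component."""
    import json
    policy = {
        'Version': '2012-10-17',
        'Statement': [{
            'Effect': 'Allow',
            'Principal': {'AWS': 'arn:aws:iam::%s:root' % other_account_id},
            'Action': ['imagebuilder:GetComponent'],
            'Resource': component_arn
        }]
    }
    return client.put_component_policy(componentArn=component_arn,
                                       policy=json.dumps(policy))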
def put_image_policy(imageArn=None, policy=None):
"""
Applies a policy to an image. We recommend that you call the RAM API CreateResourceShare to share resources. If you call the Image Builder API PutImagePolicy , you must also call the RAM API PromoteResourceShareCreatedFromPolicy in order for the resource to be visible to all principals with whom the resource is shared.
See also: AWS API Documentation
Exceptions
:example: response = client.put_image_policy(
imageArn='string',
policy='string'
)
:type imageArn: string
:param imageArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image that this policy should be applied to.\n
:type policy: string
:param policy: [REQUIRED]\nThe policy to apply.\n
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'imageArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
imageArn (string) --
The Amazon Resource Name (ARN) of the image that this policy was applied to.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidParameterValueException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imageArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidParameterValueException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
"""
pass
def put_image_recipe_policy(imageRecipeArn=None, policy=None):
"""
Applies a policy to an image recipe. We recommend that you call the RAM API CreateResourceShare to share resources. If you call the Image Builder API PutImageRecipePolicy , you must also call the RAM API PromoteResourceShareCreatedFromPolicy in order for the resource to be visible to all principals with whom the resource is shared.
See also: AWS API Documentation
Exceptions
:example: response = client.put_image_recipe_policy(
imageRecipeArn='string',
policy='string'
)
:type imageRecipeArn: string
:param imageRecipeArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image recipe that this policy should be applied to.\n
:type policy: string
:param policy: [REQUIRED]\nThe policy to apply.\n
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'imageRecipeArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
imageRecipeArn (string) --
The Amazon Resource Name (ARN) of the image recipe that this policy was applied to.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidParameterValueException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
:return: {
'requestId': 'string',
'imageRecipeArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.InvalidParameterValueException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
"""
pass
def start_image_pipeline_execution(imagePipelineArn=None, clientToken=None):
"""
Manually triggers a pipeline to create an image.
See also: AWS API Documentation
Exceptions
:example: response = client.start_image_pipeline_execution(
imagePipelineArn='string',
clientToken='string'
)
:type imagePipelineArn: string
:param imagePipelineArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image pipeline that you want to manually invoke.\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'imageBuildVersionArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
imageBuildVersionArn (string) --
The Amazon Resource Name (ARN) of the image that was created by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
:return: {
'requestId': 'string',
'clientToken': 'string',
'imageBuildVersionArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.ResourceNotFoundException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
"""
pass
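# Illustrative usage: manually triggering a pipeline and capturing the ARN of
# the image build it starts. clientToken is omitted because, per the docstring
# above, it is autopopulated when not provided; the pipeline ARN is supplied
# by the caller.
def _example_trigger_pipeline(client, pipeline_arn):
    """Start one pipeline execution and return the new image build ARN."""
    response = client.start_image_pipeline_execution(
        imagePipelineArn=pipeline_arn)
    return response['imageBuildVersionArn']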
def tag_resource(resourceArn=None, tags=None):
"""
Adds a tag to a resource.
See also: AWS API Documentation
Exceptions
:example: response = client.tag_resource(
resourceArn='string',
tags={
'string': 'string'
}
)
:type resourceArn: string
:param resourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource that you want to tag.\n
:type tags: dict
:param tags: [REQUIRED]\nThe tags to apply to the resource.\n\n(string) --\n(string) --\n\n\n\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.InvalidParameterException
imagebuilder.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
def untag_resource(resourceArn=None, tagKeys=None):
"""
Removes a tag from a resource.
See also: AWS API Documentation
Exceptions
:example: response = client.untag_resource(
resourceArn='string',
tagKeys=[
'string',
]
)
:type resourceArn: string
:param resourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource that you want to untag.\n
:type tagKeys: list
:param tagKeys: [REQUIRED]\nThe tag keys to remove from the resource.\n\n(string) --\n\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.InvalidParameterException
imagebuilder.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
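# Illustrative usage: a round trip through the three tagging operations above.
# Applies a tag, reads it back, then removes it; the resource ARN is a
# placeholder supplied by the caller and the tag key/value are arbitrary.
def _example_tag_round_trip(client, resource_arn):
    """Tag, inspect, and untag a resource; return the tags seen in between."""
    client.tag_resource(resourceArn=resource_arn, tags={'Team': 'platform'})
    tags = client.list_tags_for_resource(
        resourceArn=resource_arn).get('tags', {})
    client.untag_resource(resourceArn=resource_arn, tagKeys=['Team'])
    return tags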
def update_distribution_configuration(distributionConfigurationArn=None, description=None, distributions=None, clientToken=None):
"""
Updates a distribution configuration. Distribution configurations define and configure the outputs of your pipeline.
See also: AWS API Documentation
Exceptions
:example: response = client.update_distribution_configuration(
distributionConfigurationArn='string',
description='string',
distributions=[
{
'region': 'string',
'amiDistributionConfiguration': {
'name': 'string',
'description': 'string',
'amiTags': {
'string': 'string'
},
'launchPermission': {
'userIds': [
'string',
],
'userGroups': [
'string',
]
}
},
'licenseConfigurationArns': [
'string',
]
},
],
clientToken='string'
)
:type distributionConfigurationArn: string
:param distributionConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the distribution configuration that you want to update.\n
:type description: string
:param description: The description of the distribution configuration.
:type distributions: list
:param distributions: [REQUIRED]\nThe distributions of the distribution configuration.\n\n(dict) --Defines the settings for a specific Region.\n\nregion (string) -- [REQUIRED]The target Region.\n\namiDistributionConfiguration (dict) --The specific AMI settings (for example, launch permissions, AMI tags).\n\nname (string) --The name of the distribution configuration.\n\ndescription (string) --The description of the distribution configuration.\n\namiTags (dict) --The tags to apply to AMIs distributed to this Region.\n\n(string) --\n(string) --\n\n\n\n\nlaunchPermission (dict) --Launch permissions can be used to configure which AWS accounts can use the AMI to launch instances.\n\nuserIds (list) --The AWS account ID.\n\n(string) --\n\n\nuserGroups (list) --The name of the group.\n\n(string) --\n\n\n\n\n\n\nlicenseConfigurationArns (list) --The License Manager Configuration to associate with the AMI in the specified Region.\n\n(string) --\n\n\n\n\n\n
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token of the distribution configuration.\nThis field is autopopulated if not provided.\n
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'distributionConfigurationArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
distributionConfigurationArn (string) --
The Amazon Resource Name (ARN) of the distribution configuration that was updated by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.InvalidParameterCombinationException
:return: {
'requestId': 'string',
'clientToken': 'string',
'distributionConfigurationArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
imagebuilder.Client.exceptions.InvalidParameterCombinationException
"""
pass
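# Illustrative usage: updating a distribution configuration so AMIs built in
# us-east-1 gain launch permission for one other account. The structure
# mirrors the request syntax above; the AMI name macro and account id are
# illustrative placeholders.
def _example_update_distribution(client, config_arn, other_account_id):
    """Grant another account launch permission on distributed AMIs."""
    return client.update_distribution_configuration(
        distributionConfigurationArn=config_arn,
        description='Share built AMIs with a partner account',
        distributions=[{
            'region': 'us-east-1',
            'amiDistributionConfiguration': {
                'name': 'shared-ami-{{ imagebuilder:buildDate }}',
                'launchPermission': {'userIds': [other_account_id]}
            }
        }]
    )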
def update_image_pipeline(imagePipelineArn=None, description=None, imageRecipeArn=None, infrastructureConfigurationArn=None, distributionConfigurationArn=None, imageTestsConfiguration=None, enhancedImageMetadataEnabled=None, schedule=None, status=None, clientToken=None):
"""
Updates an image pipeline. Image pipelines enable you to automate the creation and distribution of images.
See also: AWS API Documentation
Exceptions
:example: response = client.update_image_pipeline(
imagePipelineArn='string',
description='string',
imageRecipeArn='string',
infrastructureConfigurationArn='string',
distributionConfigurationArn='string',
imageTestsConfiguration={
'imageTestsEnabled': True|False,
'timeoutMinutes': 123
},
enhancedImageMetadataEnabled=True|False,
schedule={
'scheduleExpression': 'string',
'pipelineExecutionStartCondition': 'EXPRESSION_MATCH_ONLY'|'EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE'
},
status='DISABLED'|'ENABLED',
clientToken='string'
)
:type imagePipelineArn: string
:param imagePipelineArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image pipeline that you want to update.\n
:type description: string
:param description: The description of the image pipeline.
:type imageRecipeArn: string
:param imageRecipeArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the image recipe that will be used to configure images updated by this image pipeline.\n
:type infrastructureConfigurationArn: string
:param infrastructureConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the infrastructure configuration that will be used to build images updated by this image pipeline.\n
:type distributionConfigurationArn: string
:param distributionConfigurationArn: The Amazon Resource Name (ARN) of the distribution configuration that will be used to configure and distribute images updated by this image pipeline.
:type imageTestsConfiguration: dict
:param imageTestsConfiguration: The image test configuration of the image pipeline.\n\nimageTestsEnabled (boolean) --Defines if tests should be executed when building this image.\n\ntimeoutMinutes (integer) --The maximum time in minutes that tests are permitted to run.\n\n\n
:type enhancedImageMetadataEnabled: boolean
:param enhancedImageMetadataEnabled: Collects additional information about the image being created, including the operating system (OS) version and package list. This information is used to enhance the overall experience of using EC2 Image Builder. Enabled by default.
:type schedule: dict
:param schedule: The schedule of the image pipeline.\n\nscheduleExpression (string) --The expression determines how often EC2 Image Builder evaluates your pipelineExecutionStartCondition .\n\npipelineExecutionStartCondition (string) --The condition configures when the pipeline should trigger a new image build. When the pipelineExecutionStartCondition is set to EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE , EC2 Image Builder will build a new image only when there are known changes pending. When it is set to EXPRESSION_MATCH_ONLY , it will build a new image every time the CRON expression matches the current time.\n\n\n
:type status: string
:param status: The status of the image pipeline.
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'imagePipelineArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
imagePipelineArn (string) --
The Amazon Resource Name (ARN) of the image pipeline that was updated by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
:return: {
'requestId': 'string',
'clientToken': 'string',
'imagePipelineArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
"""
pass
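# Illustrative usage: enabling a pipeline on a schedule that rebuilds only
# when dependency updates are pending, per the schedule semantics described
# above. The ARNs are caller-supplied and the cron expression is illustrative.
def _example_update_pipeline_schedule(client, pipeline_arn, recipe_arn,
                                      infra_arn):
    """Enable a weekly pipeline that builds only when updates are pending."""
    return client.update_image_pipeline(
        imagePipelineArn=pipeline_arn,
        imageRecipeArn=recipe_arn,
        infrastructureConfigurationArn=infra_arn,
        schedule={
            'scheduleExpression': 'cron(0 9 * * mon)',
            'pipelineExecutionStartCondition': 'EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE'
        },
        status='ENABLED'
    )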
def update_infrastructure_configuration(infrastructureConfigurationArn=None, description=None, instanceTypes=None, instanceProfileName=None, securityGroupIds=None, subnetId=None, logging=None, keyPair=None, terminateInstanceOnFailure=None, snsTopicArn=None, clientToken=None):
"""
Updates an infrastructure configuration. An infrastructure configuration defines the environment in which your image will be built and tested.
See also: AWS API Documentation
Exceptions
:example: response = client.update_infrastructure_configuration(
infrastructureConfigurationArn='string',
description='string',
instanceTypes=[
'string',
],
instanceProfileName='string',
securityGroupIds=[
'string',
],
subnetId='string',
logging={
's3Logs': {
's3BucketName': 'string',
's3KeyPrefix': 'string'
}
},
keyPair='string',
terminateInstanceOnFailure=True|False,
snsTopicArn='string',
clientToken='string'
)
:type infrastructureConfigurationArn: string
:param infrastructureConfigurationArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the infrastructure configuration that you want to update.\n
:type description: string
:param description: The description of the infrastructure configuration.
:type instanceTypes: list
:param instanceTypes: The instance types of the infrastructure configuration. You can specify one or more instance types to use for this build. The service will pick one of these instance types based on availability.\n\n(string) --\n\n
:type instanceProfileName: string
:param instanceProfileName: [REQUIRED]\nThe instance profile to associate with the instance used to customize your EC2 AMI.\n
:type securityGroupIds: list
:param securityGroupIds: The security group IDs to associate with the instance used to customize your EC2 AMI.\n\n(string) --\n\n
:type subnetId: string
:param subnetId: The subnet ID to place the instance used to customize your EC2 AMI in.
:type logging: dict
:param logging: The logging configuration of the infrastructure configuration.\n\ns3Logs (dict) --The Amazon S3 logging configuration.\n\ns3BucketName (string) --The Amazon S3 bucket in which to store the logs.\n\ns3KeyPrefix (string) --The Amazon S3 path in which to store the logs.\n\n\n\n\n
:type keyPair: string
:param keyPair: The key pair of the infrastructure configuration. This can be used to log on to and debug the instance used to create your image.
:type terminateInstanceOnFailure: boolean
:param terminateInstanceOnFailure: The terminate instance on failure setting of the infrastructure configuration. Set to false if you want Image Builder to retain the instance used to configure your AMI if the build or test phase of your workflow fails.
:type snsTopicArn: string
:param snsTopicArn: The SNS topic on which to send image build events.
:type clientToken: string
:param clientToken: [REQUIRED]\nThe idempotency token used to make this request idempotent.\nThis field is autopopulated if not provided.\n
:rtype: dict
Returns
Response Syntax
{
'requestId': 'string',
'clientToken': 'string',
'infrastructureConfigurationArn': 'string'
}
Response Structure
(dict) --
requestId (string) --
The request ID that uniquely identifies this request.
clientToken (string) --
The idempotency token used to make this request idempotent.
infrastructureConfigurationArn (string) --
The Amazon Resource Name (ARN) of the infrastructure configuration that was updated by this request.
Exceptions
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
:return: {
'requestId': 'string',
'clientToken': 'string',
'infrastructureConfigurationArn': 'string'
}
:returns:
imagebuilder.Client.exceptions.ServiceException
imagebuilder.Client.exceptions.ClientException
imagebuilder.Client.exceptions.ServiceUnavailableException
imagebuilder.Client.exceptions.InvalidRequestException
imagebuilder.Client.exceptions.IdempotentParameterMismatchException
imagebuilder.Client.exceptions.ForbiddenException
imagebuilder.Client.exceptions.CallRateLimitExceededException
imagebuilder.Client.exceptions.ResourceInUseException
"""
pass
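# Illustrative usage: pointing an infrastructure configuration at different
# build instance types and sending logs to S3, mirroring the request syntax
# above. The profile name, bucket, and instance types are placeholders.
def _example_update_infrastructure(client, config_arn):
    """Switch build instance types and write build logs to an S3 prefix."""
    return client.update_infrastructure_configuration(
        infrastructureConfigurationArn=config_arn,
        instanceProfileName='EC2InstanceProfileForImageBuilder',
        instanceTypes=['t3.large', 'm5.large'],
        logging={'s3Logs': {'s3BucketName': 'my-build-logs',
                            's3KeyPrefix': 'imagebuilder/'}},
        terminateInstanceOnFailure=True
    )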
| 29.984993 | 975 | 0.667082 | 15,654 | 159,850 | 6.795643 | 0.042737 | 0.076989 | 0.11976 | 0.017569 | 0.935908 | 0.923679 | 0.917108 | 0.910161 | 0.906514 | 0.896352 | 0 | 0.002308 | 0.243916 | 159,850 | 5,330 | 976 | 29.990619 | 0.877875 | 0.963103 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0.5 | 0.01087 | 0 | 0.51087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 10 |
577a8d500a8c3772f8091fa52135dca22761e661 | 39,162 | py | Python | tests/test_historical.py | hy3440/nempy | ffc6c3e1a0becde8cbf6ba56d5885768dc1c0a37 | ["BSD-3-Clause"] | 24 | 2020-05-16T11:46:25.000Z | 2022-03-29T22:25:09.000Z | tests/test_historical.py | hy3440/nempy | ffc6c3e1a0becde8cbf6ba56d5885768dc1c0a37 | ["BSD-3-Clause"] | 6 | 2020-11-17T22:37:35.000Z | 2022-03-03T00:11:08.000Z | tests/test_historical.py | hy3440/nempy | ffc6c3e1a0becde8cbf6ba56d5885768dc1c0a37 | ["BSD-3-Clause"] | 12 | 2020-04-30T09:42:22.000Z | 2022-03-06T23:45:08.000Z | import sqlite3
import pandas as pd
from pandas.testing import assert_frame_equal
from datetime import datetime, timedelta
import random
import pickle
from nempy.historical_inputs import loaders, xml_cache, mms_db, units, \
interconnectors, constraints, demand
from tests import historical_market_builder
# These tests require some additional cleanup and will probably not run on your machine. ##############################
def get_test_intervals(number=100):
start_time = datetime(year=2019, month=1, day=1, hour=0, minute=0)
end_time = datetime(year=2019, month=12, day=31, hour=0, minute=0)
difference = end_time - start_time
difference_in_5_min_intervals = difference.days * 12 * 24
random.seed(2)
intervals = random.sample(range(1, difference_in_5_min_intervals), number)
times = [start_time + timedelta(minutes=5 * i) for i in intervals]
times_formatted = [t.isoformat().replace('T', ' ').replace('-', '/') for t in times]
return times_formatted
def get_test_intervals_august_2020(number=100):
start_time = datetime(year=2020, month=8, day=1, hour=0, minute=0)
end_time = datetime(year=2020, month=8, day=31, hour=0, minute=0)
difference = end_time - start_time
difference_in_5_min_intervals = difference.days * 12 * 24
random.seed(2)
intervals = random.sample(range(1, difference_in_5_min_intervals), number)
times = [start_time + timedelta(minutes=5 * i) for i in intervals]
times_formatted = [t.isoformat().replace('T', ' ').replace('-', '/') for t in times]
return times_formatted
def test_ramp_rate_constraints():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals(number=10):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.set_ramp_rate_limits()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.measured_violation_equals_historical_violation(historical_name='ramp_rate',
nempy_constraints=['ramp_up', 'ramp_down'])
def test_ramp_rate_constraints_where_constraints_violated():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
with open('interval_with_violations.pickle', 'rb') as f:
interval_with_violations = pickle.load(f)
tests_to_run = 55
tests_run = 0
for interval, types in interval_with_violations.items():
if tests_run == tests_to_run:
break
if 'ramp_rate' in types:
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.set_ramp_rate_limits()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.measured_violation_equals_historical_violation(historical_name='ramp_rate',
nempy_constraints=['ramp_up',
'ramp_down'])
tests_run += 1
assert tests_to_run == tests_run
def test_fast_start_constraints():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals(number=10):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.set_fast_start_constraints()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.measured_violation_equals_historical_violation('fast_start',
nempy_constraints=['fast_start'])
def test_fast_start_constraints_where_constraints_violated():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
with open('interval_with_violations.pickle', 'rb') as f:
interval_with_violations = pickle.load(f)
tests_to_run = 11
tests_run = 0
for interval, types in interval_with_violations.items():
if tests_run == tests_to_run:
break
if 'fast_start' in types:
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.set_fast_start_constraints()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.measured_violation_equals_historical_violation('fast_start',
nempy_constraints=[
'fast_start'])
tests_run += 1
assert tests_to_run == tests_run
def test_capacity_constraints():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals(number=10):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.add_interconnectors_to_market()
market_builder.set_unit_limit_constraints()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_overrider.set_interconnector_flow_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.measured_violation_equals_historical_violation('unit_capacity',
nempy_constraints=['unit_bid_capacity'])
def test_capacity_constraint_where_constraints_violated():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
with open('interval_with_violations.pickle', 'rb') as f:
interval_with_violations = pickle.load(f)
tests_to_run = 10
tests_run = 0
for interval, types in interval_with_violations.items():
if tests_run == tests_to_run:
break
if 'unit_capacity' in types:
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.add_interconnectors_to_market()
market_builder.set_unit_limit_constraints()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_overrider.set_interconnector_flow_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.measured_violation_equals_historical_violation('unit_capacity',
nempy_constraints=[
'unit_bid_capacity'])
tests_run += 1
assert tests_to_run == tests_run
def ignore_test_fcas_trapezium_scaled_availability():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms_august_2020.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache_august_2020')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals_august_2020(number=10):
if interval != '2020/08/21 13:00:00':
continue
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.set_unit_fcas_constraints()
market_builder.set_unit_limit_constraints()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval,
unit_inputs=unit_inputs)
avails = market_checker.do_fcas_availabilities_match_historical()
# I think NEMDE might be getting avail calcs wrong when units are operating on the slopes, and the slopes
# are vertical. They should ignore 0 slope coefficients; maybe this is not happening because of floating
# point comparison.
if interval == '2019/01/29 18:10:00':
avails = avails[~(avails['unit'] == 'PPCCGT')]
if interval == '2019/01/07 19:35:00':
avails = avails[~(avails['unit'] == 'PPCCGT')]
#assert avails['error'].abs().max() < 1.1
def ignore_test_find_fcas_trapezium_scaled_availability_errors():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms_august_2020.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache_august_2020')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
outputs = []
for interval in get_test_intervals_august_2020(number=100):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
unit_inputs.get_processed_bids()
unit_inputs.add_fcas_trapezium_constraints()
traps = unit_inputs.get_fcas_regulation_trapeziums()
traps = traps[traps['service'] == 'lower_reg']
avails = mms_database.DISPATCHLOAD.get_data(interval)
avails = avails.loc[:, ['DUID', 'TOTALCLEARED', 'LOWERREG', 'LOWERREGACTUALAVAILABILITY']]
avails.columns = ['unit', 'total_cleared', 'lower_reg', 'lower_reg_actual_availability']
avails = avails[avails['lower_reg'] > avails['lower_reg_actual_availability'] + 0.1]
avails = pd.merge(avails, traps, on='unit')
avails['time'] = interval
outputs.append(avails)
pd.concat(outputs).to_csv('avails_august_2020.csv')
def test_all_units_and_service_dispatch_historically_present_in_market():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals(number=1000):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market = market_builder.get_market_object()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.all_dispatch_units_and_services_have_decision_variables()
def test_slack_in_generic_constraints():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals(number=100):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.add_interconnectors_to_market()
market_builder.add_generic_constraints()
market_builder.set_unit_fcas_constraints()
market_builder.set_unit_limit_constraints()
market_builder.set_region_demand_constraints()
market_builder.set_ramp_rate_limits()
market_builder.set_fast_start_constraints()
market_builder.set_solver('CBC')
market_builder.dispatch(calc_prices=True)
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_overrider.set_interconnector_flow_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.is_generic_constraint_slack_correct()
assert market_checker.is_regional_demand_meet()
def test_slack_in_generic_constraints_with_fcas_interface():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals(number=100):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.add_interconnectors_to_market()
market_builder.add_generic_constraints_with_fcas_requirements_interface()
market_builder.set_unit_fcas_constraints()
market_builder.set_unit_limit_constraints()
market_builder.set_region_demand_constraints()
market_builder.set_ramp_rate_limits()
market_builder.set_fast_start_constraints()
market_builder.set_solver('CBC')
market_builder.dispatch(calc_prices=True)
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_overrider.set_interconnector_flow_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
assert market_checker.is_generic_constraint_slack_correct()
assert market_checker.is_fcas_constraint_slack_correct()
assert market_checker.is_regional_demand_meet()
def test_hist_dispatch_values_meet_demand():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
for interval in get_test_intervals(number=100):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.add_interconnectors_to_market()
market = market_builder.get_market_object()
market_overrider = historical_market_builder.MarketOverrider(market=market,
mms_db=mms_database,
interval=interval)
market_overrider.set_unit_dispatch_to_historical_values()
market_overrider.set_interconnector_flow_to_historical_values()
market_builder.dispatch()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
test_passed = market_checker.is_regional_demand_meet()
assert test_passed
con.close()
def test_against_10_interval_benchmark():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
outputs = []
for interval in get_test_intervals(number=10):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.add_interconnectors_to_market()
market_builder.add_generic_constraints_with_fcas_requirements_interface()
market_builder.set_unit_fcas_constraints()
market_builder.set_unit_limit_constraints()
market_builder.set_region_demand_constraints()
market_builder.set_ramp_rate_limits()
market_builder.set_fast_start_constraints()
market_builder.set_solver('GUROBI')
market_builder.dispatch(calc_prices=True)
market = market_builder.get_market_object()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
price_comp = market_checker.get_price_comparison()
outputs.append(price_comp)
outputs = pd.concat(outputs)
outputs.to_csv('latest_10_interval_run.csv', index=False)
benchmark = pd.read_csv('10_interval_benchmark.csv')
assert_frame_equal(outputs.reset_index(drop=True), benchmark, check_exact=False, atol=1e-2)
def test_against_100_interval_benchmark():
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_database = mms_db.DBManager(con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
raw_inputs_loader = loaders.RawInputsLoader(nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_database)
outputs = []
for interval in get_test_intervals(number=100):
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
market_builder = historical_market_builder.SpotMarketBuilder(unit_inputs=unit_inputs,
interconnector_inputs=interconnector_inputs,
constraint_inputs=constraint_inputs,
demand_inputs=demand_inputs)
market_builder.add_unit_bids_to_market()
market_builder.add_interconnectors_to_market()
market_builder.add_generic_constraints_with_fcas_requirements_interface()
market_builder.set_unit_fcas_constraints()
market_builder.set_unit_limit_constraints()
market_builder.set_region_demand_constraints()
market_builder.set_ramp_rate_limits()
market_builder.set_fast_start_constraints()
market_builder.set_solver('GUROBI')
market_builder.dispatch(calc_prices=True)
market = market_builder.get_market_object()
market_checker = historical_market_builder.MarketChecker(market=market,
mms_db=mms_database,
xml_cache=xml_cache_manager,
interval=interval)
price_comp = market_checker.get_price_comparison()
outputs.append(price_comp)
outputs = pd.concat(outputs)
outputs.to_csv('latest_100_interval_run.csv', index=False)
benchmark = pd.read_csv('100_interval_benchmark.csv')
assert_frame_equal(outputs.reset_index(drop=True), benchmark, check_exact=False, atol=1e-2)
def test_against_1000_interval_benchmark():
    con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
    mms_database = mms_db.DBManager(con)
    xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
    raw_inputs_loader = loaders.RawInputsLoader(
        nemde_xml_cache_manager=xml_cache_manager,
        market_management_system_database=mms_database)
    outputs = []
    for interval in get_test_intervals(number=1000):
        raw_inputs_loader.set_interval(interval)
        unit_inputs = units.UnitData(raw_inputs_loader)
        interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
        constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
        demand_inputs = demand.DemandData(raw_inputs_loader)
        market_builder = historical_market_builder.SpotMarketBuilder(
            unit_inputs=unit_inputs,
            interconnector_inputs=interconnector_inputs,
            constraint_inputs=constraint_inputs,
            demand_inputs=demand_inputs)
        market_builder.add_unit_bids_to_market()
        market_builder.add_interconnectors_to_market()
        market_builder.add_generic_constraints_with_fcas_requirements_interface()
        market_builder.set_unit_fcas_constraints()
        market_builder.set_unit_limit_constraints()
        market_builder.set_region_demand_constraints()
        market_builder.set_ramp_rate_limits()
        market_builder.set_fast_start_constraints()
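        # Note: unlike the 10- and 100-interval tests, no solver is set here, so the builder's default solver is used.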
        market_builder.dispatch(calc_prices=True)
        market = market_builder.get_market_object()
        market_checker = historical_market_builder.MarketChecker(
            market=market,
            mms_db=mms_database,
            xml_cache=xml_cache,
            interval=interval)
        price_comp = market_checker.get_price_comparison()
        outputs.append(price_comp)
    outputs = pd.concat(outputs)
    outputs.to_csv('latest_1000_interval_run.csv', index=False)
    benchmark = pd.read_csv('1000_interval_benchmark.csv')
    # check_less_precise is deprecated in pandas; rtol/atol of 0.5e-3 is its documented equivalent for 3 decimals.
    assert_frame_equal(outputs.reset_index(drop=True), benchmark.reset_index(drop=True),
                       check_exact=False, rtol=0.5e-3, atol=0.5e-3)
| 57.846381 | 122 | 0.6098 | 3,719 | 39,162 | 5.967733 | 0.071525 | 0.085519 | 0.0588 | 0.026674 | 0.928089 | 0.924214 | 0.916374 | 0.913175 | 0.913175 | 0.906326 | 0 | 0.009962 | 0.33101 | 39,162 | 676 | 123 | 57.931953 | 0.837169 | 0.00909 | 0 | 0.859745 | 0 | 0 | 0.066546 | 0.0569 | 0 | 0 | 0 | 0 | 0.03643 | 1 | 0.030965 | false | 0.003643 | 0.014572 | 0 | 0.04918 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
57ad691337fc6be9b00f0da3ab0adeafc11a4408 | 222 | py | Python | examples/bruteforce.py | abailie3/TravelingIntelligence | 33f77fbfecf5d7fc34dbb3db7230f4e14d4a9469 | [
"MIT"
] | 1 | 2018-03-14T11:28:32.000Z | 2018-03-14T11:28:32.000Z | examples/bruteforce.py | abailie3/TravelingIntelligence | 33f77fbfecf5d7fc34dbb3db7230f4e14d4a9469 | [
"MIT"
] | 1 | 2018-01-28T17:29:33.000Z | 2018-01-28T17:29:33.000Z | examples/bruteforce.py | abailie3/TravelingIntelligence | 33f77fbfecf5d7fc34dbb3db7230f4e14d4a9469 | [
"MIT"
] | null | null | null | from travelingintelligence.bruteforcemethod import BruteForce
from travelingintelligence.tsproblem import TSProblem
from travelingintelligence.tsvisualizer import ProblemVisualizer
# TODO: Add bruteforce method demo here. | 44.4 | 64 | 0.887387 | 21 | 222 | 9.380952 | 0.619048 | 0.380711 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085586 | 222 | 5 | 65 | 44.4 | 0.970443 | 0.171171 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
57f8ca96be4d88d72501ad09cff0c72347ce828d | 33,573 | py | Python | build/lib.macosx-10.9-x86_64-3.9/gators/imputers/tests/test_imputers.py | Aditya-Kapadiya/gators | d7c9967e3a8e304a601b6a92ad834d03d3e36338 | [
"Apache-2.0"
] | 4 | 2021-10-29T18:20:52.000Z | 2022-03-31T22:53:03.000Z | build/lib.macosx-10.9-x86_64-3.9/gators/imputers/tests/test_imputers.py | Aditya-Kapadiya/gators | d7c9967e3a8e304a601b6a92ad834d03d3e36338 | [
"Apache-2.0"
] | 1 | 2022-02-21T20:02:16.000Z | 2022-02-21T20:02:16.000Z | build/lib.macosx-10.9-x86_64-3.9/gators/imputers/tests/test_imputers.py | Aditya-Kapadiya/gators | d7c9967e3a8e304a601b6a92ad834d03d3e36338 | [
"Apache-2.0"
] | 5 | 2021-11-17T20:16:54.000Z | 2022-02-21T18:21:02.000Z | # License: Apache-2.0
import databricks.koalas as ks
import pandas as pd
import numpy as np
import pytest
from pandas.testing import assert_frame_equal
from gators.imputers.numerics_imputer import NumericsImputer
from gators.imputers.int_imputer import IntImputer
from gators.imputers.float_imputer import FloatImputer
from gators.imputers.object_imputer import ObjectImputer
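# Build the Koalas default index as a distributed sequence rather than collecting data to the driver.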
ks.set_option('compute.default_index_type', 'distributed-sequence')
@pytest.fixture()
def data():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', None], 'F': ['a', 'a', 's', np.nan]})
    X_int_expected = pd.DataFrame(
        {'A': [0., 1., 1., -9.], 'B': [3., 4., 4., -9.]})
    X_float_expected = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, 1.1], 'D': [2.1, 3.1, 4.1, 3.1]})
    X_object_expected = pd.DataFrame(
        {'E': ['q', 'w', 'w', 'MISSING'], 'F': ['a', 'a', 's', 'MISSING']})
    obj_int = IntImputer(strategy='constant', value=-9).fit(X_int)
    obj_float = FloatImputer(strategy='mean').fit(X_float)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X_object)
    X_dict = {
        'int': X_int,
        'float': X_float,
        'object': X_object,
    }
    X_expected_dict = {
        'int': X_int_expected,
        'float': X_float_expected,
        'object': X_object_expected,
    }
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
        'object': obj_object,
    }
    return objs_dict, X_dict, X_expected_dict
@pytest.fixture()
def data_num():
    X_int = pd.DataFrame(
        {'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]},
        dtype=np.float32)
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]},
        dtype=np.float32)
    X_int_expected = pd.DataFrame(
        {'A': [0., 1., 1., -9.], 'B': [3., 4., 4., -9.]},
        dtype=np.float32)
    X_float_expected = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, 1.1], 'D': [2.1, 3.1, 4.1, 3.1]},
        dtype=np.float32)
    obj_int = IntImputer(strategy='constant', value=-9).fit(X_int)
    obj_float = FloatImputer(strategy='mean').fit(X_float)
    X_dict = {
        'int': X_int,
        'float': X_float,
    }
    X_expected_dict = {
        'int': X_int_expected,
        'float': X_float_expected,
    }
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
    }
    return objs_dict, X_dict, X_expected_dict
@pytest.fixture()
def data_no_missing():
    X_int = pd.DataFrame({'A': [0, 1, 1, 8], 'B': [3, 4, 4, 8]}, dtype=int)
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, 9.], 'D': [2.1, 3.1, 4.1, 9.]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', 'x'], 'F': ['a', 'a', 's', 'x']})
    obj_int = IntImputer(strategy='constant', value=-9).fit(X_int)
    obj_float = FloatImputer(strategy='mean').fit(X_float)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X_object)
    X_dict = {
        'int': X_int,
        'float': X_float,
        'object': X_object,
    }
    X_expected_dict = {
        'int': X_int.copy(),
        'float': X_float.copy(),
        'object': X_object.copy(),
    }
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
        'object': obj_object,
    }
    return objs_dict, X_dict, X_expected_dict
@pytest.fixture
def data_full():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -9.0, 1.1, 3.1, 'w', 'a']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_int = IntImputer(strategy='constant', value=-9).fit(X)
    obj_float = FloatImputer(strategy='median').fit(X)
    obj_object = ObjectImputer(strategy='most_frequent').fit(X)
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
        'object': obj_object,
    }
    return objs_dict, X, X_expected
@pytest.fixture()
def data_ks():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', None], 'F': ['a', 'a', 's', np.nan]})
    X_int_expected = pd.DataFrame(
        {'A': [0., 1., 1., -9.], 'B': [3., 4., 4., -9.]})
    X_float_expected = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, 1.1], 'D': [2.1, 3.1, 4.1, 3.1]})
    X_object_expected = pd.DataFrame(
        {'E': ['q', 'w', 'w', 'MISSING'], 'F': ['a', 'a', 's', 'MISSING']})
    X_int_ks = ks.from_pandas(X_int)
    X_float_ks = ks.from_pandas(X_float)
    X_object_ks = ks.from_pandas(X_object)
    obj_int = IntImputer(strategy='constant', value=-9).fit(X_int)
    obj_float = FloatImputer(strategy='mean').fit(X_float)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X_object)
    X_dict = {
        'int': X_int,
        'float': X_float,
        'object': X_object,
    }
    X_dict = {
        'int': X_int_ks,
        'float': X_float_ks,
        'object': X_object_ks,
    }
    X_expected_dict = {
        'int': X_int_expected,
        'float': X_float_expected,
        'object': X_object_expected,
    }
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
        'object': obj_object,
    }
    return objs_dict, X_dict, X_expected_dict
@pytest.fixture()
def data_num_ks():
    X_int = ks.DataFrame(
        {'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]},
        dtype=np.float32)
    X_float = ks.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]},
        dtype=np.float32)
    X_int_expected = pd.DataFrame(
        {'A': [0., 1., 1., -9.], 'B': [3., 4., 4., -9.]},
        dtype=np.float32)
    X_float_expected = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, 1.1], 'D': [2.1, 3.1, 4.1, 3.1]},
        dtype=np.float32)
    obj_int = IntImputer(strategy='constant', value=-9).fit(X_int)
    obj_float = FloatImputer(strategy='mean').fit(X_float)
    X_dict = {
        'int': X_int,
        'float': X_float,
    }
    X_expected_dict = {
        'int': X_int_expected,
        'float': X_float_expected,
    }
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
    }
    return objs_dict, X_dict, X_expected_dict
@pytest.fixture()
def data_no_missing_ks():
    X_int = ks.DataFrame({'A': [0, 1, 1, 8], 'B': [3, 4, 4, 8]}, dtype=int)
    X_float = ks.DataFrame(
        {'C': [0.1, 1.1, 2.1, 9.], 'D': [2.1, 3.1, 4.1, 9.]})
    X_object = ks.DataFrame(
        {'E': ['q', 'w', 'w', 'x'], 'F': ['a', 'a', 's', 'x']})
    obj_int = IntImputer(strategy='constant', value=-9).fit(X_int)
    obj_float = FloatImputer(strategy='mean').fit(X_float)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X_object)
    X_dict = {
        'int': X_int,
        'float': X_float,
        'object': X_object,
    }
    X_expected_dict = {
        'int': X_int.to_pandas().copy(),
        'float': X_float.to_pandas().copy(),
        'object': X_object.to_pandas().copy(),
    }
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
        'object': obj_object,
    }
    return objs_dict, X_dict, X_expected_dict
@pytest.fixture
def data_full_ks():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = ks.from_pandas(pd.concat([X_int, X_float, X_object], axis=1))
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -9.0, 1.1, 3.1, 'w', 'a']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_int = IntImputer(strategy='constant', value=-9).fit(X)
    obj_float = FloatImputer(strategy='median').fit(X)
    obj_object = ObjectImputer(strategy='most_frequent').fit(X)
    objs_dict = {
        'int': obj_int,
        'float': obj_float,
        'object': obj_object,
    }
    return objs_dict, X, X_expected
def test_int_pd(data):
    objs_dict, X_dict, X_expected_dict = data
    assert_frame_equal(
        objs_dict['int'].transform(X_dict['int']), X_expected_dict['int'],
    )
def test_float_pd(data):
    objs_dict, X_dict, X_expected_dict = data
    assert_frame_equal(
        objs_dict['float'].transform(
            X_dict['float']), X_expected_dict['float'],
    )
def test_object_pd(data):
    objs_dict, X_dict, X_expected_dict = data
    assert_frame_equal(
        objs_dict['object'].transform(
            X_dict['object']), X_expected_dict['object'],
    )
@pytest.mark.koalas
def test_int_ks(data_ks):
    objs_dict, X_dict, X_expected_dict = data_ks
    assert_frame_equal(
        objs_dict['int'].transform(X_dict['int']).to_pandas(),
        X_expected_dict['int'],)
@pytest.mark.koalas
def test_float_ks(data_ks):
    objs_dict, X_dict, X_expected_dict = data_ks
    assert_frame_equal(
        objs_dict['float'].transform(X_dict['float']).to_pandas(),
        X_expected_dict['float'])
@pytest.mark.koalas
def test_object_ks(data_ks):
    objs_dict, X_dict, X_expected_dict = data_ks
    assert_frame_equal(
        objs_dict['object'].transform(X_dict['object']).to_pandas(),
        X_expected_dict['object'],
    )
def test_int_pd_np(data):
    objs_dict, X_dict, X_expected_dict = data
    X_new_np = objs_dict['int'].transform_numpy(X_dict['int'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['int'].columns)
    assert_frame_equal(X_new, X_expected_dict['int'])
def test_float_pd_np(data):
    objs_dict, X_dict, X_expected_dict = data
    X_new_np = objs_dict['float'].transform_numpy(X_dict['float'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['float'].columns)
    assert_frame_equal(X_new, X_expected_dict['float'])
def test_object_pd_np(data):
    objs_dict, X_dict, X_expected_dict = data
    X_new_np = objs_dict['object'].transform_numpy(X_dict['object'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['object'].columns)
    assert_frame_equal(X_new, X_expected_dict['object'])
@pytest.mark.koalas
def test_int_ks_np(data_ks):
    objs_dict, X_dict, X_expected_dict = data_ks
    X_new_np = objs_dict['int'].transform_numpy(X_dict['int'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['int'].columns)
    assert_frame_equal(X_new, X_expected_dict['int'])
@pytest.mark.koalas
def test_float_ks_np(data_ks):
    objs_dict, X_dict, X_expected_dict = data_ks
    X_new_np = objs_dict['float'].transform_numpy(
        X_dict['float'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['float'].columns)
    assert_frame_equal(X_new, X_expected_dict['float'])
@pytest.mark.koalas
def test_object_ks_np(data_ks):
    objs_dict, X_dict, X_expected_dict = data_ks
    X_new_np = objs_dict['object'].transform_numpy(
        X_dict['object'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['object'].columns)
    assert_frame_equal(X_new, X_expected_dict['object'])
def test_num_int_pd(data_num):
    objs_dict, X_dict, X_expected_dict = data_num
    assert_frame_equal(
        objs_dict['int'].transform(X_dict['int']), X_expected_dict['int'],
    )
def test_num_float_pd(data_num):
    objs_dict, X_dict, X_expected_dict = data_num
    assert_frame_equal(
        objs_dict['float'].transform(
            X_dict['float']), X_expected_dict['float'],
    )
@pytest.mark.koalas
def test_num_int_ks(data_num_ks):
    objs_dict, X_dict, X_expected_dict = data_num_ks
    assert_frame_equal(objs_dict['int'].transform(
        X_dict['int'].to_pandas()), X_expected_dict['int'],
    )
@pytest.mark.koalas
def test_num_float_ks(data_num_ks):
    objs_dict, X_dict, X_expected_dict = data_num_ks
    assert_frame_equal(objs_dict['float'].transform(
        X_dict['float'].to_pandas()), X_expected_dict['float'],
    )
def test_num_int_pd_np(data_num):
    objs_dict, X_dict, X_expected_dict = data_num
    X_new_np = objs_dict['int'].transform_numpy(X_dict['int'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['int'].columns)
    assert_frame_equal(X_new, X_expected_dict['int'])
def test_num_float_pd_np(data_num):
    objs_dict, X_dict, X_expected_dict = data_num
    X_new_np = objs_dict['float'].transform_numpy(X_dict['float'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['float'].columns)
    assert_frame_equal(X_new, X_expected_dict['float'])
@pytest.mark.koalas
def test_num_int_ks_np(data_num_ks):
    objs_dict, X_dict, X_expected_dict = data_num_ks
    X_new_np = objs_dict['int'].transform_numpy(X_dict['int'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['int'].columns)
    assert_frame_equal(X_new, X_expected_dict['int'])
@pytest.mark.koalas
def test_num_float_ks_np(data_num_ks):
    objs_dict, X_dict, X_expected_dict = data_num_ks
    X_new_np = objs_dict['float'].transform_numpy(
        X_dict['float'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['float'].columns)
    assert_frame_equal(X_new, X_expected_dict['float'])
def test_no_missing_int_pd(data_no_missing):
    objs_dict, X_dict, X_expected_dict = data_no_missing
    assert_frame_equal(
        objs_dict['int'].transform(X_dict['int']), X_expected_dict['int'],
    )
def test_no_missing_float_pd(data_no_missing):
    objs_dict, X_dict, X_expected_dict = data_no_missing
    assert_frame_equal(
        objs_dict['float'].transform(
            X_dict['float']), X_expected_dict['float'],
    )
def test_no_missing_object_pd(data_no_missing):
    objs_dict, X_dict, X_expected_dict = data_no_missing
    assert_frame_equal(
        objs_dict['object'].transform(
            X_dict['object']), X_expected_dict['object'],
    )
@pytest.mark.koalas
def test_no_missing_int_ks(data_no_missing_ks):
    objs_dict, X_dict, X_expected_dict = data_no_missing_ks
    assert_frame_equal(objs_dict['int'].transform(
        X_dict['int'].to_pandas()), X_expected_dict['int'],
    )
@pytest.mark.koalas
def test_no_missing_float_ks(data_no_missing_ks):
    objs_dict, X_dict, X_expected_dict = data_no_missing_ks
    assert_frame_equal(objs_dict['float'].transform(
        X_dict['float'].to_pandas()), X_expected_dict['float'],
    )
@pytest.mark.koalas
def test_no_missing_object_ks(data_no_missing_ks):
    objs_dict, X_dict, X_expected_dict = data_no_missing_ks
    assert_frame_equal(objs_dict['object'].transform(
        X_dict['object'].to_pandas()), X_expected_dict['object'],
    )
def test_no_missing_int_pd_np(data_no_missing):
    objs_dict, X_dict, X_expected_dict = data_no_missing
    X_new_np = objs_dict['int'].transform_numpy(X_dict['int'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['int'].columns)
    assert_frame_equal(X_new, X_expected_dict['int'])
def test_no_missing_float_pd_np(data_no_missing):
    objs_dict, X_dict, X_expected_dict = data_no_missing
    X_new_np = objs_dict['float'].transform_numpy(X_dict['float'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['float'].columns)
    assert_frame_equal(X_new, X_expected_dict['float'])
def test_no_missing_object_pd_np(data_no_missing):
    objs_dict, X_dict, X_expected_dict = data_no_missing
    X_new_np = objs_dict['object'].transform_numpy(X_dict['object'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['object'].columns)
    assert_frame_equal(X_new, X_expected_dict['object'])
@pytest.mark.koalas
def test_no_missing_int_ks_np(data_no_missing_ks):
    objs_dict, X_dict, X_expected_dict = data_no_missing_ks
    X_new_np = objs_dict['int'].transform_numpy(X_dict['int'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['int'].columns)
    assert_frame_equal(X_new, X_expected_dict['int'])
@pytest.mark.koalas
def test_no_missing_float_ks_np(data_no_missing_ks):
    objs_dict, X_dict, X_expected_dict = data_no_missing_ks
    X_new_np = objs_dict['float'].transform_numpy(
        X_dict['float'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['float'].columns)
    assert_frame_equal(X_new, X_expected_dict['float'])
@pytest.mark.koalas
def test_no_missing_object_ks_np(data_no_missing_ks):
    objs_dict, X_dict, X_expected_dict = data_no_missing_ks
    X_new_np = objs_dict['object'].transform_numpy(
        X_dict['object'].to_numpy())
    X_new = pd.DataFrame(X_new_np, columns=X_dict['object'].columns)
    assert_frame_equal(X_new, X_expected_dict['object'])
def test_full_pd(data_full):
    objs_dict, X, X_expected = data_full
    X_new = objs_dict['object'].transform(X)
    X_new = objs_dict['int'].transform(X_new)
    X_new = objs_dict['float'].transform(X_new)
    assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_full_ks(data_full_ks):
    objs_dict, X, X_expected = data_full_ks
    X_new = objs_dict['object'].transform(X)
    X_new = objs_dict['int'].transform(X_new)
    X_new = objs_dict['float'].transform(X_new)
    assert_frame_equal(X_new.to_pandas(), X_expected)
def test_full_pd_np(data_full):
    objs_dict, X, X_expected = data_full
    X_new = objs_dict['object'].transform_numpy(X.to_numpy())
    X_new = objs_dict['int'].transform_numpy(X_new)
    X_new = objs_dict['float'].transform_numpy(X_new)
    X_new = pd.DataFrame(X_new, columns=['A', 'B', 'C', 'D', 'E', 'F'])
    assert_frame_equal(X_new, X_expected.astype(object))
@pytest.mark.koalas
def test_full_ks_np(data_full_ks):
    objs_dict, X, X_expected = data_full_ks
    X_new = objs_dict['object'].transform_numpy(X.to_numpy())
    X_new = objs_dict['int'].transform_numpy(X_new)
    X_new = objs_dict['float'].transform_numpy(X_new)
    X_new = pd.DataFrame(X_new, columns=['A', 'B', 'C', 'D', 'E', 'F'])
    assert_frame_equal(X_new, X_expected.astype(object))
def test_imputers_columns_pd():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -99.0, -999.0, -9999.0, 'missing', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_int_A = IntImputer(
        strategy='constant', value=-9, columns=['A']).fit(X)
    obj_int_B = IntImputer(
        strategy='constant', value=-99, columns=['B']).fit(X)
    obj_float_C = FloatImputer(
        strategy='constant', value=-999., columns=['C']).fit(X)
    obj_float_D = FloatImputer(
        strategy='constant', value=-9999., columns=['D']).fit(X)
    obj_object_E = ObjectImputer(
        strategy='constant', value='missing', columns=['E']).fit(X)
    obj_object_F = ObjectImputer(
        strategy='constant', value='MISSING', columns=['F']).fit(X)
    X_new = obj_int_A.transform(X)
    X_new = obj_int_B.transform(X_new)
    X_new = obj_float_C.transform(X_new)
    X_new = obj_float_D.transform(X_new)
    X_new = obj_object_E.transform(X_new)
    X_new = obj_object_F.transform(X_new)
    assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_imputers_columns_ks():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X = ks.from_pandas(X)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -99.0, -999.0, -9999.0, 'missing', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_int_A = IntImputer(
        strategy='constant', value=-9, columns=['A']).fit(X)
    obj_int_B = IntImputer(
        strategy='constant', value=-99, columns=['B']).fit(X)
    obj_float_C = FloatImputer(
        strategy='constant', value=-999., columns=['C']).fit(X)
    obj_float_D = FloatImputer(
        strategy='constant', value=-9999., columns=['D']).fit(X)
    obj_object_E = ObjectImputer(
        strategy='constant', value='missing', columns=['E']).fit(X)
    obj_object_F = ObjectImputer(
        strategy='constant', value='MISSING', columns=['F']).fit(X)
    X_new = obj_int_A.transform(X)
    X_new = obj_int_B.transform(X_new)
    X_new = obj_float_C.transform(X_new)
    X_new = obj_float_D.transform(X_new)
    X_new = obj_object_E.transform(X_new)
    X_new = obj_object_F.transform(X_new)
    assert_frame_equal(X_new.to_pandas(), X_expected)
def test_imputers_columns_pd_np():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -99.0, -999.0, -9999.0, 'missing', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_int_A = IntImputer(
        strategy='constant', value=-9, columns=['A']).fit(X)
    obj_int_B = IntImputer(
        strategy='constant', value=-99, columns=['B']).fit(X)
    obj_float_C = FloatImputer(
        strategy='constant', value=-999., columns=['C']).fit(X)
    obj_float_D = FloatImputer(
        strategy='constant', value=-9999., columns=['D']).fit(X)
    obj_object_E = ObjectImputer(
        strategy='constant', value='missing', columns=['E']).fit(X)
    obj_object_F = ObjectImputer(
        strategy='constant', value='MISSING', columns=['F']).fit(X)
    X_new = obj_int_A.transform_numpy(X.to_numpy())
    X_new = obj_int_B.transform_numpy(X_new)
    X_new = obj_float_C.transform_numpy(X_new)
    X_new = obj_float_D.transform_numpy(X_new)
    X_new = obj_object_E.transform_numpy(X_new)
    X_new = obj_object_F.transform_numpy(X_new)
    assert_frame_equal(
        pd.DataFrame(X_new, columns=list('ABCDEF')),
        X_expected.astype(object))
@pytest.mark.koalas
def test_imputers_columns_ks_np():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X = ks.from_pandas(X)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -99.0, -999.0, -9999.0, 'missing', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_int_A = IntImputer(
        strategy='constant', value=-9, columns=['A']).fit(X)
    obj_int_B = IntImputer(
        strategy='constant', value=-99, columns=['B']).fit(X)
    obj_float_C = FloatImputer(
        strategy='constant', value=-999., columns=['C']).fit(X)
    obj_float_D = FloatImputer(
        strategy='constant', value=-9999., columns=['D']).fit(X)
    obj_object_E = ObjectImputer(
        strategy='constant', value='missing', columns=['E']).fit(X)
    obj_object_F = ObjectImputer(
        strategy='constant', value='MISSING', columns=['F']).fit(X)
    X_new = obj_int_A.transform_numpy(X.to_numpy())
    X_new = obj_int_B.transform_numpy(X_new)
    X_new = obj_float_C.transform_numpy(X_new)
    X_new = obj_float_D.transform_numpy(X_new)
    X_new = obj_object_E.transform_numpy(X_new)
    X_new = obj_object_F.transform_numpy(X_new)
    assert_frame_equal(
        pd.DataFrame(X_new, columns=list('ABCDEF')),
        X_expected.astype(object))
def test_imputers_num_pd():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -9.0, -9.0, -9.0, 'MISSING', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_num = NumericsImputer(
        strategy='constant', value=-9.).fit(X)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X)
    X_new = obj_num.transform(X)
    X_new = obj_object.transform(X_new)
    assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_imputers_num_ks():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X = ks.from_pandas(X)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -9.0, -9.0, -9.0, 'MISSING', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_num = NumericsImputer(
        strategy='constant', value=-9.).fit(X)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X)
    X_new = obj_num.transform(X)
    X_new = obj_object.transform(X_new)
    assert_frame_equal(X_new.to_pandas(), X_expected)
def test_imputers_num_pd_np():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -9.0, -9.0, -9.0, 'MISSING', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_num = NumericsImputer(
        strategy='constant', value=-9.).fit(X)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X)
    X_new = obj_num.transform_numpy(X.to_numpy())
    X_new = obj_object.transform_numpy(X_new)
    assert_frame_equal(
        pd.DataFrame(X_new, columns=list('ABCDEF')),
        X_expected.astype(object))
@pytest.mark.koalas
def test_imputers_num_ks_np():
    X_int = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    X_float = pd.DataFrame(
        {'C': [0.1, 1.1, 2.1, np.nan], 'D': [2.1, 3.1, 4.1, np.nan]})
    X_object = pd.DataFrame(
        {'E': ['q', 'w', 'w', np.nan], 'F': ['a', 'a', 's', None]})
    X = pd.concat([X_int, X_float, X_object], axis=1)
    X = ks.from_pandas(X)
    X_expected = pd.DataFrame(
        [[0.0, 3.0, 0.1, 2.1, 'q', 'a'],
         [1.0, 4.0, 1.1, 3.1, 'w', 'a'],
         [1.0, 4.0, 2.1, 4.1, 'w', 's'],
         [-9.0, -9.0, -9.0, -9.0, 'MISSING', 'MISSING']],
        columns=['A', 'B', 'C', 'D', 'E', 'F'],
    )
    obj_num = NumericsImputer(
        strategy='constant', value=-9.).fit(X)
    obj_object = ObjectImputer(
        strategy='constant', value='MISSING').fit(X)
    X_new = obj_num.transform_numpy(X.to_numpy())
    X_new = obj_object.transform_numpy(X_new)
    assert_frame_equal(
        pd.DataFrame(X_new, columns=list('ABCDEF')),
        X_expected.astype(object))
def test_num_np():
    X = pd.DataFrame({'A': [0, 1, np.nan]})
    obj = NumericsImputer(strategy='mean').fit(X)
    assert obj.transform_numpy(X.to_numpy()).tolist() == [[0.0], [1.0], [0.5]]
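# The remaining tests cover constructor validation, bad input types, and columns with nothing to impute.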
def test_imputers_strategy():
    X = pd.DataFrame([])
    with pytest.raises(TypeError):
        _ = FloatImputer(strategy=0)
    with pytest.raises(TypeError):
        _ = NumericsImputer(strategy=0)
    with pytest.raises(TypeError):
        _ = IntImputer(strategy='constant', value='a').fit(X)
    with pytest.raises(TypeError):
        _ = FloatImputer(strategy='constant', value='a').fit(X)
    with pytest.raises(TypeError):
        _ = NumericsImputer(strategy='constant', value='a').fit(X)
    with pytest.raises(TypeError):
        _ = ObjectImputer(strategy='constant', value=1).fit(X)
    with pytest.raises(ValueError):
        _ = IntImputer(strategy='').fit(X)
    with pytest.raises(ValueError):
        _ = FloatImputer(strategy='').fit(X)
    with pytest.raises(ValueError):
        _ = NumericsImputer(strategy='').fit(X)
    with pytest.raises(ValueError):
        _ = ObjectImputer(strategy='').fit(X)
    with pytest.raises(ValueError):
        _ = FloatImputer(strategy='most_frequent').fit(X)
    with pytest.raises(ValueError):
        _ = NumericsImputer(strategy='most_frequent').fit(X)
    with pytest.raises(ValueError):
        _ = ObjectImputer(strategy='mean').fit(X)
    with pytest.raises(ValueError):
        _ = ObjectImputer(strategy='median').fit(X)
    with pytest.raises(ValueError):
        _ = ObjectImputer(strategy='constant').fit(X)
    with pytest.raises(ValueError):
        _ = FloatImputer(strategy='constant').fit(X)
    with pytest.raises(ValueError):
        _ = NumericsImputer(strategy='constant').fit(X)
    with pytest.raises(ValueError):
        _ = IntImputer(strategy='constant').fit(X)
    with pytest.raises(ValueError):
        _ = ObjectImputer(strategy='abc').fit(X)
def test_compute_strategy():
    with pytest.raises(ValueError):
        X = pd.DataFrame(
            np.arange(9).reshape(3, 3) + .1, columns=list('qwe'))
        X.iloc[:, 0] = np.nan
        _ = FloatImputer(strategy='mean').fit(X)
def test_imputers_input_data():
    with pytest.raises(TypeError):
        _ = FloatImputer(strategy='mean').fit(np.array([[]]))
    with pytest.raises(TypeError):
        _ = IntImputer(strategy='most_frequent').fit(np.array([[]]))
    with pytest.raises(TypeError):
        _ = ObjectImputer(strategy='most_frequent').fit(np.array([[]]))
    with pytest.raises(TypeError):
        _ = ObjectImputer(strategy='most_frequent', columns='a')
def test_imputers_transform_input_data():
    with pytest.raises(TypeError):
        _ = FloatImputer(strategy='mean').fit_transform(np.array([]))
    with pytest.raises(TypeError):
        _ = IntImputer(strategy='most_frequent').fit(
            np.array([])).transform(np.array([]))
    with pytest.raises(TypeError):
        _ = ObjectImputer(strategy='most_frequent').transform(np.array([]))
def test_warnings_empty_columns(data):
    objs_dict, X_dict, X_expected_dict = data
    with pytest.warns(Warning):
        obj = FloatImputer(strategy='mean')
        obj.fit(X_dict['int'])
    with pytest.warns(Warning):
        obj = IntImputer(strategy='mean')
        obj.fit(X_dict['float'])
    with pytest.warns(Warning):
        obj = ObjectImputer(strategy='most_frequent')
        obj.fit(X_dict['int'])
    with pytest.warns(Warning):
        obj = NumericsImputer(strategy='mean')
        obj.fit(X_dict['object'])
def test_empty_columns_float():
    X = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    obj = FloatImputer(strategy='mean')
    _ = obj.fit(X)
    assert_frame_equal(obj.transform(X.copy()), X)
    assert np.allclose(obj.transform_numpy(X.to_numpy()),
                       X.to_numpy(), equal_nan=True)
def test_empty_columns_int():
    X = pd.DataFrame({'A': [0.1, 1, 1, np.nan], 'B': [3.1, 4, 4, np.nan]})
    obj = IntImputer(strategy='mean')
    _ = obj.fit(X)
    assert_frame_equal(obj.transform(X.copy()), X)
    assert np.allclose(obj.transform_numpy(X.to_numpy()),
                       X.to_numpy(), equal_nan=True)
def test_empty_columns_object():
    X = pd.DataFrame({'A': [0, 1, 1, np.nan], 'B': [3, 4, 4, np.nan]})
    obj = ObjectImputer(strategy='most_frequent')
    _ = obj.fit(X)
    assert_frame_equal(obj.fit_transform(X.copy()), X)
    assert_frame_equal(
        pd.DataFrame(obj.transform_numpy(X.to_numpy())),
        pd.DataFrame(X.to_numpy()))
def test_num_idx_columns_empty():
    X = pd.DataFrame({'A': ['a', 'b', 'b', 'c']})
    obj = NumericsImputer(strategy='mean').fit(X)
    _ = obj.fit(X)
    assert_frame_equal(obj.transform(X.copy()), X)
    assert_frame_equal(
        pd.DataFrame(obj.transform_numpy(X.to_numpy())),
        pd.DataFrame(X.to_numpy()))
| 36.061224 | 79 | 0.603104 | 5,279 | 33,573 | 3.562796 | 0.021595 | 0.033177 | 0.053222 | 0.026957 | 0.947097 | 0.938803 | 0.920566 | 0.910995 | 0.877286 | 0.861495 | 0 | 0.031952 | 0.205761 | 33,573 | 930 | 80 | 36.1 | 0.673392 | 0.000566 | 0 | 0.77182 | 0 | 0 | 0.065451 | 0.000775 | 0 | 0 | 0 | 0 | 0.067332 | 1 | 0.077307 | false | 0 | 0.011222 | 0 | 0.098504 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
17e1754772563cea6c176c436f7ab0e4389b7c6b | 501 | py | Python | Python/Data Science/HE_ML_Hackathon/pred_hotstar.py | vbsteja/code | 0c8f4dc579f5de21b6c55fe6e65c3c8eb5473687 | [
"Apache-2.0"
] | null | null | null | Python/Data Science/HE_ML_Hackathon/pred_hotstar.py | vbsteja/code | 0c8f4dc579f5de21b6c55fe6e65c3c8eb5473687 | [
"Apache-2.0"
] | null | null | null | Python/Data Science/HE_ML_Hackathon/pred_hotstar.py | vbsteja/code | 0c8f4dc579f5de21b6c55fe6e65c3c8eb5473687 | [
"Apache-2.0"
] | null | null | null | import pandas as pd
from sklearn.ensemble import RandomForestClassifier
df_train = pd.read_json("~/Documents/dataset/hotstar/train_data.json")
df_test = pd.read_json("~/Documents/dataset/hotstar/test_data.json")
df_train.head()
def pred_hotstar():
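    # Note: this re-loads the same files as the module-level code above and only previews the training frame.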
    import pandas as pd
    from sklearn.ensemble import RandomForestClassifier
    df_train = pd.read_json("~/Documents/dataset/hotstar/train_data.json")
    df_test = pd.read_json("~/Documents/dataset/hotstar/test_data.json")
    df_train.head()
| 29.470588 | 74 | 0.768463 | 71 | 501 | 5.211268 | 0.28169 | 0.075676 | 0.108108 | 0.205405 | 0.962162 | 0.962162 | 0.962162 | 0.962162 | 0.962162 | 0.962162 | 0 | 0 | 0.113772 | 501 | 16 | 75 | 31.3125 | 0.833333 | 0 | 0 | 0.909091 | 0 | 0 | 0.339321 | 0.339321 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.363636 | 0 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
17e2b72d6a736461e98f3dafbd02a42ab2d15b86 | 35 | py | Python | gunicorn_config.py | NipunBhalla/image-similarity | b037f5be3c6e4da32c56587ba3b98e3557e2e285 | [
"MIT"
] | null | null | null | gunicorn_config.py | NipunBhalla/image-similarity | b037f5be3c6e4da32c56587ba3b98e3557e2e285 | [
"MIT"
] | null | null | null | gunicorn_config.py | NipunBhalla/image-similarity | b037f5be3c6e4da32c56587ba3b98e3557e2e285 | [
"MIT"
] | null | null | null | bind = "0.0.0.0:5000"
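# Kill and restart workers that have been silent for more than 120 seconds.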
timeout = 120 | 17.5 | 21 | 0.628571 | 8 | 35 | 2.75 | 0.625 | 0.272727 | 0.272727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.366667 | 0.142857 | 35 | 2 | 22 | 17.5 | 0.366667 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
17ea72d3e6ba290df0c46f4b7cb8fad5d3013762 | 167 | py | Python | tests/parser/disjunction.4.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/disjunction.4.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/disjunction.4.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
k |l | a.
a | v.
a | v.
a | v.
v | a | w.
:- v.
:- w.
"""
output = """
k |l | a.
a | v.
a | v.
a | v.
v | a | w.
:- v.
:- w.
"""
| 6.68 | 12 | 0.233533 | 30 | 167 | 1.3 | 0.233333 | 0.307692 | 0.307692 | 0.410256 | 0.717949 | 0.717949 | 0.717949 | 0.717949 | 0.717949 | 0.717949 | 0 | 0 | 0.467066 | 167 | 24 | 13 | 6.958333 | 0.438202 | 0 | 0 | 0.888889 | 0 | 0 | 0.789116 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
a4d2a34e8a864b04f31e3e71d8de3f5baecfbc41 | 12,456 | py | Python | lib/rucio/tests/test_objectstore.py | maatthias/rucio-old | 8600cdc0838886a2f076f2f88850770877fc505f | [
"Apache-2.0"
] | 1 | 2019-03-04T09:09:42.000Z | 2019-03-04T09:09:42.000Z | lib/rucio/tests/test_objectstore.py | pujanm/rucio | 355a997a5ea213c427a5d841ab151ceb01073eb4 | [
"Apache-2.0"
] | null | null | null | lib/rucio/tests/test_objectstore.py | pujanm/rucio | 355a997a5ea213c427a5d841ab151ceb01073eb4 | [
"Apache-2.0"
] | null | null | null | # Copyright European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Wen Guan, <wen.guan@cern.ch>, 2016
# - Hannes Hansen, <hannes.jakob.hansen@cern.ch>, 2019
#
# PY3K COMPATIBLE
try:
    # PY2
    import commands
except ImportError:
    # PY3
    import subprocess as commands
from nose.tools import raises
from six import string_types
from rucio.client.objectstoreclient import ObjectStoreClient
from rucio.common import objectstore
from rucio.common import exception
class TestObjectStoreCommon:
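    # setup() uploads a small test object via a signed URL, so these tests need network access to the BNL endpoint.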
    def setup(self):
        self.url = 's3+https://cephgw.usatlas.bnl.gov:8443/rucio_bucket/test_public'
        self.rse = 'BNL-OSG2_ES'
        ret = objectstore.get_signed_urls([self.url], rse=self.rse, operation='write')
        if isinstance(ret[self.url], Exception):
            raise ret[self.url]
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % str(ret[self.url])
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' in output:
            raise Exception(output)
    def test_connect(self):
        """ OBJECTSTORE (COMMON): Connect """
        objectstore.connect(self.rse, self.url)
    def test_get_signed_urls_read(self):
        """ OBJECTSTORE (COMMON): Get signed urls for read """
        ret = objectstore.get_signed_urls([self.url], rse=self.rse, operation='read')
        if isinstance(ret[self.url], Exception):
            raise ret[self.url]
        # read
        command = 'curl "%s" > /dev/null' % ret[self.url]
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        # write
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % str(ret[self.url])
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' not in output:
            raise Exception(output)
    def test_get_signed_urls_write(self):
        """ OBJECTSTORE (COMMON): Get signed urls for write """
        ret = objectstore.get_signed_urls([self.url], rse=self.rse, operation='write')
        if isinstance(ret[self.url], Exception):
            raise ret[self.url]
        # write
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % str(ret[self.url])
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' in output:
            raise Exception(output)
        # read
        command = 'curl "%s" > /dev/null' % ret[self.url]
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
    @raises(exception.SourceNotFound)
    def test_get_signed_urls_read_not_exists(self):
        """ OBJECTSTORE (COMMON): Get signed not exist urls for read """
        url = '%s_not_exist' % (self.url)
        ret = objectstore.get_signed_urls([url], rse=self.rse, operation='read')
        if isinstance(ret[url], Exception):
            raise ret[url]
        raise Exception("Response not as expected: should catch SourceNotFound")
    def test_get_metadata(self):
        """ OBJECTSTORE (COMMON): Get metadata """
        url = self.url
        ret = objectstore.get_metadata([url], rse=self.rse)
        if isinstance(ret[url], Exception):
            raise ret[url]
        if 'filesize' not in ret[url]:
            raise Exception("Response not as expected: should return {'filesize': filesize}, but it returns: %s" % ret[url])
    def test_rename(self):
        """ OBJECTSTORE (COMMON): Rename """
        url = self.url
        new_url = '%s_new' % url
        objectstore.rename(url, new_url, rse=self.rse)
        ret = objectstore.get_metadata([url], rse=self.rse)
        if not isinstance(ret[url], exception.SourceNotFound):
            raise ret[url]
        ret = objectstore.get_metadata([new_url], rse=self.rse)
        if isinstance(ret[new_url], Exception):
            raise ret[new_url]
        if 'filesize' not in ret[new_url]:
            raise Exception("Response not as expected: should return {'filesize': filesize}, but it returns: %s" % ret[url])
    @raises(exception.SourceNotFound)
    def test_get_metadata_not_exist(self):
        """ OBJECTSTORE (COMMON): Get metadata for not exist url """
        url = '%s_not_exist' % (self.url)
        ret = objectstore.get_metadata([url], rse=self.rse)
        if isinstance(ret[url], Exception):
            raise ret[url]
        raise Exception("Response not as expected: should catch SourceNotFound")
    def test_delete(self):
        """ OBJECTSTORE (COMMON): Delete urls """
        urls = []
        for i in range(10):
            url = '%s_%s' % (self.url, i)
            urls.append(url)
        ret = objectstore.get_signed_urls(urls, rse=self.rse, operation='write')
        for url in urls:
            if isinstance(url, Exception):
                raise ret[self.url]
            # write
            command = 'curl --request PUT --upload-file /bin/hostname "%s"' % ret[url]
            status, output = commands.getstatusoutput(command)
            if status:
                raise Exception(output)
            if 'AccessDenied' in output:
                raise Exception(output)
        ret = objectstore.delete(urls, rse=self.rse)
        for url in urls:
            if isinstance(ret[url], Exception):
                raise ret[url]
    def test_delete_dir(self):
        """ OBJECTSTORE (COMMON): Delete dir """
        urls = []
        for i in range(10):
            url = '%s_%s' % (self.url, i)
            urls.append(url)
        ret = objectstore.get_signed_urls(urls, rse=self.rse, operation='write')
        for url in urls:
            if isinstance(url, Exception):
                raise ret[self.url]
            # write
            command = 'curl --request PUT --upload-file /bin/hostname "%s"' % ret[url]
            status, output = commands.getstatusoutput(command)
            if status:
                raise Exception(output)
            if 'AccessDenied' in output:
                raise Exception(output)
        status, output = objectstore.delete_dir(self.url, rse=self.rse)
        if status:
            raise Exception(output)
class TestObjectStoreClients:
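    # The same scenarios as above, exercised through the REST ObjectStoreClient rather than rucio.common.objectstore.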
    def setup(self):
        self.os_client = ObjectStoreClient()
        self.url = 's3+https://cephgw.usatlas.bnl.gov:8443/rucio_bucket/test_public'
        self.rse = 'BNL-OSG2_ES'
        ret = objectstore.get_signed_urls([self.url], rse=self.rse, operation='write')
        if isinstance(ret[self.url], Exception):
            raise ret[self.url]
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % ret[self.url]
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' in output:
            raise Exception(output)
    def test_connect(self):
        """ OBJECTSTORE (CLIENT): Connect """
        self.os_client.connect(self.rse, self.url)
    def test_get_signed_url_read(self):
        """ OBJECTSTORE (CLIENT): Get signed url for read """
        ret = self.os_client.get_signed_url(self.url, rse=self.rse, operation='read')
        if not isinstance(ret, string_types):
            raise Exception("Return %s is not as expected.")
        # read
        command = 'curl "%s" > /dev/null' % str(ret)
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        # write
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % ret
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' not in output:
            raise Exception(output)
    def test_get_signed_url_write(self):
        """ OBJECTSTORE (CLIENT): Get signed url for write """
        ret = self.os_client.get_signed_url(self.url, rse=self.rse, operation='write')
        if not isinstance(ret, string_types):
            raise Exception("Return %s is not as expected.")
        # write
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % ret
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' in output:
            raise Exception(output)
        # read
        command = 'curl "%s" > /dev/null' % ret
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
    @raises(exception.SourceNotFound)
    def test_get_signed_url_read_not_exists(self):
        """ OBJECTSTORE (CLIENT): Get signed not exist url for read """
        url = '%s_not_exist' % (self.url)
        self.os_client.get_signed_url(url, rse=self.rse, operation='read')
        raise Exception("Response not as expected: should catch SourceNotFound")
    def test_get_signed_urls_read(self):
        """ OBJECTSTORE (CLIENT): Get signed urls for read """
        ret = self.os_client.get_signed_urls([self.url], rse=self.rse, operation='read')
        if isinstance(ret[self.url], Exception):
            raise ret[self.url]
        # read
        command = 'curl "%s" > /dev/null' % ret[self.url]
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        # write
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % ret[self.url]
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' not in output:
            raise Exception(output)
    def test_get_signed_urls_write(self):
        """ OBJECTSTORE (CLIENT): Get signed urls for write """
        ret = self.os_client.get_signed_urls([self.url], rse=self.rse, operation='write')
        if isinstance(ret[self.url], Exception):
            raise ret[self.url]
        # write
        command = 'curl --request PUT --upload-file /bin/hostname "%s"' % ret[self.url]
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
        if 'AccessDenied' in output:
            raise Exception(output)
        # read
        command = 'curl "%s" > /dev/null' % ret[self.url]
        status, output = commands.getstatusoutput(command)
        if status:
            raise Exception(output)
    @raises(exception.SourceNotFound)
    def test_get_signed_urls_read_not_exists(self):
        """ OBJECTSTORE (CLIENT): Get signed not exist urls for read """
        url = '%s_not_exist' % (self.url)
        self.os_client.get_signed_urls([url], rse=self.rse, operation='read')
        raise Exception("Response not as expected: should catch SourceNotFound")
    def test_get_metadata(self):
        """ OBJECTSTORE (CLIENT): Get metadata """
        url = self.url
        ret = self.os_client.get_metadata([url], rse=self.rse)
        if isinstance(ret[url], Exception):
            raise ret[url]
        if 'filesize' not in ret[url]:
            raise Exception("Response not as expected: should return {'filesize': filesize}, but it returns: %s" % ret[url])
    @raises(exception.SourceNotFound)
    def test_get_metadata_not_exist(self):
        """ OBJECTSTORE (CLIENT): Get metadata for not exist url """
        url = '%s_not_exist' % (self.url)
        self.os_client.get_metadata([url], rse=self.rse)
        raise Exception("Response not as expected: should catch SourceNotFound")
    def test_rename(self):
        """ OBJECTSTORE (CLIENT): Rename """
        url = self.url
        new_url = '%s_new' % url
        self.os_client.rename(url, new_url, rse=self.rse)
        try:
            self.os_client.get_metadata([url], rse=self.rse)
        except exception.SourceNotFound:
            pass
        ret = self.os_client.get_metadata([new_url], rse=self.rse)
        if isinstance(ret[new_url], Exception):
            raise ret[new_url]
        if 'filesize' not in ret[new_url]:
            raise Exception("Response not as expected: should return {'filesize': filesize}, but it returns: %s" % ret[new_url])
| 38.803738 | 127 | 0.612877 | 1,502 | 12,456 | 4.986019 | 0.090546 | 0.044866 | 0.072106 | 0.038189 | 0.870744 | 0.851516 | 0.83616 | 0.809721 | 0.792496 | 0.752704 | 0 | 0.003412 | 0.270552 | 12,456 | 320 | 128 | 38.925 | 0.820823 | 0.104849 | 0 | 0.829694 | 0 | 0 | 0.156159 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.091703 | false | 0.004367 | 0.034935 | 0 | 0.135371 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a4d82d3a8c9f9f3d9f6b5800fc7edd8e4e4e7f2c | 9,115 | py | Python | project/api/migrations/0063_auto_20210718_1521.py | hlystovea/BBBS | 7164ef67615e45d750e965bf958af229b56d49e3 | [
"BSD-3-Clause"
] | null | null | null | project/api/migrations/0063_auto_20210718_1521.py | hlystovea/BBBS | 7164ef67615e45d750e965bf958af229b56d49e3 | [
"BSD-3-Clause"
] | 2 | 2021-06-07T14:06:05.000Z | 2021-06-18T16:27:29.000Z | project/api/migrations/0063_auto_20210718_1521.py | hlystovea/BBBS | 7164ef67615e45d750e965bf958af229b56d49e3 | [
"BSD-3-Clause"
] | 2 | 2021-07-27T20:40:18.000Z | 2021-09-12T16:48:19.000Z | # Generated by Django 3.2.3 on 2021-07-18 08:21
import api.validators
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0062_auto_20210717_0033'),
]
operations = [
migrations.AlterField(
model_name='article',
name='image',
field=models.ImageField(blank=True, help_text='Поддерживаемые форматы jpg, jpeg, gif, png, bmp. Размер до 10М.', null=True, upload_to='articles/', validators=[api.validators.file_size_validator, api.validators.image_extension_validator], verbose_name='Изображение'),
),
migrations.AlterField(
model_name='article',
name='image_url',
field=models.URLField(blank=True, help_text='Альтернативный способ загрузки изображения. Приоритет у файла.', max_length=192, null=True, verbose_name='Ссылка на изображение'),
),
migrations.AlterField(
model_name='article',
name='output_to_main',
field=models.BooleanField(default=False, help_text='Статьи с этой меткой будут отображаться на главной странице сайта.', verbose_name='Отображать на главной странице'),
),
migrations.AlterField(
model_name='article',
name='pinned_full_size',
field=models.BooleanField(default=False, help_text='Статья с этой меткой будет отображаться в полноразмерном формате вверху страницы.', verbose_name='Закрепить'),
),
migrations.AlterField(
model_name='book',
name='url',
field=models.URLField(verbose_name='Ссылка на книгу'),
),
migrations.AlterField(
model_name='booktype',
name='slug',
field=models.SlugField(unique=True, verbose_name='Слаг (Ссылка)'),
),
migrations.AlterField(
model_name='catalog',
name='image',
field=models.ImageField(blank=True, help_text='Поддерживаемые форматы jpg, jpeg, gif, png, bmp. Размер до 10М.', null=True, upload_to='catalogs/', validators=[api.validators.file_size_validator, api.validators.image_extension_validator], verbose_name='Изображение'),
),
migrations.AlterField(
model_name='catalog',
name='image_url',
field=models.URLField(help_text='Альтернативный способ загрузки изображения. Приоритет у файла.', max_length=192, verbose_name='Ссылка на изображение'),
),
migrations.AlterField(
model_name='catalog',
name='raw_html',
field=models.TextField(help_text='Поле для html кода страницы.', max_length=4000000, verbose_name='HTML'),
),
migrations.AlterField(
model_name='city',
name='is_primary',
field=models.BooleanField(default=False, help_text='Города с этой меткой будут отображаться в начале списка.', verbose_name='Приоритет вывода'),
),
migrations.AlterField(
model_name='diary',
name='image',
field=models.ImageField(blank=True, help_text='Поддерживаемые форматы jpg, jpeg, gif, png, bmp. Размер до 10М.', null=True, upload_to='diaries/', validators=[api.validators.file_size_validator, api.validators.image_extension_validator], verbose_name='Изображение'),
),
migrations.AlterField(
model_name='history',
name='image',
field=models.ImageField(blank=True, help_text='Поддерживаемые форматы jpg, jpeg, gif, png, bmp. Размер до 10М.', null=True, upload_to='history/', validators=[api.validators.file_size_validator, api.validators.image_extension_validator], verbose_name='Изображение'),
),
migrations.AlterField(
model_name='history',
name='output_to_main',
field=models.BooleanField(default=False, help_text='Истории с этой меткой будут отображаться на главной странице сайта.', verbose_name='Отображать на главной странице'),
),
migrations.AlterField(
model_name='history',
name='raw_html',
field=models.TextField(help_text='Поле для html кода страницы.', max_length=4000000, verbose_name='HTML'),
),
migrations.AlterField(
model_name='movie',
name='image',
field=models.ImageField(blank=True, help_text='Поддерживаемые форматы jpg, jpeg, gif, png, bmp. Размер до 10М.', null=True, upload_to='movies/', validators=[api.validators.file_size_validator, api.validators.image_extension_validator], verbose_name='Изображение'),
),
migrations.AlterField(
model_name='movie',
name='output_to_main',
field=models.BooleanField(default=False, help_text='Фильмы с этой меткой будут отображаться на главной странице сайта.', verbose_name='Отображать на главной странице'),
),
migrations.AlterField(
model_name='place',
name='image',
field=models.ImageField(blank=True, help_text='Поддерживаемые форматы jpg, jpeg, gif, png, bmp. Размер до 10М.', null=True, upload_to='places/', validators=[api.validators.file_size_validator, api.validators.image_extension_validator], verbose_name='Изображение'),
),
migrations.AlterField(
model_name='place',
name='image_url',
field=models.URLField(blank=True, help_text='Альтернативный способ загрузки изображения. Приоритет у файла.', null=True, verbose_name='Ссылка на изображение'),
),
migrations.AlterField(
model_name='place',
name='moderation_flag',
field=models.BooleanField(default=False, help_text='Места без этой метки не будут отображаться на сайте.', verbose_name='Отметка о модерации'),
),
migrations.AlterField(
model_name='place',
name='output_to_main',
field=models.BooleanField(default=False, help_text='Места с этой меткой будут отображаться на главной странице сайта.', verbose_name='Отображать на главной странице'),
),
migrations.AlterField(
model_name='question',
name='output_to_main',
field=models.BooleanField(default=False, help_text='Вопросы с этой меткой будут отображаться на главной странице сайта.', verbose_name='Отображать на главной странице'),
),
migrations.AlterField(
model_name='right',
name='raw_html',
field=models.TextField(help_text='Поле для html кода страницы.', max_length=4000000, verbose_name='HTML'),
),
migrations.AlterField(
model_name='tag',
name='category',
field=models.CharField(choices=[('Книги', 'Книги'), ('Фильмы', 'Фильмы'), ('Места', 'Места'), ('Вопросы', 'Вопросы'), ('Права', 'Права'), ('Видеоролики', 'Видеоролики'), ('События', 'События')], max_length=50, verbose_name='Категория'),
),
migrations.AlterField(
model_name='tag',
name='name',
field=models.CharField(max_length=50, verbose_name='Название'),
),
migrations.AlterField(
model_name='video',
name='duration',
field=models.PositiveIntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(86400)], verbose_name='Длина видео в сек.'),
),
migrations.AlterField(
model_name='video',
name='image',
field=models.ImageField(blank=True, help_text='Поддерживаемые форматы jpg, jpeg, gif, png, bmp. Размер до 10М.', null=True, upload_to='videos/', validators=[api.validators.file_size_validator, api.validators.image_extension_validator], verbose_name='Изображение'),
),
migrations.AlterField(
model_name='video',
name='output_to_main',
field=models.BooleanField(default=False, help_text='Видео с этой меткой будут отображаться на главной странице сайта.', verbose_name='Отображать на главной странице'),
),
migrations.AlterField(
model_name='video',
name='pinned_full_size',
field=models.BooleanField(default=False, help_text='Видео с этой меткой будет отображаться в полноразмерном формате вверху страницы.', verbose_name='Закрепить'),
),
migrations.AlterField(
model_name='video',
name='resource_group',
field=models.BooleanField(default=False, help_text='Видео с этой меткой не будут показаны не авторизованным пользователям.', verbose_name='Ресурсная группа'),
),
]
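# A minimal sketch of the two custom validators referenced above (an assumption;
# api/validators.py itself is not shown here). The limits mirror the help texts:
# files up to 10M, extensions jpg/jpeg/gif/png/bmp.
from django.core.exceptions import ValidationError

def file_size_validator(value):
    # Reject uploads larger than 10 MB.
    if value.size > 10 * 1024 * 1024:
        raise ValidationError('File size must be no more than 10M.')

def image_extension_validator(value):
    # Allow only the extensions listed in the field help text.
    allowed = ('jpg', 'jpeg', 'gif', 'png', 'bmp')
    if value.name.rsplit('.', 1)[-1].lower() not in allowed:
        raise ValidationError('Supported formats: jpg, jpeg, gif, png, bmp.')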
| 56.614907 | 291 | 0.623039 | 941 | 9,115 | 5.873539 | 0.181722 | 0.104939 | 0.131174 | 0.152162 | 0.816356 | 0.803329 | 0.76461 | 0.733671 | 0.724082 | 0.713407 | 0 | 0.012248 | 0.265496 | 9,115 | 160 | 292 | 56.96875 | 0.813294 | 0.004937 | 0 | 0.681818 | 1 | 0 | 0.319916 | 0.002536 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019481 | 0 | 0.038961 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
35053e1ec2c578046fa71f3731b06e49df988602 | 193 | py | Python | tests/conftest.py | Himon-SYNCRAFT/taskplus | 9e6293840941d0cb4fd7bac0f8ff66f8e72cc62b | [
"BSD-3-Clause"
] | null | null | null | tests/conftest.py | Himon-SYNCRAFT/taskplus | 9e6293840941d0cb4fd7bac0f8ff66f8e72cc62b | [
"BSD-3-Clause"
] | null | null | null | tests/conftest.py | Himon-SYNCRAFT/taskplus | 9e6293840941d0cb4fd7bac0f8ff66f8e72cc62b | [
"BSD-3-Clause"
] | null | null | null | import pytest
from taskplus.apps.rest.app import create_app
from taskplus.apps.rest.settings import TestConfig
@pytest.fixture(scope='function')
def app():
return create_app(TestConfig)
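# A minimal sketch of exercising the app fixture above (an assumption:
# create_app(TestConfig) returns a Flask-style app exposing test_client()).
def test_app_fixture_sketch(app):
    client = app.test_client()
    response = client.get('/')
    assert response.status_code in (200, 404)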
| 19.3 | 50 | 0.792746 | 27 | 193 | 5.592593 | 0.555556 | 0.15894 | 0.211921 | 0.264901 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11399 | 193 | 9 | 51 | 21.444444 | 0.883041 | 0 | 0 | 0 | 0 | 0 | 0.041451 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | true | 0 | 0.5 | 0.166667 | 0.833333 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
3507e7cdf44d9991a2298f2b76cff5a37656e96f | 144 | py | Python | tests/test_prime.py | maryann2013/AshaLib | f9fc5c208e5ae351f6e6116e919984ebfd071010 | [
"MIT"
] | null | null | null | tests/test_prime.py | maryann2013/AshaLib | f9fc5c208e5ae351f6e6116e919984ebfd071010 | [
"MIT"
] | null | null | null | tests/test_prime.py | maryann2013/AshaLib | f9fc5c208e5ae351f6e6116e919984ebfd071010 | [
"MIT"
] | null | null | null | from AshaLib.prime_numbers import is_prime
def test_false():
assert is_prime(10) is False
def test_true():
assert is_prime(11) is True | 20.571429 | 42 | 0.75 | 25 | 144 | 4.08 | 0.52 | 0.205882 | 0.254902 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033613 | 0.173611 | 144 | 7 | 43 | 20.571429 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0.4 | true | 0 | 0.2 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
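# A minimal sketch of the is_prime function exercised by the tests above
# (an assumption; AshaLib.prime_numbers itself is not shown here).
def is_prime(n):
    # Primes are integers greater than 1 with no divisor up to sqrt(n).
    if n < 2:
        return False
    i = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += 1
    return True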
35428ac37ba11277c892d834360b541781a3c933 | 5,917 | py | Python | source/light_curve_simulation.py | dominickeehan/bayesian-microlensing | bf95b8346019e6a6262e42e4c5c8e5b870c903b5 | [
"MIT"
] | 1 | 2021-10-13T00:41:02.000Z | 2021-10-13T00:41:02.000Z | source/light_curve_simulation.py | dominickeehan/bayesian-microlensing | bf95b8346019e6a6262e42e4c5c8e5b870c903b5 | [
"MIT"
] | null | null | null | source/light_curve_simulation.py | dominickeehan/bayesian-microlensing | bf95b8346019e6a6262e42e4c5c8e5b870c903b5 | [
"MIT"
] | null | null | null | """Light curve simulation for microlensing.
Functions to generate simple synthetic light curves, accurate in the context
of ROMAN. Functions to calculate likelihood given a lensing model.
"""
import MulensModel as mm
import math
import numpy as np
def read_light_curve(file_name):
"""Read in light curve data.
Observations must be between 0 and 72 days. Expects
photometry data with three columns: time, flux, and error.
Args:
file_name: [str] CSV file name.
Returns:
data: [mulensdata] Object for light curve.
"""
with open(file_name) as file:
array = np.loadtxt(file, delimiter = ",")
data = mm.MulensData(data_list = [array[:, 0], array[:, 1], array[:, 2]], phot_fmt = "flux", chi2_fmt = "flux")
return data
def synthetic_single(theta, n_epochs, sn, seed = 42):
"""Generate a synthetic single lens light curve.
    Simulates noise based on a gaussian flux process.
Produces equispaced observations from 0 to 72 days.
In this simplified case, amplification = flux.
Otherwise based on ROMAN photometric specifications.
Args:
theta: [state] Single lens model parameters.
n_epochs: [int] The number of flux observations.
sn: [float] The signal to noise baseline.
seed: [optional, int] A random seed.
Returns:
data: [mulensdata] Object for a synthetic light curve.
"""
# Create MulensModel.
model = mm.Model(dict(zip(["t_0", "u_0", "t_E"], theta.truth[1:])))
model.set_magnification_methods([0., "point_source", 72.])
# Exact signal (fs=1, fb=0).
epochs = np.linspace(0, 72, n_epochs + 1)[:n_epochs]
truth_signal = (model.magnification(epochs)-1)*theta.truth[0]+1
# Simulate noise in gaussian errored flux space.
np.random.seed(seed)
noise = np.random.normal(0.0, np.sqrt(truth_signal) / sn, n_epochs)
noise_sd = np.sqrt(truth_signal) / sn
signal = truth_signal + noise
data = mm.MulensData(data_list = [epochs, signal, noise_sd], phot_fmt = "flux", chi2_fmt = "flux")
return data
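# A minimal usage sketch of synthetic_single (assumptions: the [state] argument
# only needs a .truth attribute of [f_s, t_0, u_0, t_E] here, and the parameter
# values below are illustrative, not taken from any real pipeline).
def _demo_synthetic_single():
    from types import SimpleNamespace
    theta = SimpleNamespace(truth=[1.0, 36.0, 0.1, 10.0])
    return synthetic_single(theta, n_epochs=720, sn=23)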
def synthetic_binary(theta, n_epochs, sn, seed = 42):
"""Generate a synthetic single lens light curve.
Simulates noise based on guassian flux process.
In this simplified case, amplification = flux.
Produces equispaced observations from 0 to 72 days.
Otherwise based on ROMAN photometric specifications.
Args:
theta: [state] Binary lens model parameters.
n_epochs: [int] The number of flux observations.
sn: [float] The signal to noise baseline.
seed: [optional, int] A random seed.
Returns:
data: [mulensdata] Object for a synthetic light curve.
"""
# Create MulensModel.
model = mm.Model(dict(zip(["t_0", "u_0", "t_E", "q", "s", "alpha"], theta.truth[1:])))
model.set_magnification_methods([0., "point_source", 72.])
# Exact signal (fs=1, fb=0).
epochs = np.linspace(0, 72, n_epochs + 1)[:n_epochs]
truth_signal = (model.magnification(epochs)-1)*theta.truth[0]+1
# Simulate noise in gaussian errored flux space.
np.random.seed(seed)
noise = np.random.normal(0.0, np.sqrt(truth_signal) / sn, n_epochs)
noise_sd = np.sqrt(truth_signal) / sn
signal = truth_signal + noise
data = mm.MulensData(data_list = [epochs, signal, noise_sd], phot_fmt = "flux", chi2_fmt = "flux")
return data
def binary_log_likelihood(self, theta):
"""Calculate the log likelihood of a state in a model.
Uses the point source approximation from MulensModel to calculate
the log likelihood that a binary state produced the model's data.
Data must be over the range 0 to 72 days.
Args:
theta: [state] Binary model parameters.
Returns:
log_likelihood: [float] The resulting log likelihood.
"""
try: # MulensModel may throw errors
model = mm.Model(dict(zip(["t_0", "u_0", "t_E", "q", "s", "alpha"], theta.truth[1:])))
model.set_magnification_methods([0., "point_source", 72.])
a = model.magnification(self.data.time) # The proposed magnification signal.
y = self.data.flux # The observed flux signal.
# Fit proposed flux as least squares solution.
#F = least_squares_signal(a, y)
F = (a-1)*theta.truth[0]+1
sd = self.data.err_flux
chi2 = np.sum((y - F)**2/sd**2)
    except Exception:  # If MulensModel crashes, return log likelihood -inf (likelihood zero).
return -math.inf
return -chi2/2 # Transform chi2 to log likelihood.
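# Note on the transform above: for independent Gaussian flux errors, the log
# likelihood equals -chi2/2 up to an additive constant that depends only on
# the error bars, not on theta, so it can be dropped when comparing states.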
def least_squares_signal(a, y):
# Fit proposed flux as least squares solution.
A = np.vstack([a, np.ones(len(a))]).T
f_s, f_b = np.linalg.lstsq(A, y, rcond = None)[0]
F = f_s*a + f_b # The least squares signal.
return F
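# A self-contained sanity check of the least-squares fit above (illustrative
# numbers only): for a = [1, 2, 3] and y = 2*a + 0.5, lstsq should recover
# f_s ~ 2.0 and f_b ~ 0.5.
def _demo_least_squares_signal():
    a = np.array([1.0, 2.0, 3.0])
    y = 2.0 * a + 0.5
    return least_squares_signal(a, y)  # approximately [2.5, 4.5, 6.5]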
def single_log_likelihood(self, theta):
"""Calculate the log likelihood of a state in a model.
Uses the point source approximation from MulensModel to calculate
the log likelihood that a single state produced the model's data.
Data must be over the range 0 to 72 days.
Args:
theta: [state] Single model parameters.
Returns:
log_likelihood: [float] The resulting log likelihood.
"""
try: # MulensModel may throw errors
model = mm.Model(dict(zip(["t_0", "u_0", "t_E"], theta.truth[1:])))
model.set_magnification_methods([0., "point_source", 72.])
a = model.magnification(self.data.time) # The proposed magnification signal.
y = self.data.flux # The observed flux signal.
# Fit proposed flux as least squares solution.
#F = least_squares_signal(a, y)
F = (a-1)*theta.truth[0]+1
sd = self.data.err_flux
chi2 = np.sum((y - F)**2/sd**2)
    except Exception:  # If MulensModel crashes, return log likelihood -inf (likelihood zero).
return -math.inf
return -chi2/2 # Transform chi2 to log likelihood. | 33.811429 | 115 | 0.652527 | 841 | 5,917 | 4.495838 | 0.197384 | 0.041259 | 0.00529 | 0.009521 | 0.84475 | 0.825179 | 0.805607 | 0.795821 | 0.773076 | 0.741338 | 0 | 0.01992 | 0.236437 | 5,917 | 175 | 116 | 33.811429 | 0.816954 | 0.490789 | 0 | 0.728814 | 1 | 0 | 0.044825 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.101695 | false | 0 | 0.050847 | 0 | 0.288136 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
10402390c0c18578a81c1e79f94c2d89fb895e94 | 1,518 | py | Python | tests/test_1883.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_1883.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_1883.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import pytest
"""
Test 1883. Minimum Skips to Arrive at Meeting On Time
"""
@pytest.fixture(scope="session")
def init_variables_1883():
from src.leetcode_1883_minimum_skips_to_arrive_at_meeting_on_time import Solution
solution = Solution()
def _init_variables_1883():
return solution
yield _init_variables_1883
class TestClass1883:
def test_solution_0(self, init_variables_1883):
assert init_variables_1883().minSkips([1, 3, 2], 4, 2) == 1
def test_solution_1(self, init_variables_1883):
assert init_variables_1883().minSkips([7, 3, 5, 5], 2, 10) == 2
def test_solution_2(self, init_variables_1883):
assert init_variables_1883().minSkips([7, 3, 5, 5], 1, 10) == -1
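# A sketch of one standard DP for LeetCode 1883 (an assumption; the tested
# src module is not shown here). dp[j] holds the minimal arrival time, scaled
# by speed, after using j skips; rests round up to a whole hour unless skipped.
class SolutionSketch:
    def minSkips(self, dist, speed, hoursBefore):
        n = len(dist)
        INF = float("inf")
        dp = [0] + [INF] * n
        for i, d in enumerate(dist):
            ndp = [INF] * (n + 1)
            for j in range(i + 1):
                if dp[j] == INF:
                    continue
                # Option 1: skip the rest after road i.
                ndp[j + 1] = min(ndp[j + 1], dp[j] + d)
                # Option 2: rest; round up to a multiple of speed (except after the last road).
                t = dp[j] + d
                if i < n - 1:
                    t = (t + speed - 1) // speed * speed
                ndp[j] = min(ndp[j], t)
            dp = ndp
        for j in range(n + 1):
            if dp[j] <= speed * hoursBefore:
                return j
        return -1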
| 24.095238 | 85 | 0.703557 | 222 | 1,518 | 4.495496 | 0.184685 | 0.234469 | 0.306613 | 0.126253 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0.118123 | 0.185771 | 1,518 | 62 | 86 | 24.483871 | 0.68932 | 0.02635 | 0 | 1 | 0 | 0 | 0.01034 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.333333 | false | 0 | 0.133333 | 0.066667 | 0.6 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 11 |
105fe82646a156afdf04febeb480a7d498e5a218 | 19,659 | py | Python | test/unit/test_natural_language_classifier_v1.py | laggraw/python-sdk | 80b33065b8d526a9a5f9a62dc892a6fba53c703f | [
"Apache-2.0"
] | null | null | null | test/unit/test_natural_language_classifier_v1.py | laggraw/python-sdk | 80b33065b8d526a9a5f9a62dc892a6fba53c703f | [
"Apache-2.0"
] | 2 | 2020-01-18T23:42:45.000Z | 2020-01-18T23:52:44.000Z | test/unit/test_natural_language_classifier_v1.py | truthiswill/python-sdk-1 | e0e5f833e4935f9b52c17c4fae653c08b2bc323f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2015, 2020.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from ibm_cloud_sdk_core.authenticators.no_auth_authenticator import NoAuthAuthenticator
import inspect
import json
import pytest
import responses
import tempfile
import ibm_watson.natural_language_classifier_v1
from ibm_watson.natural_language_classifier_v1 import *
base_url = 'https://gateway.watsonplatform.net/natural-language-classifier/api'
##############################################################################
# Start of Service: ClassifyText
##############################################################################
# region
#-----------------------------------------------------------------------------
# Test Class for classify
#-----------------------------------------------------------------------------
class TestClassify():
#--------------------------------------------------------
# Test 1: Send fake data and check response
#--------------------------------------------------------
@responses.activate
def test_classify_response(self):
body = self.construct_full_body()
response = fake_response_Classification_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 2: Send only required fake data and check response
#--------------------------------------------------------
@responses.activate
def test_classify_required_response(self):
# Check response with required params
body = self.construct_required_body()
response = fake_response_Classification_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 3: Send empty data and check response
#--------------------------------------------------------
@responses.activate
def test_classify_empty(self):
check_empty_required_params(self, fake_response_Classification_json)
check_missing_required_params(self)
assert len(responses.calls) == 0
#-----------
#- Helpers -
#-----------
def make_url(self, body):
endpoint = '/v1/classifiers/{0}/classify'.format(body['classifier_id'])
url = '{0}{1}'.format(base_url, endpoint)
return url
def add_mock_response(self, url, response):
responses.add(responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
def call_service(self, body):
service = NaturalLanguageClassifierV1(
authenticator=NoAuthAuthenticator(),)
service.set_service_url(base_url)
output = service.classify(**body)
return output
def construct_full_body(self):
body = dict()
body['classifier_id'] = "string1"
body.update({
"text": "string1",
})
return body
def construct_required_body(self):
body = dict()
body['classifier_id'] = "string1"
body.update({
"text": "string1",
})
return body
#-----------------------------------------------------------------------------
# Test Class for classify_collection
#-----------------------------------------------------------------------------
class TestClassifyCollection():
#--------------------------------------------------------
# Test 1: Send fake data and check response
#--------------------------------------------------------
@responses.activate
def test_classify_collection_response(self):
body = self.construct_full_body()
response = fake_response_ClassificationCollection_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 2: Send only required fake data and check response
#--------------------------------------------------------
@responses.activate
def test_classify_collection_required_response(self):
# Check response with required params
body = self.construct_required_body()
response = fake_response_ClassificationCollection_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 3: Send empty data and check response
#--------------------------------------------------------
@responses.activate
def test_classify_collection_empty(self):
check_empty_required_params(
self, fake_response_ClassificationCollection_json)
check_missing_required_params(self)
assert len(responses.calls) == 0
#-----------
#- Helpers -
#-----------
def make_url(self, body):
endpoint = '/v1/classifiers/{0}/classify_collection'.format(
body['classifier_id'])
url = '{0}{1}'.format(base_url, endpoint)
return url
def add_mock_response(self, url, response):
responses.add(responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
def call_service(self, body):
service = NaturalLanguageClassifierV1(
authenticator=NoAuthAuthenticator(),)
service.set_service_url(base_url)
output = service.classify_collection(**body)
return output
def construct_full_body(self):
body = dict()
body['classifier_id'] = "string1"
body.update({
"collection": [],
})
return body
def construct_required_body(self):
body = dict()
body['classifier_id'] = "string1"
body.update({
"collection": [],
})
return body
# endregion
##############################################################################
# End of Service: ClassifyText
##############################################################################
##############################################################################
# Start of Service: ManageClassifiers
##############################################################################
# region
#-----------------------------------------------------------------------------
# Test Class for create_classifier
#-----------------------------------------------------------------------------
class TestCreateClassifier():
#--------------------------------------------------------
# Test 1: Send fake data and check response
#--------------------------------------------------------
@responses.activate
def test_create_classifier_response(self):
body = self.construct_full_body()
response = fake_response_Classifier_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 2: Send only required fake data and check response
#--------------------------------------------------------
@responses.activate
def test_create_classifier_required_response(self):
# Check response with required params
body = self.construct_required_body()
response = fake_response_Classifier_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 3: Send empty data and check response
#--------------------------------------------------------
@responses.activate
def test_create_classifier_empty(self):
check_empty_required_params(self, fake_response_Classifier_json)
check_missing_required_params(self)
assert len(responses.calls) == 0
#-----------
#- Helpers -
#-----------
def make_url(self, body):
endpoint = '/v1/classifiers'
url = '{0}{1}'.format(base_url, endpoint)
return url
def add_mock_response(self, url, response):
responses.add(responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
def call_service(self, body):
service = NaturalLanguageClassifierV1(
authenticator=NoAuthAuthenticator(),)
service.set_service_url(base_url)
output = service.create_classifier(**body)
return output
def construct_full_body(self):
body = dict()
body['training_metadata'] = tempfile.NamedTemporaryFile()
body['training_data'] = tempfile.NamedTemporaryFile()
return body
def construct_required_body(self):
body = dict()
body['training_metadata'] = tempfile.NamedTemporaryFile()
body['training_data'] = tempfile.NamedTemporaryFile()
return body
#-----------------------------------------------------------------------------
# Test Class for list_classifiers
#-----------------------------------------------------------------------------
class TestListClassifiers():
#--------------------------------------------------------
# Test 1: Send fake data and check response
#--------------------------------------------------------
@responses.activate
def test_list_classifiers_response(self):
body = self.construct_full_body()
response = fake_response_ClassifierList_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 2: Send only required fake data and check response
#--------------------------------------------------------
@responses.activate
def test_list_classifiers_required_response(self):
# Check response with required params
body = self.construct_required_body()
response = fake_response_ClassifierList_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 3: Send empty data and check response
#--------------------------------------------------------
@responses.activate
def test_list_classifiers_empty(self):
check_empty_response(self)
assert len(responses.calls) == 1
#-----------
#- Helpers -
#-----------
def make_url(self, body):
endpoint = '/v1/classifiers'
url = '{0}{1}'.format(base_url, endpoint)
return url
def add_mock_response(self, url, response):
responses.add(responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
def call_service(self, body):
service = NaturalLanguageClassifierV1(
authenticator=NoAuthAuthenticator(),)
service.set_service_url(base_url)
output = service.list_classifiers(**body)
return output
def construct_full_body(self):
body = dict()
return body
def construct_required_body(self):
body = dict()
return body
#-----------------------------------------------------------------------------
# Test Class for get_classifier
#-----------------------------------------------------------------------------
class TestGetClassifier():
#--------------------------------------------------------
# Test 1: Send fake data and check response
#--------------------------------------------------------
@responses.activate
def test_get_classifier_response(self):
body = self.construct_full_body()
response = fake_response_Classifier_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 2: Send only required fake data and check response
#--------------------------------------------------------
@responses.activate
def test_get_classifier_required_response(self):
# Check response with required params
body = self.construct_required_body()
response = fake_response_Classifier_json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 3: Send empty data and check response
#--------------------------------------------------------
@responses.activate
def test_get_classifier_empty(self):
check_empty_required_params(self, fake_response_Classifier_json)
check_missing_required_params(self)
assert len(responses.calls) == 0
#-----------
#- Helpers -
#-----------
def make_url(self, body):
endpoint = '/v1/classifiers/{0}'.format(body['classifier_id'])
url = '{0}{1}'.format(base_url, endpoint)
return url
def add_mock_response(self, url, response):
responses.add(responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
def call_service(self, body):
service = NaturalLanguageClassifierV1(
authenticator=NoAuthAuthenticator(),)
service.set_service_url(base_url)
output = service.get_classifier(**body)
return output
def construct_full_body(self):
body = dict()
body['classifier_id'] = "string1"
return body
def construct_required_body(self):
body = dict()
body['classifier_id'] = "string1"
return body
#-----------------------------------------------------------------------------
# Test Class for delete_classifier
#-----------------------------------------------------------------------------
class TestDeleteClassifier():
#--------------------------------------------------------
# Test 1: Send fake data and check response
#--------------------------------------------------------
@responses.activate
def test_delete_classifier_response(self):
body = self.construct_full_body()
response = fake_response__json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 2: Send only required fake data and check response
#--------------------------------------------------------
@responses.activate
def test_delete_classifier_required_response(self):
# Check response with required params
body = self.construct_required_body()
response = fake_response__json
send_request(self, body, response)
assert len(responses.calls) == 1
#--------------------------------------------------------
# Test 3: Send empty data and check response
#--------------------------------------------------------
@responses.activate
def test_delete_classifier_empty(self):
check_empty_required_params(self, fake_response__json)
check_missing_required_params(self)
assert len(responses.calls) == 0
#-----------
#- Helpers -
#-----------
def make_url(self, body):
endpoint = '/v1/classifiers/{0}'.format(body['classifier_id'])
url = '{0}{1}'.format(base_url, endpoint)
return url
def add_mock_response(self, url, response):
responses.add(responses.DELETE,
url,
body=json.dumps(response),
status=200,
content_type='')
def call_service(self, body):
service = NaturalLanguageClassifierV1(
authenticator=NoAuthAuthenticator(),)
service.set_service_url(base_url)
output = service.delete_classifier(**body)
return output
def construct_full_body(self):
body = dict()
body['classifier_id'] = "string1"
return body
def construct_required_body(self):
body = dict()
body['classifier_id'] = "string1"
return body
# endregion
##############################################################################
# End of Service: ManageClassifiers
##############################################################################
def check_empty_required_params(obj, response):
"""Test function to assert that the operation will throw an error when given empty required data
Args:
obj: The generated test function
"""
body = obj.construct_full_body()
body = {k: None for k in body.keys()}
error = False
try:
send_request(obj, body, response)
except ValueError as e:
error = True
assert error
def check_missing_required_params(obj):
"""Test function to assert that the operation will throw an error when missing required data
Args:
obj: The generated test function
"""
body = obj.construct_full_body()
url = obj.make_url(body)
error = False
try:
send_request(obj, {}, {}, url=url)
except TypeError as e:
error = True
assert error
def check_empty_response(obj):
"""Test function to assert that the operation will return an empty response when given an empty request
Args:
obj: The generated test function
"""
body = obj.construct_full_body()
url = obj.make_url(body)
send_request(obj, {}, {}, url=url)
def send_request(obj, body, response, url=None):
"""Test function to create a request, send it, and assert its accuracy to the mock response
Args:
obj: The generated test function
body: Dict filled with fake data for calling the service
response_str: Mock response string
"""
if not url:
url = obj.make_url(body)
obj.add_mock_response(url, response)
output = obj.call_service(body)
assert responses.calls[0].request.url.startswith(url)
assert output.get_result() == response
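# A self-contained sketch of the `responses` mocking pattern the helpers above
# drive (the example URL is illustrative only, not a real service endpoint).
@responses.activate
def _demo_mock_roundtrip():
    import requests
    url = 'https://example.com/v1/classifiers'
    responses.add(responses.GET, url, body=json.dumps({'classifiers': []}),
                  status=200, content_type='application/json')
    out = requests.get(url)
    assert out.json() == {'classifiers': []}
    assert responses.calls[0].request.url.startswith(url)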
####################
## Mock Responses ##
####################
fake_response__json = None
fake_response_Classification_json = """{"classifier_id": "fake_classifier_id", "url": "fake_url", "text": "fake_text", "top_class": "fake_top_class", "classes": []}"""
fake_response_ClassificationCollection_json = """{"classifier_id": "fake_classifier_id", "url": "fake_url", "collection": []}"""
fake_response_Classifier_json = """{"name": "fake_name", "url": "fake_url", "status": "fake_status", "classifier_id": "fake_classifier_id", "created": "2017-05-16T13:56:54.957Z", "status_description": "fake_status_description", "language": "fake_language"}"""
fake_response_ClassifierList_json = """{"classifiers": []}"""
fake_response_Classifier_json = """{"name": "fake_name", "url": "fake_url", "status": "fake_status", "classifier_id": "fake_classifier_id", "created": "2017-05-16T13:56:54.957Z", "status_description": "fake_status_description", "language": "fake_language"}"""
| 36.07156 | 259 | 0.521542 | 1,788 | 19,659 | 5.526846 | 0.116331 | 0.034001 | 0.021858 | 0.03643 | 0.815321 | 0.801862 | 0.781421 | 0.777879 | 0.771605 | 0.739425 | 0 | 0.009176 | 0.212778 | 19,659 | 544 | 260 | 36.137868 | 0.629362 | 0.294878 | 0 | 0.783871 | 0 | 0.009677 | 0.09953 | 0.023016 | 0 | 0 | 0 | 0 | 0.070968 | 1 | 0.167742 | false | 0 | 0.029032 | 0 | 0.293548 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
10adaff1f339754ebcf8632423413771ddf6741c | 12,755 | py | Python | mmaction/datasets/epickitchens_mmsada.py | ovshake/mmaction2 | 71e92e9d4c28190d485ba153aae5200bf71f70b1 | [
"Apache-2.0"
] | null | null | null | mmaction/datasets/epickitchens_mmsada.py | ovshake/mmaction2 | 71e92e9d4c28190d485ba153aae5200bf71f70b1 | [
"Apache-2.0"
] | null | null | null | mmaction/datasets/epickitchens_mmsada.py | ovshake/mmaction2 | 71e92e9d4c28190d485ba153aae5200bf71f70b1 | [
"Apache-2.0"
] | null | null | null | import copy
import os.path as osp
import mmcv
from .base import BaseDataset
from .builder import DATASETS
import numpy as np
from .pipelines import Compose
import pandas as pd
@DATASETS.register_module()
class EpicKitchensMMSADA(BaseDataset):
def __init__(self,
domain,
pipeline,
test_mode=False,
sample_by_class=False,
filename_tmpl='frame_{:010d}.jpg'):
self.split = 'train' if not test_mode else 'test'
self.test_mode = test_mode
self.metadata_paths = []
if not isinstance(domain, list):
domain = [domain]
for d in domain:
metadata_path = f"/data/abhishek/projects/MM-SADA_Domain_Adaptation_Splits/{d.upper()}_{self.split}.pkl"
self.metadata_paths.append(metadata_path)
if osp.exists('/local_datasets/EPIC_KITCHENS_UDA'):
self.datapath = '/local_datasets/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
else:
self.datapath = '/data/dataset/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
self.domain_to_participant_map = {"P08": "D1", "P01": "D2", "P22": "D3"}
super(EpicKitchensMMSADA, self).__init__(ann_file=None,
pipeline=pipeline,
test_mode=test_mode,
sample_by_class=sample_by_class)
self.filename_tmpl = filename_tmpl
def load_annotations(self):
video_infos = []
for metadata_path in self.metadata_paths:
df = pd.read_pickle(metadata_path)
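            # Each MM-SADA split pickle is assumed to contain one row per action
            # segment with participant_id, video_id, start_frame, stop_frame and
            # verb_class columns (inferred from the accesses below).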
for _, line in df.iterrows():
participant_id = line['participant_id']
video_id = line['video_id']
start_frame = int(line['start_frame'])
end_frame = int(line['stop_frame'])
label = line['verb_class']
frame_dir = f"{self.datapath}/{self.split}/{self.domain_to_participant_map[participant_id]}/{video_id}"
total_frames = end_frame - start_frame + 1
label = int(label)
video_infos.append(
dict(
frame_dir=frame_dir,
total_frames=total_frames,
label=label,
start_index=start_frame,
end_index=end_frame
)
)
return video_infos
def prepare_train_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
return self.pipeline(results)
def prepare_test_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
return self.pipeline(results)
@DATASETS.register_module()
class EpicKitchensSlowFastMMSADA(BaseDataset):
def __init__(self,
domain,
slow_pipeline,
fast_pipeline,
test_mode=False,
sample_by_class=False,
filename_tmpl='frame_{:010d}.jpg'):
self.split = 'train' if not test_mode else 'test'
self.test_mode = test_mode
self.metadata_paths = []
if not isinstance(domain, list):
domain = [domain]
for d in domain:
metadata_path = f"/data/abhishek/projects/MM-SADA_Domain_Adaptation_Splits/{d.upper()}_{self.split}.pkl"
self.metadata_paths.append(metadata_path)
if osp.exists('/local_datasets/EPIC_KITCHENS_UDA'):
self.datapath = '/local_datasets/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
else:
self.datapath = '/data/dataset/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
self.domain_to_participant_map = {"P08": "D1", "P01": "D2", "P22": "D3"}
super().__init__(ann_file=None, pipeline=slow_pipeline, test_mode=test_mode, sample_by_class=sample_by_class)
self.filename_tmpl = filename_tmpl
self.slow_pipeline = Compose(slow_pipeline)
self.fast_pipeline = Compose(fast_pipeline)
def load_annotations(self):
video_infos = []
for metadata_path in self.metadata_paths:
df = pd.read_pickle(metadata_path)
for _, line in df.iterrows():
participant_id = line['participant_id']
video_id = line['video_id']
start_frame = int(line['start_frame'])
end_frame = int(line['stop_frame'])
label = line['verb_class']
frame_dir = f"{self.datapath}/{self.split}/{self.domain_to_participant_map[participant_id]}/{video_id}"
total_frames = end_frame - start_frame + 1
label = int(label)
video_infos.append(
dict(
frame_dir=frame_dir,
total_frames=total_frames,
label=label,
start_index=start_frame,
end_index=end_frame
)
)
return video_infos
def prepare_train_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
return self.slow_pipeline(results), self.fast_pipeline(results)
def prepare_test_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
return self.slow_pipeline(results)
@DATASETS.register_module()
class EpicKitchensTemporalSpatialMMSADA(BaseDataset):
def __init__(self,
domain,
pathway_A,
pathway_B,
clip_len,
test_mode=False,
sample_by_class=False,
filename_tmpl='frame_{:010d}.jpg'):
self.split = 'train' if not test_mode else 'test'
self.test_mode = test_mode
self.metadata_paths = []
if not isinstance(domain, list):
domain = [domain]
for d in domain:
metadata_path = f"/data/abhishek/projects/MM-SADA_Domain_Adaptation_Splits/{d.upper()}_{self.split}.pkl"
self.metadata_paths.append(metadata_path)
if osp.exists('/local_datasets/EPIC_KITCHENS_UDA'):
self.datapath = '/local_datasets/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
else:
self.datapath = '/data/dataset/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
self.domain_to_participant_map = {"P08": "D1", "P01": "D2", "P22": "D3"}
super().__init__(ann_file=None, pipeline=pathway_A, test_mode=test_mode, sample_by_class=sample_by_class)
self.filename_tmpl = filename_tmpl
self.pathway_A = Compose(pathway_A)
self.pathway_B = Compose(pathway_B)
self.clip_len = clip_len
def load_annotations(self):
video_infos = []
for metadata_path in self.metadata_paths:
df = pd.read_pickle(metadata_path)
for _, line in df.iterrows():
participant_id = line['participant_id']
video_id = line['video_id']
start_frame = int(line['start_frame'])
end_frame = int(line['stop_frame'])
label = line['verb_class']
frame_dir = f"{self.datapath}/{self.split}/{self.domain_to_participant_map[participant_id]}/{video_id}"
total_frames = end_frame - start_frame + 1
label = int(label)
video_infos.append(
dict(
frame_dir=frame_dir,
total_frames=total_frames,
label=label,
start_index=start_frame,
end_index=end_frame
)
)
return video_infos
def prepare_train_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
start_index = results['start_index']
end_index = results['end_index']
num_frames = self.clip_len
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
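        # Draw two independent random start frames within the clip so the two
        # pathways receive temporally offset views of the same action; the
        # max(...) below guards against clips shorter than clip_len.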
pathway_A_start_index = np.random.randint(start_index, max(end_index - num_frames, start_index + 1))
pathway_B_start_index = np.random.randint(start_index, max(end_index - num_frames, start_index + 1))
pathway_A_results = copy.deepcopy(results)
pathway_B_results = copy.deepcopy(results)
pathway_A_results['start_index'] = pathway_A_start_index
pathway_A_results['total_frames'] = self.clip_len
pathway_B_results['start_index'] = pathway_B_start_index
pathway_B_results['total_frames'] = self.clip_len
return self.pathway_A(pathway_A_results), self.pathway_B(pathway_B_results)
def prepare_test_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
return self.pathway_A(results)
@DATASETS.register_module()
class EpicKitchensMultipleContrastiveSpaces(BaseDataset):
def __init__(self,
domain,
pipelines,
test_mode=False,
sample_by_class=False,
filename_tmpl='frame_{:010d}.jpg'):
self.split = 'train' if not test_mode else 'test'
self.test_mode = test_mode
self.metadata_paths = []
if not isinstance(domain, list):
domain = [domain]
for d in domain:
metadata_path = f"/data/abhishek/projects/MM-SADA_Domain_Adaptation_Splits/{d.upper()}_{self.split}.pkl"
self.metadata_paths.append(metadata_path)
if osp.exists('/local_datasets/EPIC_KITCHENS_UDA'):
self.datapath = '/local_datasets/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
else:
self.datapath = '/data/dataset/EPIC_KITCHENS_UDA/frames_rgb_flow/rgb'
self.domain_to_participant_map = {"P08": "D1", "P01": "D2", "P22": "D3"}
        msg = 'At least one pathway is necessary'
assert len(pipelines) > 0, msg
super().__init__(ann_file=None, pipeline=pipelines[0], test_mode=test_mode, sample_by_class=sample_by_class)
self.filename_tmpl = filename_tmpl
self.pipelines = []
for pipeline in pipelines:
self.pipelines.append(Compose(pipeline))
def load_annotations(self):
video_infos = []
for metadata_path in self.metadata_paths:
df = pd.read_pickle(metadata_path)
for _, line in df.iterrows():
participant_id = line['participant_id']
video_id = line['video_id']
start_frame = int(line['start_frame'])
end_frame = int(line['stop_frame'])
label = line['verb_class']
frame_dir = f"{self.datapath}/{self.split}/{self.domain_to_participant_map[participant_id]}/{video_id}"
total_frames = end_frame - start_frame + 1
label = int(label)
video_infos.append(
dict(
frame_dir=frame_dir,
total_frames=total_frames,
label=label,
start_index=start_frame,
end_index=end_frame
)
)
return video_infos
def prepare_train_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
pathways = []
for pipeline in self.pipelines:
pathways.append(pipeline(results))
return pathways
def prepare_test_frames(self, idx):
results = copy.deepcopy(self.video_infos[idx])
results['filename_tmpl'] = self.filename_tmpl
results['modality'] = self.modality
pathways = []
for pipeline in self.pipelines:
pathways.append(pipeline(results))
return pathways | 40.110063 | 120 | 0.575931 | 1,393 | 12,755 | 4.95262 | 0.09476 | 0.048703 | 0.022612 | 0.018553 | 0.878098 | 0.839107 | 0.818814 | 0.818814 | 0.818814 | 0.818814 | 0 | 0.006561 | 0.330851 | 12,755 | 318 | 121 | 40.110063 | 0.801757 | 0 | 0 | 0.781955 | 0 | 0.015038 | 0.147538 | 0.097209 | 0 | 0 | 0 | 0 | 0.003759 | 1 | 0.06015 | false | 0 | 0.033835 | 0 | 0.154135 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
10beba0aaeb1aaafb81200ad4c1a2904347e6348 | 724 | py | Python | api/dc/views.py | klebed/esdc-ce | 2c9e4591f344247d345a83880ba86777bb794460 | [
"Apache-2.0"
] | 97 | 2016-11-15T14:44:23.000Z | 2022-03-13T18:09:15.000Z | api/dc/views.py | klebed/esdc-ce | 2c9e4591f344247d345a83880ba86777bb794460 | [
"Apache-2.0"
] | 334 | 2016-11-17T19:56:57.000Z | 2022-03-18T10:45:53.000Z | api/dc/views.py | klebed/esdc-ce | 2c9e4591f344247d345a83880ba86777bb794460 | [
"Apache-2.0"
] | 33 | 2017-01-02T16:04:13.000Z | 2022-02-07T19:20:24.000Z | # noinspection PyUnresolvedReferences
from api.dc.base.views import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.dc.node.views import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.dc.storage.views import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.dc.image.views import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.dc.network.views import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.dc.template.views import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.dc.iso.views import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.dc.domain.views import * # noqa: F401,F403
| 42.588235 | 54 | 0.790055 | 88 | 724 | 6.5 | 0.204545 | 0.475524 | 0.531469 | 0.573427 | 0.923077 | 0.807692 | 0.807692 | 0.807692 | 0.807692 | 0.807692 | 0 | 0.075472 | 0.121547 | 724 | 16 | 55 | 45.25 | 0.823899 | 0.573204 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 11 |
52b28798db9ea739dc93fe8a08ef87d9000f8676 | 21,071 | py | Python | grammars/gen/Legal_refListener.py | OpenLawsGR/judgments2AKN | 0c6217349cde36058d5599800e289fdf0d3eaf23 | [
"MIT"
] | 5 | 2019-11-28T17:02:59.000Z | 2021-02-05T17:39:49.000Z | grammars/gen/Legal_refListener.py | OpenLawsGR/judgments2AKN | 0c6217349cde36058d5599800e289fdf0d3eaf23 | [
"MIT"
] | null | null | null | grammars/gen/Legal_refListener.py | OpenLawsGR/judgments2AKN | 0c6217349cde36058d5599800e289fdf0d3eaf23 | [
"MIT"
] | null | null | null | # Generated from /home/plessas/EDBM34/grammars/Legal_ref.g4 by ANTLR 4.7.2
from antlr4 import *
# This class defines a complete listener for a parse tree produced by Legal_refParser.
class Legal_refListener(ParseTreeListener):
# Enter a parse tree produced by Legal_refParser#all_text.
def enterAll_text(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#all_text.
def exitAll_text(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#legal_text.
def enterLegal_text(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#legal_text.
def exitLegal_text(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#other_text.
def enterOther_text(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#other_text.
def exitOther_text(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#legal_reference.
def enterLegal_reference(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#legal_reference.
def exitLegal_reference(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#euLegislation.
def enterEuLegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#euLegislation.
def exitEuLegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#singleEULegislation.
def enterSingleEULegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#singleEULegislation.
def exitSingleEULegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#completeEULegislation.
def enterCompleteEULegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#completeEULegislation.
def exitCompleteEULegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#eu_regulation.
def enterEu_regulation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#eu_regulation.
def exitEu_regulation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#eu_directive.
def enterEu_directive(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#eu_directive.
def exitEu_directive(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#eu.
def enterEu(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#eu.
def exitEu(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#eok.
def enterEok(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#eok.
def exitEok(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#legalOpinion.
def enterLegalOpinion(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#legalOpinion.
def exitLegalOpinion(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#singleLegalOpinion.
def enterSingleLegalOpinion(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#singleLegalOpinion.
def exitSingleLegalOpinion(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#completeLegalOpinion.
def enterCompleteLegalOpinion(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#completeLegalOpinion.
def exitCompleteLegalOpinion(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#nsk.
def enterNsk(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#nsk.
def exitNsk(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#legislation.
def enterLegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#legislation.
def exitLegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#singleLegislation.
def enterSingleLegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#singleLegislation.
def exitSingleLegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#par_mult.
def enterPar_mult(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#par_mult.
def exitPar_mult(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#case_mult.
def enterCase_mult(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#case_mult.
def exitCase_mult(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#passage_mult.
def enterPassage_mult(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#passage_mult.
def exitPassage_mult(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#element_mult.
def enterElement_mult(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#element_mult.
def exitElement_mult(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#multipleLegislation.
def enterMultipleLegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#multipleLegislation.
def exitMultipleLegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#multipleCompleteLegislation_1.
def enterMultipleCompleteLegislation_1(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#multipleCompleteLegislation_1.
def exitMultipleCompleteLegislation_1(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#completeLegislation.
def enterCompleteLegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#completeLegislation.
def exitCompleteLegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#incompleteLegislation.
def enterIncompleteLegislation(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#incompleteLegislation.
def exitIncompleteLegislation(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitLegalElement.
def enterExplicitLegalElement(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitLegalElement.
def exitExplicitLegalElement(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitLegalElement.
def enterImplicitLegalElement(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitLegalElement.
def exitImplicitLegalElement(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitPoint.
def enterExplicitPoint(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitPoint.
def exitExplicitPoint(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitPart.
def enterExplicitPart(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitPart.
def exitExplicitPart(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitChapter.
def enterImplicitChapter(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitChapter.
def exitImplicitChapter(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitChapter.
def enterExplicitChapter(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitChapter.
def exitExplicitChapter(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitArthro.
def enterImplicitArthro(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitArthro.
def exitImplicitArthro(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitArthro.
def enterExplicitArthro(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitArthro.
def exitExplicitArthro(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitArthro_1.
def enterExplicitArthro_1(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitArthro_1.
def exitExplicitArthro_1(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitPar.
def enterImplicitPar(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitPar.
def exitImplicitPar(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitPar.
def enterExplicitPar(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitPar.
def exitExplicitPar(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitSubPar.
def enterImplicitSubPar(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitSubPar.
def exitImplicitSubPar(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitSubPar.
def enterExplicitSubPar(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitSubPar.
def exitExplicitSubPar(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitPeriptwsi.
def enterImplicitPeriptwsi(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitPeriptwsi.
def exitImplicitPeriptwsi(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitPeriptwsi.
def enterExplicitPeriptwsi(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitPeriptwsi.
def exitExplicitPeriptwsi(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitStoixeio.
def enterImplicitStoixeio(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitStoixeio.
def exitImplicitStoixeio(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitStoixeio.
def enterExplicitStoixeio(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitStoixeio.
def exitExplicitStoixeio(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitEdafio.
def enterImplicitEdafio(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitEdafio.
def exitImplicitEdafio(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitEdafio.
def enterExplicitEdafio(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitEdafio.
def exitExplicitEdafio(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitParartima.
def enterExplicitParartima(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitParartima.
def exitExplicitParartima(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitLegalType.
def enterImplicitLegalType(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitLegalType.
def exitImplicitLegalType(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitLegalType.
def enterExplicitLegalType(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitLegalType.
def exitExplicitLegalType(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitKwdikas.
def enterImplicitKwdikas(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitKwdikas.
def exitImplicitKwdikas(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitKwdikas.
def enterExplicitKwdikas(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitKwdikas.
def exitExplicitKwdikas(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#legislative_type.
def enterLegislative_type(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#legislative_type.
def exitLegislative_type(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#acts.
def enterActs(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#acts.
def exitActs(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#presidential_decree.
def enterPresidential_decree(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#presidential_decree.
def exitPresidential_decree(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#compulsory_law.
def enterCompulsory_law(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#compulsory_law.
def exitCompulsory_law(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#decree_law.
def enterDecree_law(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#decree_law.
def exitDecree_law(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#decree.
def enterDecree(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#decree.
def exitDecree(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#royal_decree.
def enterRoyal_decree(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#royal_decree.
def exitRoyal_decree(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#syntagma.
def enterSyntagma(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#syntagma.
def exitSyntagma(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#special.
def enterSpecial(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#special.
def exitSpecial(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#range_id.
def enterRange_id(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#range_id.
def exitRange_id(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#arthra.
def enterArthra(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#arthra.
def exitArthra(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#m1.
def enterM1(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#m1.
def exitM1(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#m2.
def enterM2(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#m2.
def exitM2(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#next_all.
def enterNext_all(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#next_all.
def exitNext_all(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#courtDecision.
def enterCourtDecision(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#courtDecision.
def exitCourtDecision(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#singleCourtDec.
def enterSingleCourtDec(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#singleCourtDec.
def exitSingleCourtDec(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#multipleCourtsDec.
def enterMultipleCourtsDec(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#multipleCourtsDec.
def exitMultipleCourtsDec(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#completeCourtDec.
def enterCompleteCourtDec(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#completeCourtDec.
def exitCompleteCourtDec(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#completeCourtMultipleDecisions.
def enterCompleteCourtMultipleDecisions(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#completeCourtMultipleDecisions.
def exitCompleteCourtMultipleDecisions(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#completeCourtSingleDecision.
def enterCompleteCourtSingleDecision(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#completeCourtSingleDecision.
def exitCompleteCourtSingleDecision(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#incompleteCourtDec.
def enterIncompleteCourtDec(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#incompleteCourtDec.
def exitIncompleteCourtDec(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#completeCourtDecAlt.
def enterCompleteCourtDecAlt(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#completeCourtDecAlt.
def exitCompleteCourtDecAlt(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#incompleteCourtDecAlt.
def enterIncompleteCourtDecAlt(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#incompleteCourtDecAlt.
def exitIncompleteCourtDecAlt(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#decision.
def enterDecision(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#decision.
def exitDecision(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#singleLegalElementId.
def enterSingleLegalElementId(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#singleLegalElementId.
def exitSingleLegalElementId(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#multipleLegalElementIds.
def enterMultipleLegalElementIds(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#multipleLegalElementIds.
def exitMultipleLegalElementIds(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#arthro_id.
def enterArthro_id(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#arthro_id.
def exitArthro_id(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#ids.
def enterIds(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#ids.
def exitIds(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#multiple_ids.
def enterMultiple_ids(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#multiple_ids.
def exitMultiple_ids(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#date_id.
def enterDate_id(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#date_id.
def exitDate_id(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#law_id.
def enterLaw_id(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#law_id.
def exitLaw_id(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#latin_id.
def enterLatin_id(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#latin_id.
def exitLatin_id(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#explicitCourt.
def enterExplicitCourt(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#explicitCourt.
def exitExplicitCourt(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#dikastirio.
def enterDikastirio(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#dikastirio.
def exitDikastirio(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#implicitCourt.
def enterImplicitCourt(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#implicitCourt.
def exitImplicitCourt(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#parartima.
def enterParartima(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#parartima.
def exitParartima(self, ctx):
pass
# Enter a parse tree produced by Legal_refParser#btrimeles.
def enterBtrimeles(self, ctx):
pass
# Exit a parse tree produced by Legal_refParser#btrimeles.
def exitBtrimeles(self, ctx):
pass
| 26.979513 | 86 | 0.699967 | 2,539 | 21,071 | 5.702245 | 0.112249 | 0.071695 | 0.119492 | 0.215085 | 0.788438 | 0.788438 | 0.788438 | 0.786089 | 0.785744 | 0.643735 | 0 | 0.00144 | 0.242181 | 21,071 | 780 | 87 | 27.014103 | 0.905248 | 0.504058 | 0 | 0.49711 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.49711 | false | 0.50289 | 0.00289 | 0 | 0.50289 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 10 |
52b51def6a9b2e8e00386816dbaaddf5b18ee4b4 | 101 | py | Python | augur/metrics/pull_request/__init__.py | Nayan-Das/augur | 857f4a4e7d688fd54356aa0f546834071fbabbf2 | [
"MIT"
] | 3 | 2019-10-31T19:07:48.000Z | 2019-11-20T23:14:15.000Z | augur/metrics/pull_request/__init__.py | Nayan-Das/augur | 857f4a4e7d688fd54356aa0f546834071fbabbf2 | [
"MIT"
] | 3 | 2019-12-03T21:21:17.000Z | 2019-12-05T15:26:22.000Z | augur/metrics/pull_request/__init__.py | Nayan-Das/augur | 857f4a4e7d688fd54356aa0f546834071fbabbf2 | [
"MIT"
] | 4 | 2019-11-05T20:22:12.000Z | 2019-12-12T18:08:30.000Z | from .pull_request import create_pull_request_metrics
from .routes import create_pull_request_routes | 33.666667 | 53 | 0.90099 | 15 | 101 | 5.6 | 0.466667 | 0.392857 | 0.380952 | 0.547619 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.079208 | 101 | 3 | 54 | 33.666667 | 0.903226 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
52dc1ef3c6e8918a6e24109fa3ee9563ea7178ca | 201 | py | Python | Hackerrank/Python/np-dot-and-cross.py | PROxZIMA/Competitive-Coding | ba6b365ea130b6fcaa15c5537b530ed363bab793 | [
"MIT"
] | 1 | 2021-01-10T13:29:21.000Z | 2021-01-10T13:29:21.000Z | Hackerrank/Python/np-dot-and-cross.py | PROxZIMA/Competitive-Coding | ba6b365ea130b6fcaa15c5537b530ed363bab793 | [
"MIT"
] | null | null | null | Hackerrank/Python/np-dot-and-cross.py | PROxZIMA/Competitive-Coding | ba6b365ea130b6fcaa15c5537b530ed363bab793 | [
"MIT"
] | null | null | null | import numpy
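# Reads a size n, then two n x n integer matrices (one row per line of input),
# and prints their matrix product computed with numpy.dot.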
n = int(input())
a = numpy.array([list(map(int, input().split())) for _ in range(n)])
b = numpy.array([list(map(int, input().split())) for _ in range(n)])
print(numpy.dot(a, b))
| 33.5 | 73 | 0.631841 | 36 | 201 | 3.472222 | 0.444444 | 0.192 | 0.224 | 0.272 | 0.704 | 0.704 | 0.704 | 0.704 | 0.704 | 0.704 | 0 | 0 | 0.114428 | 201 | 5 | 74 | 40.2 | 0.702247 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
eaf7e1f6b7ed7b8bb7216210b6ffc211a1950edb | 8,300 | py | Python | source/tests/test_workspaces_helper.py | pharindoko/workspaces-cost-optimizer | 26cba72a08f855d804cb457f723afc55b14dda76 | [
"Apache-2.0"
] | null | null | null | source/tests/test_workspaces_helper.py | pharindoko/workspaces-cost-optimizer | 26cba72a08f855d804cb457f723afc55b14dda76 | [
"Apache-2.0"
] | null | null | null | source/tests/test_workspaces_helper.py | pharindoko/workspaces-cost-optimizer | 26cba72a08f855d804cb457f723afc55b14dda76 | [
"Apache-2.0"
] | null | null | null | import sys
sys.path.append('engine')
from ecs.workspaces_helper import WorkspacesHelper
from botocore.stub import Stubber
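# These tests exercise WorkspacesHelper without touching AWS: pytest-mock
# patches helper internals (billing metrics, tag checks), while botocore's
# Stubber intercepts calls made through the underlying WorkSpaces client.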
def test_process_workspace_standard(mocker):
workspace = {
'WorkspaceId': 'ws-68h123hty',
'DirectoryId': 'd-901230bb84',
'UserName': 'test_user',
'IpAddress': '111.16.1.233',
'State': 'AVAILABLE',
'BundleId': 'wsb-cl123qzj1',
'SubnetId': 'subnet-05d421387eaa7cf86',
'ComputerName': 'A-APPW123KP4NP',
'WorkspaceProperties': {
'RunningMode': 'ALWAYS_ON',
'RootVolumeSizeGib': 80,
'UserVolumeSizeGib': 50,
'ComputeTypeName': 'STANDARD'
},
'ModificationStates': []
}
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': True,
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
mocker.patch.object(workspace_helper.metricsHelper, 'get_billable_hours')
workspace_helper.metricsHelper.get_billable_hours.return_value = 444
mocker.patch.object(workspace_helper, 'check_for_skip_tag')
workspace_helper.check_for_skip_tag.return_value = True
result = workspace_helper.process_workspace(workspace)
assert result['bundleType'] == 'STANDARD'
def test_process_workspace_performance(mocker):
workspace = {
'WorkspaceId': 'ws-68h123hty',
'DirectoryId': 'd-901230bb84',
'UserName': 'test_user',
'IpAddress': '111.16.1.233',
'State': 'AVAILABLE',
'BundleId': 'wsb-cl123qzj1',
'SubnetId': 'subnet-05d421387eaa7cf86',
'ComputerName': 'A-APPW123KP4NP',
'WorkspaceProperties': {
'RunningMode': 'ALWAYS_ON',
'RootVolumeSizeGib': 80,
'UserVolumeSizeGib': 50,
'ComputeTypeName': 'PERFORMANCE'
},
'ModificationStates': []
}
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': True,
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
mocker.patch.object(workspace_helper.metricsHelper, 'get_billable_hours')
workspace_helper.metricsHelper.get_billable_hours.return_value = 100
mocker.patch.object(workspace_helper, 'check_for_skip_tag')
workspace_helper.check_for_skip_tag.return_value = False
mocker.patch.object(workspace_helper, 'get_hourly_threshold')
workspace_helper.get_hourly_threshold.return_value = 5
mocker.patch.object(workspace_helper, 'compare_usage_metrics')
workspace_helper.compare_usage_metrics.return_value = {
'resultCode': '-N-',
'newMode': 'ALWAYS_ON'
}
result = workspace_helper.process_workspace(workspace)
assert result['bundleType'] == 'PERFORMANCE'
assert result['billableTime'] == 100
def test_modify_workspace_properties_Always_On(mocker):
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': False,
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
client_stubber = Stubber(workspace_helper.client)
response = {}
expected_params = {
'WorkspaceId': '123qwer',
'WorkspaceProperties': {'RunningMode': 'ALWAYS_ON'}
}
client_stubber.add_response('modify_workspace_properties', response, expected_params)
client_stubber.activate()
workspace_id = '123qwer'
new_running_mode = 'ALWAYS_ON'
result = workspace_helper.modify_workspace_properties(workspace_id, new_running_mode)
assert result == '-M-'
def test_modify_workspace_properties_Auto_stop(mocker):
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': False,
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
client_stubber = Stubber(workspace_helper.client)
response = {}
expected_params = {
'WorkspaceId': '123qwer',
'WorkspaceProperties': {'RunningMode': 'AUTO_STOP'}
}
client_stubber.add_response('modify_workspace_properties', response, expected_params)
client_stubber.activate()
workspace_id = '123qwer'
new_running_mode = 'AUTO_STOP'
result = workspace_helper.modify_workspace_properties(workspace_id, new_running_mode)
assert result == '-H-'
client_stubber.deactivate()
def test_modify_workspace_properties_Exception(mocker):
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': False,
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
client_stubber = Stubber(workspace_helper.client)
response = {}
expected_params = {
'WorkspaceProperties': {'RunningMode': 'AUTO_STOP'}
}
client_stubber.add_response('modify_workspace_properties', response, expected_params)
client_stubber.activate()
workspace_id = '123qwer'
new_running_mode = 'AUTO_STOP'
result = workspace_helper.modify_workspace_properties(workspace_id, new_running_mode)
assert result == '-E-'
def test_modify_workspace_properties_Auto_stop_Dry_Run_True(mocker):
# validate that the stubber call is not made when Dry Run is set to True
# send an invalid request via the stubber and validate that the method does not throw an exception
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': True,
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
client_stubber = Stubber(workspace_helper.client)
response = {}
expected_params = {
'WorkspaceProperties': {'RunningMode': 'AUTO_STOP'}
}
client_stubber.add_response('modify_workspace_properties', response, expected_params)
client_stubber.activate()
workspace_id = '123qwer'
new_running_mode = 'AUTO_STOP'
# the call should succeed without raising, and the stubbed client must not be invoked because dry run is enabled
result = workspace_helper.modify_workspace_properties(workspace_id, new_running_mode)
assert result == '-H-'
client_stubber.deactivate()
def test_modify_workspace_properties_Always_On_Dry_Run_True(mocker):
# validate that the stubber call is not made when Dry Run is set to True
# send an invalid request via the stubber and validate that the method does not throw an exception
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': True,
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
client_stubber = Stubber(workspace_helper.client)
response = {}
expected_params = {
'WorkspaceProperties': {'RunningMode': 'ALWAYS_ON'}
}
client_stubber.add_response('modify_workspace_properties', response, expected_params)
client_stubber.activate()
workspace_id = '123qwer'
new_running_mode = 'ALWAYS_ON'
# the call should succeed without raising, and the stubbed client must not be invoked because dry run is enabled
result = workspace_helper.modify_workspace_properties(workspace_id, new_running_mode)
assert result == '-M-'
client_stubber.deactivate()
def test_check_for_skip_tag_true(mocker):
tags = [{'Key': 'skip_convert', 'Value': 'True'}]
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': 'yes',
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
result = workspace_helper.check_for_skip_tag(tags)
assert result is True
def test_check_for_skip_tag_false(mocker):
tags = [{'Key': 'nothing', 'Value': 'True'}]
settings = {
'region': 'us-east-1',
'hourlyLimits': 10,
'testEndOfMonth': 'yes',
'isDryRun': 'yes',
'startTime': 1,
'endTime': 2
}
workspace_helper = WorkspacesHelper(settings)
result = workspace_helper.check_for_skip_tag(tags)
assert result is False
| 31.679389 | 98 | 0.660602 | 843 | 8,300 | 6.23962 | 0.170819 | 0.09981 | 0.071293 | 0.034221 | 0.929848 | 0.892015 | 0.88365 | 0.859316 | 0.859316 | 0.83346 | 0 | 0.025833 | 0.225783 | 8,300 | 261 | 99 | 31.800766 | 0.792717 | 0.059277 | 0 | 0.757009 | 0 | 0 | 0.244328 | 0.02615 | 0 | 0 | 0 | 0 | 0.046729 | 1 | 0.042056 | false | 0 | 0.014019 | 0 | 0.056075 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d81cee2faf2e0c45b791df8886e5d17536069819 | 44,646 | py | Python | azure-mgmt-notificationhubs/azure/mgmt/notificationhubs/operations/notification_hubs_operations.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-07-23T08:59:24.000Z | 2018-07-23T08:59:24.000Z | azure-mgmt-notificationhubs/azure/mgmt/notificationhubs/operations/notification_hubs_operations.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-11-29T14:46:42.000Z | 2018-11-29T14:46:42.000Z | azure-mgmt-notificationhubs/azure/mgmt/notificationhubs/operations/notification_hubs_operations.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-08-28T14:36:47.000Z | 2018-08-28T14:36:47.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class NotificationHubsOperations(object):
"""NotificationHubsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client Api Version. Constant value: "2017-04-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-04-01"
self.config = config
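# Usage sketch (an assumption, not part of this generated file): instances of
# this class are not built directly; they are exposed as the
# `notification_hubs` attribute of the management client, e.g.
#   client = NotificationHubsManagementClient(credentials, subscription_id)
#   hub = client.notification_hubs.get(resource_group, namespace, hub_name)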
def check_notification_hub_availability(
self, resource_group_name, namespace_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Checks the availability of the given notificationHub in a namespace.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param parameters: The notificationHub name.
:type parameters:
~azure.mgmt.notificationhubs.models.CheckAvailabilityParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: CheckAvailabilityResult or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.notificationhubs.models.CheckAvailabilityResult or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.check_notification_hub_availability.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'CheckAvailabilityParameters')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CheckAvailabilityResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
check_notification_hub_availability.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/checkNotificationHubAvailability'}
def create_or_update(
self, resource_group_name, namespace_name, notification_hub_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates/Update a NotificationHub in a namespace.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param parameters: Parameters supplied to the create/update a
NotificationHub Resource.
:type parameters:
~azure.mgmt.notificationhubs.models.NotificationHubCreateOrUpdateParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: NotificationHubResource or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.notificationhubs.models.NotificationHubResource or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'NotificationHubCreateOrUpdateParameters')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NotificationHubResource', response)
if response.status_code == 201:
deserialized = self._deserialize('NotificationHubResource', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}'}
def delete(
self, resource_group_name, namespace_name, notification_hub_name, custom_headers=None, raw=False, **operation_config):
"""Deletes a notification hub associated with a namespace.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}'}
def get(
self, resource_group_name, namespace_name, notification_hub_name, custom_headers=None, raw=False, **operation_config):
"""Lists the notification hubs associated with a namespace.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: NotificationHubResource or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.notificationhubs.models.NotificationHubResource or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NotificationHubResource', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}'}
def create_or_update_authorization_rule(
self, resource_group_name, namespace_name, notification_hub_name, authorization_rule_name, properties, custom_headers=None, raw=False, **operation_config):
"""Creates/Updates an authorization rule for a NotificationHub.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param authorization_rule_name: Authorization Rule Name.
:type authorization_rule_name: str
:param properties: Properties of the Namespace AuthorizationRules.
:type properties:
~azure.mgmt.notificationhubs.models.SharedAccessAuthorizationRuleProperties
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SharedAccessAuthorizationRuleResource or ClientRawResponse if
raw=true
:rtype:
~azure.mgmt.notificationhubs.models.SharedAccessAuthorizationRuleResource
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
parameters = models.SharedAccessAuthorizationRuleCreateOrUpdateParameters(properties=properties)
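# Note: the flattened `properties` argument is wrapped back into the
# create/update parameters model here before being serialized into the body.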
# Construct URL
url = self.create_or_update_authorization_rule.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'SharedAccessAuthorizationRuleCreateOrUpdateParameters')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SharedAccessAuthorizationRuleResource', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create_or_update_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}/AuthorizationRules/{authorizationRuleName}'}
def delete_authorization_rule(
self, resource_group_name, namespace_name, notification_hub_name, authorization_rule_name, custom_headers=None, raw=False, **operation_config):
"""Deletes a notificationHub authorization rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param authorization_rule_name: Authorization Rule Name.
:type authorization_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.delete_authorization_rule.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}/AuthorizationRules/{authorizationRuleName}'}
def get_authorization_rule(
self, resource_group_name, namespace_name, notification_hub_name, authorization_rule_name, custom_headers=None, raw=False, **operation_config):
"""Gets an authorization rule for a NotificationHub by name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param authorization_rule_name: authorization rule name.
:type authorization_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SharedAccessAuthorizationRuleResource or ClientRawResponse if
raw=true
:rtype:
~azure.mgmt.notificationhubs.models.SharedAccessAuthorizationRuleResource
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get_authorization_rule.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SharedAccessAuthorizationRuleResource', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}/AuthorizationRules/{authorizationRuleName}'}
def list(
self, resource_group_name, namespace_name, custom_headers=None, raw=False, **operation_config):
"""Lists the notification hubs associated with a namespace.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of NotificationHubResource
:rtype:
~azure.mgmt.notificationhubs.models.NotificationHubResourcePaged[~azure.mgmt.notificationhubs.models.NotificationHubResource]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.NotificationHubResourcePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.NotificationHubResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs'}
def list_authorization_rules(
self, resource_group_name, namespace_name, notification_hub_name, custom_headers=None, raw=False, **operation_config):
"""Gets the authorization rules for a NotificationHub.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of
SharedAccessAuthorizationRuleResource
:rtype:
~azure.mgmt.notificationhubs.models.SharedAccessAuthorizationRuleResourcePaged[~azure.mgmt.notificationhubs.models.SharedAccessAuthorizationRuleResource]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_authorization_rules.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.SharedAccessAuthorizationRuleResourcePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.SharedAccessAuthorizationRuleResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}/AuthorizationRules'}
def list_keys(
self, resource_group_name, namespace_name, notification_hub_name, authorization_rule_name, custom_headers=None, raw=False, **operation_config):
"""Gets the Primary and Secondary ConnectionStrings to the NotificationHub
.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param authorization_rule_name: The connection string of the
NotificationHub for the specified authorizationRule.
:type authorization_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ResourceListKeys or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.notificationhubs.models.ResourceListKeys or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.list_keys.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ResourceListKeys', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}/AuthorizationRules/{authorizationRuleName}/listKeys'}
def regenerate_keys(
self, resource_group_name, namespace_name, notification_hub_name, authorization_rule_name, policy_key=None, custom_headers=None, raw=False, **operation_config):
"""Regenerates the Primary/Secondary Keys to the NotificationHub
Authorization Rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param authorization_rule_name: The connection string of the
NotificationHub for the specified authorizationRule.
:type authorization_rule_name: str
:param policy_key: Name of the key that has to be regenerated for the
Namespace/Notification Hub Authorization Rule. The value can be
Primary Key/Secondary Key.
:type policy_key: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ResourceListKeys or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.notificationhubs.models.ResourceListKeys or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
parameters = models.PolicykeyResource(policy_key=policy_key)
# Construct URL
url = self.regenerate_keys.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'PolicykeyResource')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ResourceListKeys', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}/AuthorizationRules/{authorizationRuleName}/regenerateKeys'}
def get_pns_credentials(
self, resource_group_name, namespace_name, notification_hub_name, custom_headers=None, raw=False, **operation_config):
"""Lists the PNS Credentials associated with a notification hub .
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param notification_hub_name: The notification hub name.
:type notification_hub_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: PnsCredentialsResource or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.notificationhubs.models.PnsCredentialsResource or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get_pns_credentials.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str'),
'notificationHubName': self._serialize.url("notification_hub_name", notification_hub_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PnsCredentialsResource', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_pns_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NotificationHubs/namespaces/{namespaceName}/notificationHubs/{notificationHubName}/pnsCredentials'}
| 51.024 | 274 | 0.688819 | 4,622 | 44,646 | 6.432713 | 0.048031 | 0.034979 | 0.034306 | 0.02906 | 0.924257 | 0.914974 | 0.905859 | 0.89809 | 0.896273 | 0.890993 | 0 | 0.00315 | 0.217735 | 44,646 | 874 | 275 | 51.08238 | 0.84816 | 0.279488 | 0 | 0.817768 | 0 | 0.022779 | 0.217917 | 0.129653 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034169 | false | 0 | 0.009112 | 0 | 0.102506 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d833185a9e28b4758a9678bc37366c747663a01c | 343 | py | Python | tests/internal/instance_type/test_instance_type_g_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | null | null | null | tests/internal/instance_type/test_instance_type_g_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | null | null | null | tests/internal/instance_type/test_instance_type_g_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | 1 | 2021-12-15T11:58:22.000Z | 2021-12-15T11:58:22.000Z |
# Testing module instance_type.g
import pytest
import ec2_compare.internal.instance_type.g
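# Smoke tests: both accessors of the generated instance-type data module
# should return non-empty collections.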
def test_get_internal_data_instance_type_g_get_instances_list():
assert len(ec2_compare.internal.instance_type.g.get_instances_list()) > 0
def test_get_internal_data_instance_type_g_get():
assert len(ec2_compare.internal.instance_type.g.get) > 0
| 34.3 | 75 | 0.845481 | 56 | 343 | 4.732143 | 0.339286 | 0.271698 | 0.29434 | 0.241509 | 0.826415 | 0.826415 | 0.611321 | 0.611321 | 0.611321 | 0 | 0 | 0.015773 | 0.075802 | 343 | 9 | 76 | 38.111111 | 0.820189 | 0.087464 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
dc26034344ad20d38d5d71da546681209c93260e | 5,497 | py | Python | request_model.py | sph116/zhongxin_search | 4cebc974fa5606c1701eae3f338949209c61a31b | [
"MIT"
] | 18 | 2019-05-22T01:25:13.000Z | 2022-02-27T13:37:42.000Z | request_model.py | sph116/zhongxin_search | 4cebc974fa5606c1701eae3f338949209c61a31b | [
"MIT"
] | null | null | null | request_model.py | sph116/zhongxin_search | 4cebc974fa5606c1701eae3f338949209c61a31b | [
"MIT"
] | 5 | 2019-08-07T09:54:51.000Z | 2021-02-19T10:47:34.000Z | import requests
import random
from ip_pool import get_ip
from lxml import etree
# from fake_useragent import UserAgent
class download():
def __init__(self):
# ua = UserAgent()  # UserAgent instantiation disabled: its network lookup is unreliable
self.UA = []
self.UA.append('Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36')
self.UA.append('Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0')
self.UA.append('Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 1.1.4322)')
def get(self, url, timeout, proxy=None, num_retries=0, ip_times=6):
headers = {'User-Agent': random.choice(self.UA)}  # pick a random User-Agent
if proxy is None:  # no proxy supplied: fetch the response directly
try:
rep = requests.get(url=url, headers=headers, timeout=timeout)
if rep.status_code == 200:
return(rep)
else:
raise NameError
except Exception as e:  # the request above failed; fall through to the retry logic below
# print(e)
if num_retries > 0:  # num_retries is the direct-retry budget
return self.get(url, timeout, num_retries=num_retries-1)
else:
"""获取代理"""
a = get_ip()
ip1 = "http://" + a
proxy = {'http': ip1}
return self.get(url, timeout, proxy)
else:
try:
# print(proxy)
# print('get raw using proxy: ', proxy)
rep = requests.get(url=url, headers=headers, timeout=timeout, proxies=proxy)
if rep.status_code == 200:
return rep
else:
raise NameError
# else:
# raise NameError
except Exception as e:
print(e)
if ip_times > 0:
# print('proxy fetch failed, retrying; attempts left:', ip_times)
a = get_ip()
ip2 = "http://" + a
proxy = {'http': ip2}
return self.get(url, timeout, proxy, ip_times=ip_times - 1)
else:
return requests.get(url=url, headers=headers, timeout=timeout)
class download1():
def __init__(self):
# ua = UserAgent()  # UserAgent instantiation disabled: its network lookup is unreliable
self.UA = []
self.UA.append('Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36')
self.UA.append('Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0')
self.UA.append('Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 1.1.4322)')
def get(self, url, data, timeout, proxy=None, num_retries=0, ip_times=10):
headers = {'User-Agent': random.choice(self.UA)}  # pick a random User-Agent
if proxy is None:  # no proxy given: fetch the response directly
try:
rep = requests.post(url=url, data=data, headers=headers, timeout=timeout)
if rep.status_code == 200:
rep.encoding = rep.apparent_encoding
sel = etree.HTML(rep.text)
if sel.xpath('//div[@id="news_list"]/table//tr[1]/td[2]/ul/li[1]/a/@href') == []:
raise NameError
else:
return rep
else:
raise NameError
except Exception as e:  # if the request above fails, run the fallback below
# print(e)
if num_retries > 0:  # num_retries is the retry budget we allow
return self.get(url, data, timeout, num_retries=num_retries-1)
else:
"""获取代理"""
a = get_ip()
ip1 = "http://" + a
proxy = {'http': ip1}
return self.get(url, data, timeout, proxy)
else:
try:
# print(proxy)
print('get raw using proxy: ', proxy)
rep = requests.post(url=url, data=data, headers=headers, timeout=timeout, proxies=proxy)
if rep.status_code == 200:
rep.encoding = rep.apparent_encoding  # fix the response encoding
sel = etree.HTML(rep.text)
if sel.xpath('//div[@id="news_list"]/table//tr[1]/td[2]/ul/li[1]/a/@href') == []:
raise NameError
else:
return rep
else:
raise NameError
# else:
# raise NameError
except Exception as e:
# print(e)
if ip_times > 0:
# print('proxy fetch failed, retries left:', ip_times)
a = get_ip()
ip2 = "http://" + a
proxy = {'http': ip2}
return self.get(url, data, timeout, proxy, ip_times=ip_times - 1)
else:
return requests.post(url=url, headers=headers, data=data, timeout=timeout)
request = download()
request1 = download1()
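# A minimal usage sketch (hypothetical URL and values; assumes an ip_pool module
# exposing get_ip() is importable and the target site is reachable):
#
#     rep = request.get('http://example.com/page', timeout=10, num_retries=2)
#     if rep is not None and rep.status_code == 200:
#         print(rep.text[:100])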
| 36.646667 | 160 | 0.463889 | 599 | 5,497 | 4.18197 | 0.208681 | 0.028743 | 0.028743 | 0.045509 | 0.913772 | 0.913772 | 0.913772 | 0.910579 | 0.868663 | 0.81996 | 0 | 0.062718 | 0.425687 | 5,497 | 149 | 161 | 36.892617 | 0.730757 | 0.084046 | 0 | 0.755319 | 0 | 0.085106 | 0.170903 | 0.023972 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042553 | false | 0 | 0.042553 | 0 | 0.223404 | 0.021277 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
dcac318911a42db69baff0c16ba8a9d5aec3fd30 | 96,002 | py | Python | tensorflow/python/ipu/tests/pipelining_test.py | pierricklee/tensorflow | c6a61d7b19a9242b06f40120ab42f0fdb0b5c462 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/ipu/tests/pipelining_test.py | pierricklee/tensorflow | c6a61d7b19a9242b06f40120ab42f0fdb0b5c462 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/ipu/tests/pipelining_test.py | pierricklee/tensorflow | c6a61d7b19a9242b06f40120ab42f0fdb0b5c462 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from absl.testing import parameterized
from functools import partial
from tensorflow.python.ipu.config import IPUConfig
import numpy as np
import pva
from tensorflow.keras import layers
from tensorflow.compiler.plugin.poplar.tests import test_utils as tu
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager.backprop import GradientTape
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras.optimizer_v2 import gradient_descent as gradient_descent_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import custom_gradient
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import momentum
from tensorflow.python.training import optimizer as optimizer_lib
from tensorflow.python.ipu import embedding_ops
from tensorflow.python.ipu import ipu_compiler
from tensorflow.python.ipu import ipu_infeed_queue
from tensorflow.python.ipu import ipu_outfeed_queue
from tensorflow.python.ipu import normalization_ops
from tensorflow.python.ipu import pipelining_ops
from tensorflow.python.ipu import utils
from tensorflow.python.ipu.optimizers import map_gradient_optimizer
from tensorflow.python.ipu.tests import pipelining_test_util
from tensorflow.compat.v1 import disable_v2_behavior
disable_v2_behavior()
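# These tests run under TF1-style graph mode (see the deprecated_graph_mode_only
# decorators), so v2 behaviour is disabled globally.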
PIPELINE_COMPARE_TEST_CASES = [{
'testcase_name': 'V1',
'opt_type': gradient_descent.GradientDescentOptimizer,
'opt_args': (0.01,)
}, {
'testcase_name': 'V2',
'opt_type': gradient_descent_v2.SGD,
'opt_args': (0.01,)
}]
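# Each pipeline-compare test below is parameterized to run once with the V1
# GradientDescentOptimizer and once with the Keras V2 SGD optimizer.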
class PipeliningTest(test_util.TensorFlowTestCase, parameterized.TestCase):
@test_util.deprecated_graph_mode_only
def testNoComputeGradsArgsWithV2(self):
with self.assertRaisesRegex(
ValueError,
"OptimizerFunctionOutput.compute_gradients_args may not be used "
"with OptimizerV2 instances."):
opt = gradient_descent_v2.SGD()
loss = math_ops.square(1)
compute_args = (1, 2, 3)
_ = pipelining_ops.OptimizerFunctionOutput(
opt, loss, compute_gradients_args=compute_args)
@test_util.deprecated_graph_mode_only
def testNoComputeGradsKwargsWithV2(self):
with self.assertRaisesRegex(
ValueError,
"OptimizerFunctionOutput.compute_gradients_kwargs may not be used "
"with OptimizerV2 instances."):
opt = gradient_descent_v2.SGD()
loss = math_ops.square(1)
compute_kwargs = {'a': 1, 'b': 2}
_ = pipelining_ops.OptimizerFunctionOutput(
opt, loss, compute_gradients_kwargs=compute_kwargs)
@test_util.deprecated_graph_mode_only
def testInvalidTypeForTape(self):
with self.assertRaisesRegex(
TypeError, "OptimizerFunctionOutput.tape must be a GradientTape."):
opt = gradient_descent_v2.SGD()
loss = math_ops.square(1)
_ = pipelining_ops.OptimizerFunctionOutput(opt,
loss,
tape=['a', 'b', 'c'])
@test_util.deprecated_graph_mode_only
def testNoGradientTapeWithV1(self):
with self.assertRaisesRegex(
ValueError,
"OptimizerFunctionOutput.tape may only be used with OptimizerV2."):
opt = gradient_descent.GradientDescentOptimizer(1)
with GradientTape() as tape:
loss = math_ops.square(1)
_ = pipelining_ops.OptimizerFunctionOutput(opt, loss, tape=tape)
@test_util.deprecated_graph_mode_only
def testNoVariablesWithV1(self):
with self.assertRaisesRegex(
ValueError,
"OptimizerFunctionOutput.variables may only be used with OptimizerV2."
):
opt = gradient_descent.GradientDescentOptimizer(1)
loss = math_ops.square(1)
_ = pipelining_ops.OptimizerFunctionOutput(opt,
loss,
variables=[1, 2, 3])
@test_util.deprecated_graph_mode_only
def testNoTapeWithVariables(self):
with self.assertRaisesRegex(
ValueError, "OptimizerFunctionOutput.tape may not be used when "
"OptimizerFunctionOutput.variables is nonempty."):
opt = gradient_descent_v2.SGD(1)
with GradientTape() as tape:
loss = math_ops.square(1)
_ = pipelining_ops.OptimizerFunctionOutput(opt,
loss,
variables=[1, 2, 3],
tape=tape)
@test_util.deprecated_graph_mode_only
def testNoVariablesWithTape(self):
with self.assertRaisesRegex(
ValueError, "OptimizerFunctionOutput.variables must be empty when "
"OptimizerFunctionOutput.tape is used."):
opt = gradient_descent_v2.SGD(1)
with GradientTape() as tape:
loss = math_ops.square(1)
f = pipelining_ops.OptimizerFunctionOutput(opt, loss, tape=tape)
f.variables = [1, 2, 3]
@test_util.deprecated_graph_mode_only
def testPipelineNoOutfeedInference(self):
def stage1(x):
with variable_scope.variable_scope("vs", use_resource=True):
y = x + 1
return y
def stage2(x):
loss = math_ops.reduce_sum(x)
return loss
def my_net(x):
return pipelining_ops.pipeline([stage1, stage2], 10, inputs=[x])
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[1, 4, 4, 2])
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(
ValueError, 'The last computational stage has tensor outputs'):
ipu_compiler.compile(my_net, inputs=[x])
@test_util.deprecated_graph_mode_only
def testPipelineNoOutfeedWithOutputsTraining(self):
def stage1(x):
with variable_scope.variable_scope("vs", use_resource=True):
y = x + 1
return y
def stage2(x):
y = layers.Conv2D(2,
1,
use_bias=True,
bias_initializer=init_ops.ones_initializer(),
kernel_initializer=init_ops.ones_initializer())(x)
loss = math_ops.reduce_sum(y)
return loss
def optimizer_function(loss):
opt = gradient_descent.GradientDescentOptimizer(0.01)
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
def my_net(x):
return pipelining_ops.pipeline(
[stage1, stage2],
10,
inputs=[x],
optimizer_function=optimizer_function,
pipeline_schedule=pipelining_ops.PipelineSchedule.Grouped)
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[1, 4, 4, 2])
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(ValueError,
'The last computational stage has tensor'):
ipu_compiler.compile(my_net, inputs=[x])
@test_util.deprecated_graph_mode_only
def testPipelineIterationsNotMultiple(self):
dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
a = value
b = (value + 10.) / 2.0
return {"a": a, "b": b}
dataset = dataset.map(dataset_parser)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed1")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed1")
def stage1(c, **kwargs):
with variable_scope.variable_scope("vs", use_resource=True):
y = layers.Conv2D(2,
1,
use_bias=True,
kernel_initializer=init_ops.ones_initializer(),
name='conv1')(kwargs["a"])
return y + kwargs["b"], c
def stage2(x, c):
return math_ops.reduce_sum(x) + c
def stage3(x):
return x
def my_net(c):
return pipelining_ops.pipeline(
[stage1, stage2, stage3],
10,
inputs=[c],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
c = array_ops.placeholder(np.float32, shape=[])
with tu.ipu_session() as sess:
with ops.device("/device:IPU:0"):
r = ipu_compiler.compile(my_net, inputs=[c])
cfg = IPUConfig()
cfg.auto_select_ipus = 4
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
sess.run(variables.global_variables_initializer())
sess.run(infeed_queue.initializer)
with self.assertRaisesRegex(
errors.FailedPreconditionError,
'The pipeline depth of the pipeline must be a multiple of 3'):
sess.run(r, {c: 10.01})
@test_util.deprecated_graph_mode_only
def testPipelineInvalidDeviceMapping(self):
dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
a = value
b = (value + 10.) / 2.0
return {"a": a, "b": b}
dataset = dataset.map(dataset_parser)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed3")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed3")
def stage1(c, **kwargs):
with variable_scope.variable_scope("vs", use_resource=True):
y = layers.Conv2D(2,
1,
use_bias=True,
kernel_initializer=init_ops.ones_initializer(),
name='conv1')(kwargs["a"])
return y + kwargs["b"], c
def stage2(x, c):
return math_ops.reduce_sum(x) + c
def stage3(x):
return x
with ops.device('cpu'):
c = array_ops.placeholder(np.float32, shape=[])
# Wrong type:
with self.assertRaisesRegex(
TypeError, 'device_mapping argument needs to be a list or a tuple'):
pipelining_ops.pipeline(
[stage1, stage2, stage3],
3,
inputs=[c],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
device_mapping=1,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
# Too many values:
with self.assertRaisesRegex(ValueError,
'Each stage must be mapped to an IPU'):
pipelining_ops.pipeline(
[stage1, stage2, stage3],
3,
inputs=[c],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
device_mapping=list(range(4)),
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
# Not enough values:
with self.assertRaisesRegex(ValueError,
'Each stage must be mapped to an IPU'):
pipelining_ops.pipeline(
[stage1, stage2, stage3],
3,
inputs=[c],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
device_mapping=tuple(range(1)),
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
@test_util.deprecated_graph_mode_only
def testPipelineWithDeviceMapping(self):
dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
a = value
b = (value + 10.) / 2.0
return {"a": a, "b": b}
dataset = dataset.map(dataset_parser)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed4")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed4")
device_mapping = [2, 0, 1]
def stage1(c, **kwargs):
with variable_scope.variable_scope("vs", use_resource=True):
y = layers.Conv2D(2,
1,
use_bias=True,
kernel_initializer=init_ops.ones_initializer(),
name='conv1')(kwargs["a"])
return y + kwargs["b"], c
def stage2(x, c):
return math_ops.reduce_sum(x) + c
def stage3(x):
return x
def my_net(c):
return pipelining_ops.pipeline(
[stage1, stage2, stage3],
12,
inputs=[c],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
device_mapping=device_mapping,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
c = array_ops.placeholder(np.float32, shape=[])
with tu.ipu_session() as sess:
with ops.device("/device:IPU:0"):
r = ipu_compiler.compile(my_net, inputs=[c])
cfg = IPUConfig()
cfg.auto_select_ipus = 4
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg._profiling.enable_ipu_events = True # pylint: disable=protected-access
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfeed_op = outfeed_queue.dequeue()
report_json = tu.ReportJSON(self, sess)
report_json.reset()
sess.run(variables.global_variables_initializer())
sess.run(infeed_queue.initializer)
sess.run(r, {c: 10.01})
losses_pipeline = sess.run(outfeed_op)
self.assertAllClose(losses_pipeline, [
410.01, 730.01, 650.01, 570.01, 890.01, 410.01, 730.01, 650.01,
570.01, 890.01, 410.01, 730.01
])
report_json.parse_log()
report_json.assert_pipeline_stages_on_expected_ipu(
device_mapping, cfg.ipu_model.tiles_per_ipu)
@test_util.deprecated_graph_mode_only
def testPipelineWithDeviceMappingSameIpu(self):
dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
a = value
b = (value + 10.) / 2.0
return {"a": a, "b": b}
dataset = dataset.map(dataset_parser)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed5")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed5")
device_mapping = [2, 2, 2]
def stage1(c, **kwargs):
with variable_scope.variable_scope("vs", use_resource=True):
y = layers.Conv2D(2,
1,
use_bias=True,
kernel_initializer=init_ops.ones_initializer(),
name='conv1')(kwargs["a"])
return y + kwargs["b"], c
def stage2(x, c):
return math_ops.reduce_sum(x) + c
def stage3(x):
return x
def my_net(c):
return pipelining_ops.pipeline(
[stage1, stage2, stage3],
12,
inputs=[c],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
device_mapping=device_mapping,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
c = array_ops.placeholder(np.float32, shape=[])
with tu.ipu_session() as sess:
with ops.device("/device:IPU:0"):
r = ipu_compiler.compile(my_net, inputs=[c])
cfg = IPUConfig()
cfg.auto_select_ipus = 4
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg._profiling.enable_ipu_events = True # pylint: disable=protected-access
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfeed_op = outfeed_queue.dequeue()
report_json = tu.ReportJSON(self, sess)
report_json.reset()
sess.run(variables.global_variables_initializer())
sess.run(infeed_queue.initializer)
sess.run(r, {c: 10.01})
losses_pipeline = sess.run(outfeed_op)
self.assertAllClose(losses_pipeline, [
410.01, 730.01, 650.01, 570.01, 890.01, 410.01, 730.01, 650.01,
570.01, 890.01, 410.01, 730.01
])
report_json.parse_log()
report_json.assert_pipeline_stages_on_expected_ipu(
device_mapping, cfg.ipu_model.tiles_per_ipu)
@test_util.deprecated_graph_mode_only
def testPipelineWithInfeedsKwargs(self):
dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
a = value
b = (value + 10.) / 2.0
return {"a": a, "b": b}
dataset = dataset.map(dataset_parser)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed6")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed6")
def stage1(c, **kwargs):
with variable_scope.variable_scope("vs", use_resource=True):
y = layers.Conv2D(2,
1,
use_bias=True,
kernel_initializer=init_ops.ones_initializer(),
name='conv1')(kwargs["a"])
return y + kwargs["b"], c
def stage2(x, c):
return math_ops.reduce_sum(x) + c
def stage3(x):
return x
def my_net(c):
return pipelining_ops.pipeline(
[stage1, stage2, stage3],
12,
inputs=[c],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
c = array_ops.placeholder(np.float32, shape=[])
with tu.ipu_session() as sess:
with ops.device("/device:IPU:0"):
r = ipu_compiler.compile(my_net, inputs=[c])
cfg = IPUConfig()
cfg.auto_select_ipus = 4
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg._profiling.enable_ipu_events = True # pylint: disable=protected-access
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfeed_op = outfeed_queue.dequeue()
report_json = tu.ReportJSON(self, sess)
report_json.reset()
sess.run(variables.global_variables_initializer())
sess.run(infeed_queue.initializer)
report_json.parse_log()
sess.run(r, {c: 10.01})
losses_pipeline = sess.run(outfeed_op)
self.assertAllClose(losses_pipeline, [
410.01, 730.01, 650.01, 570.01, 890.01, 410.01, 730.01, 650.01,
570.01, 890.01, 410.01, 730.01
])
report_json.parse_log()
report_json.assert_pipeline_stages_on_expected_ipu(
(0, 1, 3), cfg.ipu_model.tiles_per_ipu)
@test_util.deprecated_graph_mode_only
def testIllegalCapture(self):
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed8")
with ops.device('cpu'):
y = array_ops.placeholder(np.float32, shape=[])
def stage1(x):
return x * y
def stage2(x):
return x
def model_pipeline(x):
return pipelining_ops.pipeline(
[stage1, stage2],
10,
inputs=[x],
outfeed_queue=outfeed_queue,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[1, 4, 4, 2])
y = array_ops.placeholder(np.float32, shape=[])
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(ValueError, 'Trying to capture the tensor'):
ipu_compiler.compile(model_pipeline, inputs=[x])
@test_util.deprecated_graph_mode_only
def testPipelineOnlyOneStage(self):
def stage1(x):
return x
def my_net(x):
return pipelining_ops.pipeline(
[stage1],
10,
inputs=[x],
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[1, 4, 4, 2])
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(ValueError,
'Pipeline requires at least two'):
ipu_compiler.compile(my_net, inputs=[x])
@test_util.deprecated_graph_mode_only
def testDuplicateInputsOutputs(self):
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed9")
def stage1(x, y):
return x, y, y, x
# The above should be optimised to a single copy for each duplicate output.
def stage2(x1, y1, y2, x2):
return x1, y1, y2, x2
# Same for this stage
def stage3(_x1, _y1, y2, x2):
return x2, y2
def model_pipeline(x, y):
return pipelining_ops.pipeline(
[stage1, stage2, stage3],
12,
inputs=[x, y],
outfeed_queue=outfeed_queue,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[1, 4, 4, 2])
y = array_ops.placeholder(np.float32, shape=[1, 2])
with ops.device("/device:IPU:0"):
compiled_model_pipeline = ipu_compiler.compile(model_pipeline,
inputs=[x, y])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
#TODO(T10784) test how many IPU copies are here once we insert IPU copies.
outfeed_op = outfeed_queue.dequeue()
with tu.ipu_session() as sess:
sess.run(compiled_model_pipeline, {
x: np.ones(x.shape),
y: np.ones(y.shape)
})
output = sess.run(outfeed_op)
for i in range(12):
self.assertAllClose(output[0][i], np.ones(x.shape))
self.assertAllClose(output[1][i], np.ones(y.shape))
@test_util.deprecated_graph_mode_only
def testPipelineWithStagesWithConstants(self):
dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
a = value
b = (value + 10.) / 2.0
idx = value[0][0][0][0]
return {"a": a, "b": b, "idx": idx}
dataset = dataset.map(dataset_parser)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed10")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed10")
def stage1(c, **kwargs):
y = layers.Conv2D(2,
1,
use_bias=True,
kernel_initializer=init_ops.ones_initializer(),
name='conv1')(kwargs["a"])
y = normalization_ops.group_norm(y)
return y + kwargs["b"], c, kwargs["idx"]
def stage2(x, c, idx):
return x, c, idx
def stage3(x, c, idx):
return layers.Dense(
2,
kernel_initializer=init_ops.ones_initializer(),
bias_initializer=init_ops.ones_initializer())(x), c, idx
def stage4(x, c, idx):
return math_ops.reduce_sum(
layers.Dense(
2,
kernel_initializer=init_ops.ones_initializer(),
bias_initializer=init_ops.ones_initializer())(x)) + c, idx
def optimizer_function(loss, _):
def func(grad, _):
return clip_ops.clip_by_value(grad, -1., 1.)
opt = map_gradient_optimizer.MapGradientOptimizer(
gradient_descent.GradientDescentOptimizer(0.01), func)
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
def my_net(c):
return pipelining_ops.pipeline(
[stage1, stage2, stage3, stage4],
12,
inputs=[c],
optimizer_function=optimizer_function,
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved)
with ops.device('cpu'):
c = array_ops.placeholder(np.float32, shape=[])
with tu.ipu_session() as sess:
with ops.device("/device:IPU:0"):
r = ipu_compiler.compile(my_net, inputs=[c])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
tu.move_variable_initialization_to_cpu()
outfeed_op = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(infeed_queue.initializer)
# Run the pipeline twice.
sess.run(r, {c: 10.01})
sess.run(r, {c: 10.01})
losses_pipeline = sess.run(outfeed_op)
# The values have been verified and compared against running the same
# graph but sharded with gradient accumulation for 12 mini batches.
self.assertAllClose(losses_pipeline[0], [
1546.01, 1802.01, 1738.01, 1674.01, 1930.01, 1546.01, 1802.01,
1738.01, 1674.01, 1930.01, 1546.01, 1802.01, 1331.1415, 1281.5806,
1479.8259, 1182.457, 1380.7043, 1331.1415, 1281.5806, 1479.8259,
1182.457, 1380.7043, 1331.1415, 1281.5806
])
self.assertAllClose(losses_pipeline[1], [
0, 2, 4, 1, 3, 0, 2, 4, 1, 3, 0, 2, 4, 1, 3, 0, 2, 4, 1, 3, 0, 2, 4,
1
])
@test_util.deprecated_graph_mode_only
def testPipelineWithStagesNoVariables(self):
dataset = tu.create_single_increasing_dataset(5, shape=[1])
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed11")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed11")
def stage1(features):
partial = features * features
return partial
def stage2(partial):
prediction = partial + partial
return prediction
def stage3(partial):
return partial
def model():
with variable_scope.variable_scope("vs", use_resource=True):
pipeline_op = pipelining_ops.pipeline(
computational_stages=[stage1, stage2, stage3],
gradient_accumulation_count=6,
repeat_count=1,
inputs=[],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
name="Pipeline")
return pipeline_op
with tu.ipu_session() as sess:
with ops.device("/device:IPU:0"):
r = ipu_compiler.compile(model, inputs=[])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
tu.move_variable_initialization_to_cpu()
outfeed_op = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(infeed_queue.initializer)
# Run the pipeline.
sess.run(r)
results = sess.run(outfeed_op)
self.assertAllClose(results, [[0.], [2.], [8.], [18.], [32.], [0.]])
@parameterized.named_parameters(*PIPELINE_COMPARE_TEST_CASES)
@test_util.deprecated_graph_mode_only
def testPipelineCompare1(self, opt_type, opt_args):
def dataset_fn():
dataset = tu.create_single_increasing_dataset(7, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
img = value / 7
label = value[0][0][0][0]
return img, label
return dataset.map(dataset_parser)
gradient_accumulation_count = 20
repeat_count = 2
def optimizer_fn():
return opt_type(*opt_args)
def stage1(c, img, label):
with variable_scope.variable_scope("stage1", use_resource=True):
y = layers.Conv2D(
2,
1,
use_bias=True,
kernel_initializer=init_ops.constant_initializer(0.5),
bias_initializer=init_ops.constant_initializer(0.5),
name='conv1')(img)
return y, c, label
def stage2(x, c, label):
with variable_scope.variable_scope("stage2", use_resource=True):
return x * 20, c, label
def stage3(x, c, label):
with variable_scope.variable_scope("stage3", use_resource=True):
return layers.Dense(
2,
kernel_initializer=init_ops.constant_initializer(0.5),
bias_initializer=init_ops.constant_initializer(0.5))(x), c, label
def stage4(x, c, label):
with variable_scope.variable_scope("stage4", use_resource=True):
return math_ops.reduce_sum(
layers.Dense(2,
kernel_initializer=init_ops.constant_initializer(0.5),
bias_initializer=init_ops.constant_initializer(0.5))
(x)) + c + label
def inputs_fn():
with ops.device('cpu'):
return [array_ops.placeholder(np.float32, shape=[])]
pipelining_test_util.PipelineTester.compare_pipeline_to_cpu(
[stage1, stage2, stage3, stage4],
inputs_fn, [10.01],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
15500,
schedule=pipelining_ops.PipelineSchedule.Interleaved)
@parameterized.named_parameters(*PIPELINE_COMPARE_TEST_CASES)
@test_util.deprecated_graph_mode_only
def testPipelineCompare2(self, opt_type, opt_args):
# ResNet-like network.
def dataset_fn():
dataset = tu.create_single_increasing_dataset(100, shape=[4])
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
img = value
label = math_ops.reduce_mean(img, axis=[1, 2, 3])
return img, math_ops.cast(label, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 18
repeat_count = 2
def optimizer_fn():
return opt_type(*opt_args)
def fixed_padding(inputs, kernel_size):
pad_total = kernel_size - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
padded_inputs = array_ops.pad(
inputs, [[0, 0], [pad_beg, pad_end], [pad_beg, pad_end], [0, 0]])
return padded_inputs
def block(name, first_stride, out_filters, count, x):
for i in range(count):
shape_in = x.shape
stride = first_stride if (i == 0) else 1
if stride > 1:
x = fixed_padding(x, 3)
sc = x
with variable_scope.variable_scope(name + "/" + str(i) + "/1"):
x = conv(x, 3, stride, out_filters)
x = nn.relu(x)
with variable_scope.variable_scope(name + "/" + str(i) + "/2"):
x = conv(x, 3, 1, out_filters)
# shortcut
if stride != 1:
sc = array_ops.strided_slice(sc, [0, 0, 0, 0],
sc.shape,
strides=[1, stride, stride, 1])
pad = int(x.shape[3] - shape_in[3])
if pad != 0:
sc = array_ops.pad(sc, paddings=[[0, 0], [0, 0], [0, 0], [0, pad]])
x = nn.relu(x + sc)
return x
def fc(x, num_units_out):
return layers.Dense(
num_units_out,
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def max_pool(x, ksize=3, stride=2):
return layers.MaxPooling2D(ksize, stride, padding='SAME')(x)
def conv(x, ksize, stride, filters_out):
return layers.Conv2D(
filters_out,
ksize,
stride,
'SAME',
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def stage1(img, label):
with variable_scope.variable_scope("stage1", use_resource=True):
x = conv(img, 7, 2, 16)
x = nn.relu(x)
x = max_pool(x, ksize=3, stride=2)
return x, label
def stage2(x, label):
with variable_scope.variable_scope("stage2", use_resource=True):
x = block("b", 2, 64, 1, x)
return x, label
def stage3(x, label):
with variable_scope.variable_scope("stage3", use_resource=True):
x = math_ops.reduce_mean(x, axis=[1, 2])
x = fc(x, 100)
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x,
labels=label))
return loss
pipelining_test_util.PipelineTester.compare_pipeline_to_sharding(
[stage1, stage2, stage3],
lambda: [], [],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
38555,
schedule=pipelining_ops.PipelineSchedule.Interleaved)
@parameterized.named_parameters(*PIPELINE_COMPARE_TEST_CASES)
@test_util.deprecated_graph_mode_only
def testPipelineCompare3(self, opt_type, opt_args):
if utils.running_on_ipu_model():
self.skipTest("Replicated top level graphs are not supported on the "
"IPU_MODEL target")
def dataset_fn():
dataset = tu.create_single_increasing_dataset(10, shape=[4])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
label = math_ops.reduce_mean(value, axis=[1])
return math_ops.cast(value,
np.int32), math_ops.cast(label / 10, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 20
repeat_count = 2
def optimizer_fn():
return opt_type(*opt_args)
def stage1(idx, label):
with variable_scope.variable_scope("stage1", use_resource=True):
embedding = variable_scope.get_variable(
"c",
shape=[10, 1216],
dtype=np.float32,
initializer=init_ops.constant_initializer(10.01),
trainable=True)
x = embedding_ops.embedding_lookup(embedding, idx)
return x, label
def stage2(x, label):
with variable_scope.variable_scope("stage2", use_resource=True):
return x, label
def stage3(x, label):
with variable_scope.variable_scope("stage3", use_resource=True):
return x, label
def stage4(x, label):
with variable_scope.variable_scope("stage4", use_resource=True):
logits = math_ops.reduce_sum(x, axis=[-1])
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=logits,
labels=label))
return loss
pipelining_test_util.PipelineTester.compare_pipeline_to_cpu(
[stage1, stage2, stage3, stage4],
lambda: [], [],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
12600,
schedule=pipelining_ops.PipelineSchedule.Interleaved)
@parameterized.named_parameters(*PIPELINE_COMPARE_TEST_CASES)
@test_util.deprecated_graph_mode_only
def testPipelineCompareSharedWeights(self, opt_type, opt_args):
def dataset_fn():
dataset = tu.create_single_increasing_dataset(7, shape=[4, 4])
def dataset_parser(value):
img = value
label = value[0][0] % 4
return img, math_ops.cast(label, np.int32)
dataset = dataset.map(dataset_parser)
return dataset.batch(batch_size=2, drop_remainder=True)
gradient_accumulation_count = 20
repeat_count = 2
def optimizer_fn():
return opt_type(*opt_args)
def stage1(x, label):
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable(
"w0",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
return x, label
def stage2(x, label):
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable(
"w1",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
return x, label
def stage3(x, label):
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable(
"w2",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
return x, label
def stage4(x, label):
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable(
"w3",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
return x, label
def stage5(x, label):
# Reuse the weight here.
with variable_scope.variable_scope("vs", use_resource=True, reuse=True):
weight = variable_scope.get_variable(
"w0",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
logits = math_ops.reduce_mean(x, axis=[1])
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=logits,
labels=label))
return loss
def inputs_fn():
with ops.device('cpu'):
return []
with self.assertRaisesRegex(NotImplementedError,
"The pipelining schedule"):
pipelining_test_util.PipelineTester.compare_pipeline_to_cpu(
[stage1, stage2, stage3, stage4, stage5],
inputs_fn, [10.01],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
21458,
schedule=pipelining_ops.PipelineSchedule.Interleaved,
device_mapping=[0, 1, 2, 3, 0])
@test_util.deprecated_graph_mode_only
def testStageOptionsNotEnough(self):
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed8")
with ops.device('cpu'):
y = array_ops.placeholder(np.float32, shape=[])
def stage1(x):
return x * y
def stage2(x):
return x
def model_pipeline(x):
return pipelining_ops.pipeline(
[stage1, stage2],
10,
inputs=[x],
outfeed_queue=outfeed_queue,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved,
forward_propagation_stages_poplar_options=[
pipelining_ops.PipelineStageOptions()
])
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[1, 4, 4, 2])
y = array_ops.placeholder(np.float32, shape=[])
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(
ValueError,
'forward_propagation_stages_poplar_options must be a list or a tuple'
):
ipu_compiler.compile(model_pipeline, inputs=[x])
@test_util.deprecated_graph_mode_only
def testStageOptionsWUWrongType(self):
dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
a = value
b = (value + 10.) / 2.0
idx = value[0][0][0][0]
return {"a": a, "b": b, "idx": idx}
dataset = dataset.map(dataset_parser)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed10")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed10")
def stage1(c, **kwargs):
y = layers.Conv2D(2,
1,
use_bias=True,
kernel_initializer=init_ops.ones_initializer(),
name='conv1')(kwargs["a"])
y = normalization_ops.group_norm(y)
return y + kwargs["b"], c, kwargs["idx"]
def stage2(x, c, idx):
return x, c, idx
def stage3(x, c, idx):
return layers.Dense(
2,
kernel_initializer=init_ops.ones_initializer(),
bias_initializer=init_ops.ones_initializer())(x), c, idx
def stage4(x, c, idx):
return math_ops.reduce_sum(
layers.Dense(
2,
kernel_initializer=init_ops.ones_initializer(),
bias_initializer=init_ops.ones_initializer())(x)) + c, idx
def optimizer_function(loss, _):
def func(grad, _):
return clip_ops.clip_by_value(grad, -1., 1.)
opt = map_gradient_optimizer.MapGradientOptimizer(
gradient_descent.GradientDescentOptimizer(0.01), func)
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
def my_net(c):
return pipelining_ops.pipeline(
[stage1, stage2, stage3, stage4],
12,
inputs=[c],
optimizer_function=optimizer_function,
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
pipeline_schedule=pipelining_ops.PipelineSchedule.Interleaved,
weight_update_poplar_options={"dead": "beaf"})
with ops.device('cpu'):
c = array_ops.placeholder(np.float32, shape=[])
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(
TypeError,
'weight_update_poplar_options to be of type PipelineStageOptions'):
ipu_compiler.compile(my_net, inputs=[c])
@test_util.deprecated_graph_mode_only
def testOutfeedLossRequiresOutfeedAndOptimizerFunction(self):
def identity(x):
return x
def optimizer_function(loss):
opt = gradient_descent.GradientDescentOptimizer(0.01)
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
with ops.device("/device:IPU:0"):
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed11")
with self.assertRaisesRegex(ValueError,
"An optimizer_function must be provided"):
pipelining_ops.pipeline([identity, identity, identity, identity],
gradient_accumulation_count=4,
inputs=[1.0],
outfeed_queue=outfeed_queue,
outfeed_loss=True)
with self.assertRaisesRegex(ValueError,
"An outfeed_queue must be provided"):
pipelining_ops.pipeline([identity, identity, identity, identity],
gradient_accumulation_count=4,
inputs=[1.0],
optimizer_function=optimizer_function,
outfeed_loss=True)
@test_util.deprecated_graph_mode_only
def testOutfeedLoss(self):
with tu.ipu_session() as sess:
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", initializer=1.0)
return w * x
def identity(x):
return x
def optimizer_function(x):
opt = gradient_descent.GradientDescentOptimizer(0.01)
loss = x + 1.0
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed12")
def my_net(x):
return pipelining_ops.pipeline([stage1, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
outfeed_loss=True)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[0.0])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
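# With w == 1.0 and input x == 0.0, every one of the 8 iterations computes
# loss = w * x + 1.0 = 1.0, so the outfeed holds eight ones.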
self.assertAllEqual(np.ones(8), sess.run(outfed))
@test_util.deprecated_graph_mode_only
def testOutfeedMaskRequiresOutfeedAndOptimizerFunction(self):
def identity(x):
return x
def optimizer_function(loss):
opt = gradient_descent.GradientDescentOptimizer(0.01)
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
with ops.device("/device:IPU:0"):
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue()
with self.assertRaisesRegex(ValueError,
"An optimizer_function must be provided"):
pipelining_ops.pipeline([identity, identity, identity, identity],
gradient_accumulation_count=4,
inputs=[1.0],
outfeed_queue=outfeed_queue,
outfeed_mask=[False])
with self.assertRaisesRegex(ValueError,
r".*no outfeed_queue has been provided"):
pipelining_ops.pipeline([identity, identity, identity, identity],
gradient_accumulation_count=4,
inputs=[1.0],
optimizer_function=optimizer_function,
outfeed_mask=[False])
with self.assertRaisesRegex(
ValueError, "Only one of `outfeed_loss` and "
"`outfeed_mask` can be set."):
pipelining_ops.pipeline([identity, identity, identity, identity],
gradient_accumulation_count=4,
inputs=[1.0],
optimizer_function=optimizer_function,
outfeed_queue=outfeed_queue,
outfeed_mask=[False],
outfeed_loss=True)
@test_util.deprecated_graph_mode_only
def testOutfeedMask(self):
with tu.ipu_session() as sess:
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", initializer=1.0)
return x, w * x
def stage(x, x2):
return x, x2 + 1
def optimizer_function(x, _):
opt = gradient_descent.GradientDescentOptimizer(0.01)
return pipelining_ops.OptimizerFunctionOutput(opt, x)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue()
def my_net(x):
return pipelining_ops.pipeline([stage1, stage, stage, stage],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
outfeed_mask=[True, False])
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[1.0])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = False
cfg.ipu_model.tiles_per_ipu = 2
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
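# stage1 outputs (x, w * x) = (1, 1) and each of the three subsequent stages adds 1
# to the second output, so the unmasked second output is 4 for all 8 iterations.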
self.assertAllEqual(np.full((1, 8), 4), sess.run(outfed))
@test_util.deprecated_graph_mode_only
def testOutfeedLossAccumulated(self):
""" Tests accumulating the loss from the optimizer function. """
cfg = IPUConfig()
report_helper = tu.ReportHelper()
report_helper.set_autoreport_options(cfg)
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
with tu.ipu_session() as sess:
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", initializer=1.0)
return w * x
def identity(x):
return x
def optimizer_function(x):
opt = gradient_descent.GradientDescentOptimizer(0.01)
loss = x + 1.0
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed13")
def my_net(x):
return pipelining_ops.pipeline([stage1, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
outfeed_loss=True,
accumulate_outfeed=True)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[0.0])
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
# Loss of '1' is accumulated 8 times.
self.assertAllEqual([8], sess.run(outfed))
# There should be 2 GA-adds. One for the weight and one for the outfeed.
report_json = pva.openReport(report_helper.find_report())
ok = ['GradientAccumulatorAdd', 'GradientAccumulatorAdd_1']
self.assert_compute_sets_contain_list(report_json, ok)
@test_util.deprecated_graph_mode_only
def testOutfeedAccumulatedTraining(self):
"""
Tests accumulating an output from the last computational stage when
training.
"""
cfg = IPUConfig()
report_helper = tu.ReportHelper()
report_helper.set_autoreport_options(cfg)
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
with tu.ipu_session() as sess:
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", initializer=1.0)
return w * x
def identity(x):
return x
def optimizer_function(x):
opt = gradient_descent.GradientDescentOptimizer(0.01)
loss = x + 1.0
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed13")
def my_net(x):
return pipelining_ops.pipeline([stage1, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
accumulate_outfeed=True)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[1.0])
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
# '1' is accumulated 8 times.
self.assertAllEqual([[8]], sess.run(outfed))
report_json = pva.openReport(report_helper.find_report())
# There should be 2 GA-adds. One for the weight and one for the outfeed.
ok = ['GradientAccumulatorAdd', 'GradientAccumulatorAdd_1']
self.assert_compute_sets_contain_list(report_json, ok)
@test_util.deprecated_graph_mode_only
def testOutfeedAccumulatedTrainingSetDtype(self):
"""
Tests accumulating a float16 loss, setting the accumulator dtype to float32
to avoid overflow.
"""
with tu.ipu_session() as sess:
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed13")
outfeed_queue2 = ipu_outfeed_queue.IPUOutfeedQueue("__feed14")
def my_net(dtype, x):
w_name = 'w1' if not dtype else 'w'
outfeed = outfeed_queue if not dtype else outfeed_queue2
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name=w_name, initializer=1.0)
return w * x
def identity(x):
return math_ops.cast(x + 10000, np.float16)
def optimizer_function(x):
opt = gradient_descent.GradientDescentOptimizer(0.01)
loss = x + 1.0
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
return pipelining_ops.pipeline([stage1, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed,
optimizer_function=optimizer_function,
accumulate_outfeed=True,
accumulate_outfeed_dtype=dtype)
with ops.device("/device:IPU:0"):
pipeline_16 = ipu_compiler.compile(partial(my_net, None), inputs=[1.0])
pipeline_32 = ipu_compiler.compile(partial(my_net, np.float32),
inputs=[1.0])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
outfed2 = outfeed_queue2.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline_16)
# Buffer overflows float16
val = sess.run(outfed)[0]
self.assertTrue(val > np.finfo(np.float16).max
or val < np.finfo(np.float16).min)
sess.run(pipeline_32)
# '1' (w * x) is accumulated 8 times, plus 3 identity stages * 8 accumulations * 10000 added along the way
self.assertAllEqual([[240008]], sess.run(outfed2))
@test_util.deprecated_graph_mode_only
def testOutfeedAccumulatedTrainingMultipleOutputs(self):
"""
Tests accumulating two outputs from the last computational stage when
training.
"""
cfg = IPUConfig()
report_helper = tu.ReportHelper()
report_helper.set_autoreport_options(cfg)
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
with tu.ipu_session() as sess:
def stage1(x, y):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", initializer=1.0)
return w * x, y
def identity(x, y):
return x, y
def optimizer_function(x, y):
opt = gradient_descent.GradientDescentOptimizer(0.01)
loss = x + y + 1.0
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed13")
def my_net(x, y):
return pipelining_ops.pipeline([stage1, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x, y],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
accumulate_outfeed=True)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[1.0, 2.0])
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
# '1' is accumulated 8 times, '2' is accumulated 8 times.
self.assertAllEqual([[8], [16]], sess.run(outfed))
report_json = pva.openReport(report_helper.find_report())
# There should be 3 GA-adds. One for the weight and one for each output.
ok = [
'GradientAccumulatorAdd', 'GradientAccumulatorAdd_1',
'GradientAccumulatorAdd_2'
]
self.assert_compute_sets_contain_list(report_json, ok)
@test_util.deprecated_graph_mode_only
def testOutfeedAccumulatedInference(self):
""" Tests accumulating an output from the last computational stage. """
cfg = IPUConfig()
report_helper = tu.ReportHelper()
report_helper.set_autoreport_options(cfg)
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
with tu.ipu_session() as sess:
def identity(x):
return x
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed13")
def my_net(x):
return pipelining_ops.pipeline(
[identity, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue,
accumulate_outfeed=True)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[1.0])
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
# '1' is accumulated 8 times.
self.assertAllEqual([[8]], sess.run(outfed))
report_json = pva.openReport(report_helper.find_report())
# There should be 1 GA-add for the outfeed.
ok = ['GradientAccumulatorAdd']
self.assert_compute_sets_contain_list(report_json, ok)
@test_util.deprecated_graph_mode_only
def testOutfeedAccumulatedInferenceMultipleOutputs(self):
""" Tests accumulating 2 outputs from the last computational stage. """
cfg = IPUConfig()
report_helper = tu.ReportHelper()
report_helper.set_autoreport_options(cfg)
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
with tu.ipu_session() as sess:
def identity(x, y):
return x, y
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed13")
def my_net(x, y):
return pipelining_ops.pipeline(
[identity, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x, y],
outfeed_queue=outfeed_queue,
accumulate_outfeed=True)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[1.0, 2.0])
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
# '1' is accumulated 8 times, '2' is accumulated 8 times.
self.assertAllEqual([[8], [16]], sess.run(outfed))
report_json = pva.openReport(report_helper.find_report())
# There should be a GA-add for each output from the last stage.
ok = ['GradientAccumulatorAdd', 'GradientAccumulatorAdd_1']
self.assert_compute_sets_contain_list(report_json, ok)
@test_util.deprecated_graph_mode_only
def testOutfeedDictInference(self):
with tu.ipu_session() as sess:
def identity(x):
return x
def dictstage(x):
return {"x": x}
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed13")
def my_net(x):
return pipelining_ops.pipeline(
[identity, identity, identity, dictstage],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[1.0])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfed = outfeed_queue.dequeue()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
got = sess.run(outfed)
self.assertIsInstance(got, dict)
self.assertAllEqual(np.ones(8), got["x"])
@test_util.deprecated_graph_mode_only
def testOutfeedAccumulatedTrainingRequiresOutfeedALL(self):
"""
Tests that the pipeline op requires a user to give an outfeed of mode ALL
when accumulating the outfeed.
"""
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", initializer=1.0)
return w * x
def identity(x):
return x
def optimizer_function(x):
opt = gradient_descent.GradientDescentOptimizer(0.01)
loss = x + 1.0
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue(
"__feed13", outfeed_mode=ipu_outfeed_queue.IPUOutfeedMode.LAST)
def my_net(x):
return pipelining_ops.pipeline([stage1, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
accumulate_outfeed=True)
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(
ValueError,
"To accumulate the outfeed, it must be in IPUOutfeedMode ALL."):
ipu_compiler.compile(my_net, inputs=[1.0])
@test_util.deprecated_graph_mode_only
def testGradientShapeInference(self):
with tu.ipu_session():
variable_shape = (1, 2, 3)
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", shape=variable_shape)
return w * x
def stage2(x):
return x
class MockOptimizer(gradient_descent.GradientDescentOptimizer): # pylint: disable=abstract-method
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
self.applied_gradients = [g for (g, _) in grads_and_vars]
return super().apply_gradients(grads_and_vars, global_step, name)
optimizer = MockOptimizer(0.01)
def optimizer_function(loss):
return pipelining_ops.OptimizerFunctionOutput(optimizer, loss)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed14")
def my_net(x):
return pipelining_ops.pipeline([stage1, stage2],
gradient_accumulation_count=4,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function)
with ops.device("/device:IPU:0"):
ipu_compiler.compile(my_net, inputs=[0.0])
self.assertEqual(1, len(optimizer.applied_gradients))
self.assertEqual(variable_shape, optimizer.applied_gradients[0].shape)
@test_util.deprecated_graph_mode_only
def testVariableInOptimizer(self):
with tu.ipu_session() as sess:
def stage1(x):
with variable_scope.variable_scope("stage1", use_resource=True):
w = variable_scope.get_variable(name="w", initializer=1.0)
return w * x
def identity(x):
return x
class MockOptimizer(gradient_descent.GradientDescentOptimizer): # pylint: disable=abstract-method
def __init__(self, lr):
super(MockOptimizer, self).__init__(lr)
with variable_scope.variable_scope("optimizer", use_resource=True):
self.p = variable_scope.get_variable(name="p",
initializer=2.0,
trainable=False)
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
grads_and_vars = [(g + self.p, v) for (g, v) in grads_and_vars]
return super().apply_gradients(grads_and_vars, global_step, name)
def optimizer_function(x):
opt = MockOptimizer(0.5)
return pipelining_ops.OptimizerFunctionOutput(opt, x)
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed15")
def my_net(x):
return pipelining_ops.pipeline([stage1, identity, identity, identity],
gradient_accumulation_count=8,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
outfeed_loss=True)
with ops.device("/device:IPU:0"):
pipeline = ipu_compiler.compile(my_net, inputs=[1.0])
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
sess.run(variables.global_variables_initializer())
sess.run(pipeline)
# Accumulate 8 lots of gradient of 1.0 => 8.0, then add 2.0 then
# apply LR and subtract from the original weight:
#
# 1.0 - (8.0 + 2.0) * 0.5 = -4.0
for v in ops.get_default_graph().get_collection('variables'):
if v.name == "stage1/w:0":
new_v = sess.run(v)
self.assertEqual(new_v, -4.0)
# Now change the optimizer variable
for v in ops.get_default_graph().get_collection('variables'):
if v.name == "optimizer/p:0":
sess.run(v.assign(4.0))
sess.run(pipeline)
# Accumulate 8 lots of gradient of 1.0 => 8.0, then add 4.0, then
# apply LR and subtract from the original weight:
#
# -4.0 - (8.0 + 4.0) * 0.5 = -10.0
for v in ops.get_default_graph().get_collection('variables'):
if v.name == "stage1/w:0":
new_v = sess.run(v)
self.assertEqual(new_v, -10.0)
@test_util.deprecated_graph_mode_only
def testPipelineInferenceWithConditional(self):
dataset = tu.create_single_increasing_dataset(10, shape=[1])
dataset = dataset.batch(batch_size=1, drop_remainder=True)
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed16")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed16")
def stage1(x):
return x
def stage2(x):
return x
def stage3(x):
p = x > 2.0
return control_flow_ops.cond(p, lambda: constant_op.constant(1.0),
lambda: constant_op.constant(2.0))
def my_net():
return pipelining_ops.pipeline([stage1, stage2, stage3],
6,
inputs=[],
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue)
with tu.ipu_session() as sess:
with ops.device("/device:IPU:0"):
r = ipu_compiler.compile(my_net)
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
outfeed_op = outfeed_queue.dequeue()
sess.run(infeed_queue.initializer)
sess.run(r)
output = sess.run(outfeed_op)
self.assertAllClose(output, [2.0, 2.0, 2.0, 1.0, 1.0, 1.0])
@test_util.deprecated_graph_mode_only
def testPipelineWithCustomGradientFunction(self):
def dataset_fn():
dataset = tu.create_single_increasing_dataset(10, shape=[4])
dataset = dataset.batch(batch_size=4, drop_remainder=True)
def dataset_parser(value):
label = math_ops.reduce_mean(value, axis=[1])
return value, math_ops.cast(label / 10, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 24
repeat_count = 2
def optimizer_fn():
return gradient_descent.GradientDescentOptimizer(0.01)
@custom_gradient.custom_gradient
def f(x):
x = x * x
def grad(dy):
return dy * x
return x, grad
def stage1(x, label):
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable(
"w2",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
return x, label
def stage2(x, label):
return f(x), label
def stage3(x, label):
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x, labels=label))
return loss
def inputs_fn():
with ops.device('cpu'):
return []
pipelining_test_util.PipelineTester.compare_pipeline_to_cpu(
[stage1, stage2, stage3],
inputs_fn, [],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
14415,
schedule=pipelining_ops.PipelineSchedule.Grouped)
@test_util.deprecated_graph_mode_only
def testPipelineWithLoop(self):
def dataset_fn():
dataset = tu.create_single_increasing_dataset(10, shape=[4])
dataset = dataset.batch(batch_size=4, drop_remainder=True)
def dataset_parser(value):
label = math_ops.reduce_mean(value, axis=[1])
return value, math_ops.cast(label / 10, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 24
repeat_count = 2
def optimizer_fn():
return gradient_descent.GradientDescentOptimizer(0.01)
def stage1(x, label):
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable(
"w2",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
return x, label
def stage2(x, label):
x = control_flow_ops.while_loop(lambda i, _: i < 10,
lambda i, x: (i + 1, x * x), (0, x),
maximum_iterations=5)[1]
return x, label
def stage3(x, label):
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x, labels=label))
return loss
def inputs_fn():
with ops.device('cpu'):
return []
pipelining_test_util.PipelineTester.compare_pipeline_to_cpu(
[stage1, stage2, stage3], inputs_fn, [], repeat_count,
gradient_accumulation_count, dataset_fn, optimizer_fn, self, 11326)
@test_util.deprecated_graph_mode_only
def testPipelineWithTensorArray(self):
def dataset_fn():
dataset = tu.create_single_increasing_dataset(10, shape=[4])
dataset = dataset.batch(batch_size=4, drop_remainder=True)
def dataset_parser(value):
label = math_ops.reduce_mean(value, axis=[1])
return math_ops.cast(value,
np.int8), math_ops.cast(label / 10, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 24
repeat_count = 2
def optimizer_fn():
return gradient_descent.GradientDescentOptimizer(0.01)
def stage1(x, label):
x = math_ops.cast(x, np.float32)
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable(
"w2",
shape=[4, 4],
dtype=np.float32,
initializer=init_ops.ones_initializer())
x = math_ops.matmul(x, weight)
return x, label
def stage2(x, label):
ta = tensor_array_ops.TensorArray(dtype=np.float32, size=4)
def body(i, tx):
tx = tx.write(i, math_ops.cast(i * 2, np.float32))
return i + 1, tx
ta = control_flow_ops.while_loop(lambda i, _: i < 4,
body, (0, ta),
maximum_iterations=5)[1]
return x * ta.stack(), label
def stage3(x, label):
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x, labels=label))
return loss
def inputs_fn():
with ops.device('cpu'):
return []
pipelining_test_util.PipelineTester.compare_pipeline_to_cpu(
[stage1, stage2, stage3], inputs_fn, [], repeat_count,
gradient_accumulation_count, dataset_fn, optimizer_fn, self, 11326)
@test_util.deprecated_graph_mode_only
def testPipelineWithEmbeddingOptimization(self):
dataset_size = 100
embedding_size = 15
def dataset_fn():
dataset = tu.create_single_increasing_dataset(dataset_size, shape=[4])
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
label = math_ops.reduce_mean(value, axis=[1])
return math_ops.cast(value,
np.int32), math_ops.cast(label % 4, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 8
repeat_count = 2
def optimizer_fn():
return gradient_descent.GradientDescentOptimizer(0.01)
np.random.seed(1)
embedding_shape = (dataset_size, embedding_size)
embedding_initializer = np.random.normal(0, 1, embedding_shape).astype(
np.float32)
weights_shape = (embedding_size, embedding_size)
weights_initializer = np.random.normal(0, 1,
weights_shape).astype(np.float32)
def stage1(idx, label):
with variable_scope.variable_scope("stage1", use_resource=True):
embedding = variable_scope.get_variable(
"c",
dtype=np.float32,
initializer=embedding_initializer,
trainable=True)
x = embedding_ops.embedding_lookup(embedding, idx)
return x, label
def stage2(x, label):
with variable_scope.variable_scope("vs", use_resource=True):
weight = variable_scope.get_variable("w0",
dtype=np.float32,
initializer=weights_initializer,
trainable=True)
x = math_ops.matmul(x, weight)
return x, label
def stage3(x, label):
x = math_ops.reduce_sum(x, axis=[-1])
return x, label
def stage4(x, label):
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x, labels=label))
return loss
def inputs_fn():
with ops.device('cpu'):
return []
pipelining_test_util.PipelineTester.compare_pipeline_to_sharding(
[stage1, stage2, stage3, stage4],
inputs_fn, [],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
12049,
schedule=pipelining_ops.PipelineSchedule.Interleaved)
@test_util.deprecated_graph_mode_only
def testGradientAccumulationDtype(self):
gradient_accumulation_count = 8
gradient_accumulation_dtype = np.float32
x = np.finfo(np.float16).max
y = np.array(0.0, dtype=np.float16)
initial_w = np.array(1.0, dtype=np.float16)
learning_rate = 2**-10
features = np.repeat(x, gradient_accumulation_count)
labels = np.repeat(y, gradient_accumulation_count)
dataset = dataset_ops.Dataset.from_tensor_slices((features, labels))
infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "infeed")
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("outfeed")
grad_outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("grad_outfeed")
def stage1(features, labels):
w = variable_scope.get_variable(name="w", initializer=initial_w)
partial = w * features
return partial, labels
def stage2(partial, labels):
loss = partial + labels
return loss
def identity(*args):
return args
def optimizer_function(loss):
class CastingGradientDescent(optimizer_lib.Optimizer): # pylint: disable=abstract-method
"""Compute update using the dtype of the gradient, and then cast to
the dtype of the variable."""
def __init__(self, outer):
self.outer = outer
super().__init__(use_locking=False, name="CastingGradientDescent")
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
update_ops = []
for (grad, var) in grads_and_vars:
self.outer.assertEqual(grad.dtype, gradient_accumulation_dtype)
update_ops.append(grad_outfeed_queue.enqueue(grad))
delta = math_ops.cast(-learning_rate * grad, var.dtype)
update_ops.append(var.assign_add(delta))
return control_flow_ops.group(*update_ops)
opt = CastingGradientDescent(self)
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
def model():
pipeline_op = pipelining_ops.pipeline(
computational_stages=[stage1, identity, identity, stage2],
gradient_accumulation_count=gradient_accumulation_count,
gradient_accumulation_dtype=gradient_accumulation_dtype,
infeed_queue=infeed_queue,
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
name="Pipeline")
return pipeline_op
def compiled_model():
with ops.device("/device:IPU:0"):
return ipu_compiler.compile(model)
with tu.ipu_session() as sess:
train_op = compiled_model()
dequeued_gradient = grad_outfeed_queue.dequeue()
cfg = IPUConfig()
cfg.ipu_model.compile_ipu_code = True
cfg.ipu_model.tiles_per_ipu = 128
cfg.auto_select_ipus = 4
cfg.configure_ipu_system()
utils.move_variable_initialization_to_cpu()
sess.run(infeed_queue.initializer)
sess.run(variables.global_variables_initializer())
sess.run(train_op)
[actual_accumulated_gradient] = sess.run(dequeued_gradient)
# L(x) = w * x + y
# dL(x)/dw = x
# This would overflow in fp16:
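# (np.finfo(np.float16).max is 65504, so the accumulated gradient is
# 8 * 65504 = 524032, beyond the float16 range but exactly representable
# in float32.)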
expected_accumulated_gradient = gradient_accumulation_count * x.astype(
gradient_accumulation_dtype)
self.assertAllEqual(expected_accumulated_gradient,
actual_accumulated_gradient)
sess.run(infeed_queue.deleter)
sess.run(outfeed_queue.deleter)
sess.run(grad_outfeed_queue.deleter)
@test_util.deprecated_graph_mode_only
@tu.test_uses_ipus(num_ipus=4)
def testGradientAccumulationDtypeTiedEmbedding(self):
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("outfeed")
with ops.device('cpu'):
indices = array_ops.placeholder(np.int32, [8])
def stage1(indices):
# Do an embedding lookup on a float16 embedding table.
with variable_scope.variable_scope("vs", use_resource=True):
table = variable_scope.get_variable(
name="table",
shape=[300, 300],
dtype=dtypes.float16,
initializer=init_ops.ones_initializer())
return array_ops.gather(table, indices)
def identity(*args):
return args
def stage2(partials):
# Do a projection on the same float16 embedding table.
# Since the table has two (non-consecutive) pipeline stage users, and one
# of those users is a valid AllocationFinder target, the gradient buffer
# for the table will be allocated immediately in the DeferredVisitor.
# When we accumulate in a different data type to the table, the buffer
# should be allocated as the accumulating data type, not the table's data
# type.
with variable_scope.variable_scope("vs", use_resource=True, reuse=True):
table = variable_scope.get_variable(
name="table",
shape=[300, 300],
dtype=dtypes.float16,
initializer=init_ops.ones_initializer())
return math_ops.matmul(partials, table)
def optimizer_function(loss):
class CastingGradientDescent(optimizer_lib.Optimizer): # pylint: disable=abstract-method
"""Compute update using the dtype of the gradient, and then cast to
the dtype of the variable."""
def __init__(self):
super().__init__(use_locking=False, name="CastingGradientDescent")
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
update_ops = []
for (grad, var) in grads_and_vars:
# Cast the gradient to be the var's dtype when applying in the WU.
delta = math_ops.cast(-0.01 * grad, var.dtype)
update_ops.append(var.assign_add(delta))
return control_flow_ops.group(*update_ops)
opt = CastingGradientDescent()
return pipelining_ops.OptimizerFunctionOutput(opt, loss)
def model():
return pipelining_ops.pipeline(
# There must be 4 stages here, otherwise:
# - there won't be >1 users of the gradient buffer because
# - both accs on the buffer will be on the same bwd stage since
# - the PipelineGradientAccumulationOptimizer didn't trigger because
# - it avoids putting size 0 FIFOs between consecutive stages.
# a.k.a. the two stage users of the GA buffer can't be consecutive.
computational_stages=[stage1, identity, identity, stage2],
device_mapping=[0, 1, 1, 0],
gradient_accumulation_count=8,
# Accumulate the float16 embedding table's gradient in float32
gradient_accumulation_dtype=dtypes.float32,
inputs=[indices],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
name="Pipeline")
with ops.device("/device:IPU:0"):
train_op = ipu_compiler.compile(model)
cfg = utils.create_ipu_config(profiling=True, profile_execution=True)
cfg = utils.set_ipu_model_options(cfg,
compile_ipu_code=True,
tiles_per_ipu=128)
cfg = utils.auto_select_ipus(cfg, 4)
utils.configure_ipu_system(cfg)
utils.move_variable_initialization_to_cpu()
with tu.ipu_session() as sess:
sess.run(variables.global_variables_initializer())
sess.run(train_op, feed_dict={indices: np.ones([8], dtype=np.int32)})
@test_util.deprecated_graph_mode_only
def testPipeliningArgsAndKwargs(self):
outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("args_kwargs_outfeed")
def stage1(x):
return x + 1
def stage2(x):
y = layers.Conv2D(2,
1,
use_bias=True,
bias_initializer=init_ops.ones_initializer(),
kernel_initializer=init_ops.ones_initializer())(x)
loss = math_ops.reduce_sum(y)
return loss
def optimizer_function(loss):
opt = gradient_descent.GradientDescentOptimizer(0.01)
# Empty var list.
compute_gradients_args = ([],)
return pipelining_ops.OptimizerFunctionOutput(opt, loss,
compute_gradients_args)
def my_net(x):
return pipelining_ops.pipeline(
[stage1, stage2],
10,
inputs=[x],
outfeed_queue=outfeed_queue,
optimizer_function=optimizer_function,
pipeline_schedule=pipelining_ops.PipelineSchedule.Grouped)
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[1, 4, 4, 2])
with ops.device("/device:IPU:0"):
with self.assertRaisesRegex(ValueError, 'No variables to optimize.'):
ipu_compiler.compile(my_net, inputs=[x])
@parameterized.named_parameters(*PIPELINE_COMPARE_TEST_CASES)
@test_util.deprecated_graph_mode_only
def testPipelineCompareMultiIPUStage(self, opt_type, opt_args):
# Resnet like network.
def dataset_fn():
dataset = tu.create_single_increasing_dataset(100, shape=[4])
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
img = value
label = math_ops.reduce_mean(img, axis=[1, 2, 3])
return img, math_ops.cast(label, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 18
repeat_count = 2
def optimizer_fn():
return opt_type(*opt_args)
def fixed_padding(inputs, kernel_size):
pad_total = kernel_size - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
padded_inputs = array_ops.pad(
inputs, [[0, 0], [pad_beg, pad_end], [pad_beg, pad_end], [0, 0]])
return padded_inputs
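# e.g. kernel_size=3 gives pad_total=2, split as (pad_beg, pad_end) = (1, 1);
# kernel_size=7 gives (3, 3). This is the explicit padding applied before
# strided convolutions below.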
def block(name, first_stride, out_filters, count, x):
for i in range(count):
shape_in = x.shape
stride = first_stride if (i == 0) else 1
if stride > 1:
x = fixed_padding(x, 3)
sc = x
with variable_scope.variable_scope(name + "/" + str(i) + "/1"):
x = conv(x, 3, stride, out_filters)
x = nn.relu(x)
with variable_scope.variable_scope(name + "/" + str(i) + "/2"):
x = conv(x, 3, 1, out_filters)
# shortcut
if stride != 1:
sc = array_ops.strided_slice(sc, [0, 0, 0, 0],
sc.shape,
strides=[1, stride, stride, 1])
pad = int(x.shape[3] - shape_in[3])
if pad != 0:
sc = array_ops.pad(sc, paddings=[[0, 0], [0, 0], [0, 0], [0, pad]])
x = nn.relu(x + sc)
return x
def fc(x, num_units_out):
return layers.Dense(
num_units_out,
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def max_pool(x, ksize=3, stride=2):
return layers.MaxPooling2D(ksize, stride, padding='SAME')(x)
def conv(x, ksize, stride, filters_out):
return layers.Conv2D(
filters_out,
ksize,
stride,
'SAME',
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def stage1(img, label):
with variable_scope.variable_scope("stage1", use_resource=True):
x = conv(img, 7, 2, 16)
x = nn.relu(x)
x = max_pool(x, ksize=3, stride=2)
return x, label
def stage2(x, label):
with variable_scope.variable_scope("stage2", use_resource=True):
x = block("b", 2, 64, 1, x)
return x, label
def stage3(x, label):
with variable_scope.variable_scope("stage3", use_resource=True):
x = math_ops.reduce_mean(x, axis=[1, 2])
x = fc(x, 100)
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x,
labels=label))
return loss
pipelining_test_util.PipelineTester.compare_pipeline_to_sharding(
[stage1, stage2, stage3],
lambda: [], [],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
53362,
device_mapping=[pipelining_ops._ALL_DEVICES, 0, 1]) # pylint: disable=W0212
@parameterized.named_parameters(*PIPELINE_COMPARE_TEST_CASES)
@test_util.deprecated_graph_mode_only
def testPipelineCompareParStages(self, opt_type, opt_args):
# Resnet like network.
def dataset_fn():
dataset = tu.create_single_increasing_dataset(100, shape=[4])
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
img = value
label = math_ops.reduce_mean(img, axis=[1, 2, 3])
return img, math_ops.cast(label, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 18
repeat_count = 2
def optimizer_fn():
return opt_type(*opt_args)
def fixed_padding(inputs, kernel_size):
pad_total = kernel_size - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
padded_inputs = array_ops.pad(
inputs, [[0, 0], [pad_beg, pad_end], [pad_beg, pad_end], [0, 0]])
return padded_inputs
def block(name, first_stride, out_filters, count, x):
for i in range(count):
shape_in = x.shape
stride = first_stride if (i == 0) else 1
if stride > 1:
x = fixed_padding(x, 3)
sc = x
with variable_scope.variable_scope(name + "/" + str(i) + "/1"):
x = conv(x, 3, stride, out_filters)
x = nn.relu(x)
with variable_scope.variable_scope(name + "/" + str(i) + "/2"):
x = conv(x, 3, 1, out_filters)
# shortcut
if stride != 1:
sc = array_ops.strided_slice(sc, [0, 0, 0, 0],
sc.shape,
strides=[1, stride, stride, 1])
pad = int(x.shape[3] - shape_in[3])
if pad != 0:
sc = array_ops.pad(sc, paddings=[[0, 0], [0, 0], [0, 0], [0, pad]])
x = nn.relu(x + sc)
return x
def fc(x, num_units_out):
return layers.Dense(
num_units_out,
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def max_pool(x, ksize=3, stride=2):
return layers.MaxPooling2D(ksize, stride, padding='SAME')(x)
def conv(x, ksize, stride, filters_out):
return layers.Conv2D(
filters_out,
ksize,
stride,
'SAME',
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def stage1(img, label):
with variable_scope.variable_scope("stage1", use_resource=True):
x = conv(img, 7, 2, 16)
x = nn.relu(x)
x = max_pool(x, ksize=3, stride=2)
return x, label
def stage2a(x, _):
with variable_scope.variable_scope("stage2a", use_resource=True):
x = block("b", 2, 64, 1, x)
return x
def stage2b(x, label):
with variable_scope.variable_scope("stage2b", use_resource=True):
x = block("b", 2, 64, 1, x)
return x, label
def stage3(xa, xb, label):
with variable_scope.variable_scope("stage3", use_resource=True):
x = xa + xb
x = math_ops.reduce_mean(x, axis=[1, 2])
x = fc(x, 100)
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x,
labels=label))
return loss
pipelining_test_util.PipelineTester.compare_pipeline_to_sharding(
[stage1, [stage2a, stage2b], stage3],
lambda: [], [],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
53362,
device_mapping=[0, [0, 1], 1])
@parameterized.named_parameters(*PIPELINE_COMPARE_TEST_CASES)
@test_util.deprecated_graph_mode_only
def testPipelineCompareParStagesInfeed(self, opt_type, opt_args):
# Resnet like network.
def dataset_fn():
dataset = tu.create_single_increasing_dataset(100, shape=[4])
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=32, drop_remainder=True)
dataset = dataset.batch(batch_size=2, drop_remainder=True)
def dataset_parser(value):
img = value
label = math_ops.reduce_mean(img, axis=[1, 2, 3])
return img, math_ops.cast(label, np.int32)
return dataset.map(dataset_parser)
gradient_accumulation_count = 18
repeat_count = 2
def optimizer_fn():
return opt_type(*opt_args)
def fixed_padding(inputs, kernel_size):
pad_total = kernel_size - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
padded_inputs = array_ops.pad(
inputs, [[0, 0], [pad_beg, pad_end], [pad_beg, pad_end], [0, 0]])
return padded_inputs
def block(name, first_stride, out_filters, count, x):
for i in range(count):
shape_in = x.shape
stride = first_stride if (i == 0) else 1
if stride > 1:
x = fixed_padding(x, 3)
sc = x
with variable_scope.variable_scope(name + "/" + str(i) + "/1"):
x = conv(x, 3, stride, out_filters)
x = nn.relu(x)
with variable_scope.variable_scope(name + "/" + str(i) + "/2"):
x = conv(x, 3, 1, out_filters)
# shortcut
if stride != 1:
sc = array_ops.strided_slice(sc, [0, 0, 0, 0],
sc.shape,
strides=[1, stride, stride, 1])
pad = int(x.shape[3] - shape_in[3])
if pad != 0:
sc = array_ops.pad(sc, paddings=[[0, 0], [0, 0], [0, 0], [0, pad]])
x = nn.relu(x + sc)
return x
def fc(x, num_units_out):
return layers.Dense(
num_units_out,
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def max_pool(x, ksize=3, stride=2):
return layers.MaxPooling2D(ksize, stride, padding='SAME')(x)
def conv(x, ksize, stride, filters_out):
return layers.Conv2D(
filters_out,
ksize,
stride,
'SAME',
kernel_initializer=init_ops.constant_initializer(0.1),
bias_initializer=init_ops.constant_initializer(0.0))(x)
def stage1a(img, _):
with variable_scope.variable_scope("stage1a", use_resource=True):
x = conv(img, 7, 2, 16)
x = nn.relu(x)
x = max_pool(x, ksize=3, stride=2)
return x
def stage1b(img, label):
with variable_scope.variable_scope("stage1b", use_resource=True):
x = conv(img, 7, 2, 16)
x = nn.softmax(x)
x = max_pool(x, ksize=3, stride=2)
return x, label
def stage2(a, b, label):
with variable_scope.variable_scope("stage2a", use_resource=True):
x = block("b", 2, 64, 1, a + b)
return x, label
def stage3(x, label):
with variable_scope.variable_scope("stage3", use_resource=True):
x = math_ops.reduce_mean(x, axis=[1, 2])
x = fc(x, 100)
loss = math_ops.reduce_mean(
nn.sparse_softmax_cross_entropy_with_logits(logits=x,
labels=label))
return loss
pipelining_test_util.PipelineTester.compare_pipeline_to_sharding(
[[stage1a, stage1b], stage2, stage3],
lambda: [], [],
repeat_count,
gradient_accumulation_count,
dataset_fn,
optimizer_fn,
self,
61059,
device_mapping=[[0, 1], 0, 1])
if __name__ == "__main__":
googletest.main()
| 35.06282 | 104 | 0.627362 | 11,542 | 96,002 | 4.97938 | 0.059608 | 0.031111 | 0.017748 | 0.0261 | 0.833136 | 0.809716 | 0.78379 | 0.754385 | 0.727328 | 0.716697 | 0 | 0.029858 | 0.272255 | 96,002 | 2,737 | 105 | 35.07563 | 0.792757 | 0.043062 | 0 | 0.79278 | 0 | 0 | 0.033142 | 0.007069 | 0 | 0 | 0 | 0.000365 | 0.027661 | 1 | 0.135021 | false | 0 | 0.018284 | 0.046414 | 0.265823 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f495bc4e21d897eeb611edc4edad0680833d2f2c | 42 | py | Python | src/lib/subprocess.py | DTenore/skulpt | 098d20acfb088d6db85535132c324b7ac2f2d212 | [
"MIT"
] | 2,671 | 2015-01-03T08:23:25.000Z | 2022-03-31T06:15:48.000Z | src/lib/subprocess.py | wakeupmuyunhe/skulpt | a8fb11a80fb6d7c016bab5dfe3712517a350b347 | [
"MIT"
] | 972 | 2015-01-05T08:11:00.000Z | 2022-03-29T13:47:15.000Z | src/lib/subprocess.py | wakeupmuyunhe/skulpt | a8fb11a80fb6d7c016bab5dfe3712517a350b347 | [
"MIT"
] | 845 | 2015-01-03T19:53:36.000Z | 2022-03-29T18:34:22.000Z | import _sk_fail; _sk_fail._("subprocess")
| 21 | 41 | 0.785714 | 6 | 42 | 4.666667 | 0.666667 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071429 | 42 | 1 | 42 | 42 | 0.717949 | 0 | 0 | 0 | 0 | 0 | 0.238095 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f4ad246f0c72aad56280002368bcb51cd4804207 | 597 | py | Python | 1_PythonDataProcessing/7_23_casestudy_part2.py | hnwarid/DQLabAcademy | e03d82f97536ae103b6abc65db0ae16520fb68c7 | [
"MIT"
] | null | null | null | 1_PythonDataProcessing/7_23_casestudy_part2.py | hnwarid/DQLabAcademy | e03d82f97536ae103b6abc65db0ae16520fb68c7 | [
"MIT"
] | null | null | null | 1_PythonDataProcessing/7_23_casestudy_part2.py | hnwarid/DQLabAcademy | e03d82f97536ae103b6abc65db0ae16520fb68c7 | [
"MIT"
] | null | null | null | import pandas as pd
data = pd.read_csv('https://storage.googleapis.com/dqlab-dataset/pythonTutorial/ecommerce_banner_promo.csv')
#2. Data exploration: check the correlation between features using corr()
print("\n[2] Data exploration: check the correlation between features using corr()")
print(data.corr())
#3. Data exploration: check the label distribution using groupby() and size()
print("\n[3] Data exploration: check the label distribution using groupby() and size()")
print(data.groupby('Clicked on Ad').size()) | 59.7 | 108 | 0.79397 | 83 | 597 | 5.674699 | 0.46988 | 0.118896 | 0.169851 | 0.089172 | 0.704883 | 0.704883 | 0.704883 | 0.704883 | 0.704883 | 0.704883 | 0 | 0.007477 | 0.103853 | 597 | 10 | 109 | 59.7 | 0.872897 | 0.313233 | 0 | 0 | 0 | 0 | 0.713936 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0.666667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
f4bd73c9f118eb8a084d29aad135574a06df83d7 | 12,475 | py | Python | gencove/tests/test_projects_get_merged_vcf.py | mislavcimpersak/gencove-cli | 2ee9204609d4120c013392f892653ebe9f4a8f7e | [
"Apache-2.0"
] | 1 | 2020-04-28T06:31:53.000Z | 2020-04-28T06:31:53.000Z | gencove/tests/test_projects_get_merged_vcf.py | mislavcimpersak/gencove-cli | 2ee9204609d4120c013392f892653ebe9f4a8f7e | [
"Apache-2.0"
] | null | null | null | gencove/tests/test_projects_get_merged_vcf.py | mislavcimpersak/gencove-cli | 2ee9204609d4120c013392f892653ebe9f4a8f7e | [
"Apache-2.0"
] | 1 | 2021-07-29T08:24:51.000Z | 2021-07-29T08:24:51.000Z | """Test project's get merged VCF command."""
from uuid import uuid4
from click.testing import CliRunner
from gencove.client import APIClient, APIClientError, APIClientTimeout
from gencove.command.projects.cli import get_merged_vcf
from gencove.models import Project
def test_get_merged_vcf__bad_project_id(mocker):
"""Test get merged file failure when non-uuid string is used as
project id.
"""
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
return_value=Project(id=str(uuid4())),
)
res = runner.invoke(
get_merged_vcf,
[
"1111111",
"--email",
"foo@bar.com",
"--password",
"123",
],
)
assert res.exit_code == 1
mocked_login.assert_called_once()
mocked_get_project.assert_not_called()
assert "Project ID is not valid" in res.output
def test_get_merged_vcf__not_owned_project(mocker):
"""Test get merged file failure when project is not owned."""
mocked_response = {"detail": "Not found."}
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
return_value=mocked_response,
side_effect=APIClientError(message="", status_code=403),
)
res = runner.invoke(
get_merged_vcf,
[
str(uuid4()),
"--email",
"foo@bar.com",
"--password",
"123",
],
)
assert res.exit_code == 1
mocked_login.assert_called_once()
mocked_get_project.assert_called_once()
assert "You do not have the sufficient permission" in res.output
def test_get_merged_vcf__empty(mocker):
"""Test project doesn't have a merged VCF file."""
project_id = str(uuid4())
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
return_value=Project(
id=project_id,
name="Project Cadmus",
description="",
created="2020-06-11T02:14:00.541889Z",
organization=str(uuid4()),
sample_count=3,
pipeline_capabilites=str(uuid4()),
files=[],
),
)
res = runner.invoke(
get_merged_vcf,
[
project_id,
"--email",
"foo@bar.com",
"--password",
"123",
],
)
assert res.exit_code == 1
mocked_login.assert_called_once()
mocked_get_project.assert_called_once()
assert (
"No files to process for project {}".format(project_id) in res.output
)
def test_get_merged_vcf_custom_filename(mocker):
"""Test project download merged VCF success with custom filename."""
project_id = str(uuid4())
file_id = str(uuid4())
download_url = (
"https://bucket.s3.amazonaws.com/output/apps/merge_vcfs/"
"{file_id}/{file_id}.vcf.bgz".format(file_id=file_id)
)
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
return_value=Project(
id=project_id,
name="Project Cadmus",
description="",
created="2020-06-11T02:14:00.541889Z",
organization=str(uuid4()),
sample_count=3,
pipeline_capabilites=str(uuid4()),
files=[
{
"id": "755ec682-e4a5-414a-a5be-07e0af11cf75",
"s3_path": (
"app-data/output/apps/merge_vcfs/"
"{file_id}/{file_id}.vcf.bgz".format(file_id=file_id)
),
"size": None,
"download_url": download_url,
"file_type": "impute-vcf-merged",
}
],
),
)
with runner.isolated_filesystem():
mocked_download_file = mocker.patch(
"gencove.command.projects.get_merged_vcf.main.download.utils."
"download_file"
)
res = runner.invoke(
get_merged_vcf,
[
project_id,
"--email",
"foo@bar.com",
"--password",
"123",
"--output-filename",
"superman.vcf.gz",
],
)
assert res.exit_code == 0
mocked_login.assert_called_once()
mocked_get_project.assert_called_once()
mocked_download_file.assert_called_once_with(
"superman.vcf.gz", download_url, no_progress=False
)
def test_get_merged_vcf__no_progress_success(mocker):
"""Test project download merged VCF success."""
project_id = str(uuid4())
file_id = str(uuid4())
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
return_value=Project(
id=project_id,
name="Project Cadmus",
description="",
created="2020-06-11T02:14:00.541889Z",
organization=str(uuid4()),
sample_count=3,
pipeline_capabilites=str(uuid4()),
files=[
{
"id": "755ec682-e4a5-414a-a5be-07e0af11cf75",
"s3_path": (
"app-data/output/apps/merge_vcfs/"
"{file_id}/{file_id}.vcf.bgz".format(file_id=file_id)
),
"size": None,
"download_url": (
"https://bucket.s3.amazonaws.com/output/apps/"
"merge_vcfs/{file_id}/{file_id}.vcf.bgz".format(
file_id=file_id
)
),
"file_type": "impute-vcf-merged",
}
],
),
)
with runner.isolated_filesystem():
mocked_download_file = mocker.patch(
"gencove.command.projects.get_merged_vcf.main.download.utils."
"download_file"
)
res = runner.invoke(
get_merged_vcf,
[
project_id,
"--email",
"foo@bar.com",
"--password",
"123",
"--no-progress",
],
)
assert res.exit_code == 0
mocked_login.assert_called_once()
mocked_get_project.assert_called_once()
mocked_download_file.assert_called_once_with(
"{}.vcf.bgz".format(file_id),
"https://bucket.s3.amazonaws.com/output/apps/"
"merge_vcfs/{file_id}/{file_id}.vcf.bgz".format(file_id=file_id),
no_progress=True,
)
def test_get_merged_vcf__slow_response_retry(mocker):
"""Test project download merged VCF slow response retry."""
project_id = str(uuid4())
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
side_effect=APIClientTimeout("Could not connect to the api server"),
)
with runner.isolated_filesystem():
mocked_download_file = mocker.patch(
"gencove.command.projects.get_merged_vcf.main.download.utils."
"download_file"
)
res = runner.invoke(
get_merged_vcf,
[
project_id,
"--email",
"foo@bar.com",
"--password",
"123",
],
)
assert res.exit_code == 1
mocked_login.assert_called_once()
assert mocked_get_project.call_count == 5
mocked_download_file.assert_not_called()
def test_get_merged_vcf__success(mocker):
"""Test project download merged VCF success."""
project_id = str(uuid4())
file_id = str(uuid4())
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
return_value=Project(
id=project_id,
name="Project Cadmus",
description="",
created="2020-06-11T02:14:00.541889Z",
organization=str(uuid4()),
sample_count=3,
pipeline_capabilites=str(uuid4()),
files=[
{
"id": "755ec682-e4a5-414a-a5be-07e0af11cf75",
"s3_path": (
"app-data/output/apps/merge_vcfs/"
"{file_id}/{file_id}.vcf.bgz".format(file_id=file_id)
),
"size": None,
"download_url": (
"https://bucket.s3.amazonaws.com/output/apps/"
"merge_vcfs/{file_id}/{file_id}.vcf.bgz".format(
file_id=file_id
)
),
"file_type": "impute-vcf-merged",
}
],
),
)
with runner.isolated_filesystem():
mocked_download_file = mocker.patch(
"gencove.command.projects.get_merged_vcf.main.download.utils."
"download_file"
)
res = runner.invoke(
get_merged_vcf,
[
project_id,
"--email",
"foo@bar.com",
"--password",
"123",
],
)
assert res.exit_code == 0
mocked_login.assert_called_once()
mocked_get_project.assert_called_once()
mocked_download_file.assert_called_once_with(
"{}.vcf.bgz".format(file_id),
"https://bucket.s3.amazonaws.com/output/apps/"
"merge_vcfs/{file_id}/{file_id}.vcf.bgz".format(file_id=file_id),
no_progress=False,
)
def test_get_merged_vcf__success__project_with_legacy_webhook_url(mocker):
"""Test merged VCF download success for a project with a legacy webhook_url."""
project_id = str(uuid4())
file_id = str(uuid4())
runner = CliRunner()
mocked_login = mocker.patch.object(APIClient, "login", return_value=None)
mocked_get_project = mocker.patch.object(
APIClient,
"get_project",
return_value=Project(
id=project_id,
name="Project Cadmus",
description="",
created="2020-06-11T02:14:00.541889Z",
organization=str(uuid4()),
webhook_url="",
sample_count=3,
pipeline_capabilites=str(uuid4()),
files=[
{
"id": "755ec682-e4a5-414a-a5be-07e0af11cf75",
"s3_path": (
"app-data/output/apps/merge_vcfs/"
"{file_id}/{file_id}.vcf.bgz".format(file_id=file_id)
),
"size": None,
"download_url": (
"https://bucket.s3.amazonaws.com/output/apps/"
"merge_vcfs/{file_id}/{file_id}.vcf.bgz".format(
file_id=file_id
)
),
"file_type": "impute-vcf-merged",
}
],
),
)
with runner.isolated_filesystem():
mocked_download_file = mocker.patch(
"gencove.command.projects.get_merged_vcf.main.download.utils."
"download_file"
)
res = runner.invoke(
get_merged_vcf,
[
project_id,
"--email",
"foo@bar.com",
"--password",
"123",
],
)
assert res.exit_code == 0
mocked_login.assert_called_once()
mocked_get_project.assert_called_once()
mocked_download_file.assert_called_once_with(
"{}.vcf.bgz".format(file_id),
"https://bucket.s3.amazonaws.com/output/apps/"
"merge_vcfs/{file_id}/{file_id}.vcf.bgz".format(file_id=file_id),
no_progress=False,
)
| 30.802469 | 77 | 0.535952 | 1,283 | 12,475 | 4.937646 | 0.116134 | 0.048303 | 0.039463 | 0.041673 | 0.868666 | 0.857301 | 0.839305 | 0.822099 | 0.794633 | 0.794633 | 0 | 0.031707 | 0.347735 | 12,475 | 404 | 78 | 30.878713 | 0.746835 | 0.036473 | 0 | 0.762178 | 0 | 0 | 0.196406 | 0.089428 | 0 | 0 | 0 | 0 | 0.091691 | 1 | 0.022923 | false | 0.022923 | 0.014327 | 0 | 0.037249 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
762f772c0bb823773937851285e9ce7f14269141 | 1,147 | py | Python | src/bpaTools/__init__.py | igclib/aixmParser | 8b0f6b9d7fbb8fed80bd4806d2b7f2e520c71d4d | [
"WTFPL"
] | 9 | 2020-05-28T19:26:56.000Z | 2022-03-13T04:37:41.000Z | src/bpaTools/__init__.py | BPascal-91/aixmParser | ef137694856a1bc6959f77bb896077879bf2cb30 | [
"WTFPL"
] | 3 | 2019-12-19T02:17:02.000Z | 2021-06-25T12:05:50.000Z | src/bpaTools/__init__.py | igclib/aixmParser | 8b0f6b9d7fbb8fed80bd4806d2b7f2e520c71d4d | [
"WTFPL"
] | 4 | 2020-05-13T10:25:07.000Z | 2021-08-21T23:10:39.000Z | from .Logger import Logger
from .ProgressBar import ProgressBar
from .GeoCoordinates import geoStr2coords
from .Tools import isInteger, isFloat, cleanAccent, str2bool, theQuit, sysExitError, ctrlPythonVersion, initEvent, getContentOf, getLeftOf, getRightOf, getFileName, getFileExt, getFilePath, getFileCreationDate, getFileModificationDate, getNow, getNowISO, getDateNow, getDate, addDatetime, getVersionFile, getParamTxtFile, getParamJsonFile, readJsonFile, writeJsonFile, writeTextFile, defaultEncoding, encodingUTF8, createFolder, deleteFile, getCommandLineOptions
from .myXml import Xml
__all__ = ([Logger] +
[ProgressBar] +
[geoStr2coords] +
[isInteger, isFloat, cleanAccent, str2bool, theQuit, sysExitError, ctrlPythonVersion, initEvent, getContentOf, getLeftOf, getRightOf, getFileName, getFileExt, getFilePath, getFileCreationDate, getFileModificationDate, getNow, getNowISO, getDateNow, getDate, addDatetime, getVersionFile, getParamTxtFile, getParamJsonFile, readJsonFile, writeJsonFile, writeTextFile, defaultEncoding, encodingUTF8, createFolder, deleteFile, getCommandLineOptions] +
[Xml])
| 95.583333 | 463 | 0.807323 | 88 | 1,147 | 10.477273 | 0.477273 | 0.034707 | 0.058568 | 0.075922 | 0.826464 | 0.826464 | 0.826464 | 0.826464 | 0.826464 | 0.826464 | 0 | 0.005941 | 0.119442 | 1,147 | 11 | 464 | 104.272727 | 0.906931 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
521f5eb0228ae27e2578615e54d9b6e50c739aeb | 173 | py | Python | stt_showdown/twilio.py | SubhashPeshwa/stt-showdown | c9ea0c04eaa7e1ee7973e5df51869ece69cca910 | [
"MIT"
] | null | null | null | stt_showdown/twilio.py | SubhashPeshwa/stt-showdown | c9ea0c04eaa7e1ee7973e5df51869ece69cca910 | [
"MIT"
] | null | null | null | stt_showdown/twilio.py | SubhashPeshwa/stt-showdown | c9ea0c04eaa7e1ee7973e5df51869ece69cca910 | [
"MIT"
] | null | null | null | import os
def transcribe_stream(stream_file):
return None
def transcribe_file(stream_file):
return None
def transcribe_mic_input(stream_file):
return None | 17.3 | 38 | 0.763006 | 24 | 173 | 5.208333 | 0.416667 | 0.312 | 0.384 | 0.48 | 0.528 | 0.528 | 0 | 0 | 0 | 0 | 0 | 0 | 0.184971 | 173 | 10 | 39 | 17.3 | 0.886525 | 0 | 0 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0 | 0.142857 | 0.428571 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
5268cd8ec8d5d50439eedb7fd9d10310252472de | 21,058 | py | Python | gulinalg/tests/test_gufunc_general.py | amit-r/gulinalg | d63c46fcb655b89b08ae438bb7dbd277675d5b14 | [
"BSD-2-Clause"
] | 11 | 2015-04-08T13:44:00.000Z | 2020-11-03T04:21:50.000Z | gulinalg/tests/test_gufunc_general.py | amit-r/gulinalg | d63c46fcb655b89b08ae438bb7dbd277675d5b14 | [
"BSD-2-Clause"
] | 4 | 2017-07-21T05:15:44.000Z | 2020-11-12T15:36:11.000Z | gulinalg/tests/test_gufunc_general.py | amit-r/gulinalg | d63c46fcb655b89b08ae438bb7dbd277675d5b14 | [
"BSD-2-Clause"
] | 13 | 2015-03-30T04:33:26.000Z | 2021-02-22T18:24:26.000Z | """
Tests BLAS functions. Since both C- and Fortran-ordered matrices are
supported, various combinations of matrix layouts need testing.
"""
from __future__ import print_function
from unittest import TestCase, skipIf
import numpy as np
from numpy.testing import run_module_suite, assert_allclose
from pkg_resources import parse_version
import gulinalg
M = 75
N = 50
K = 100
class TestMatvecMultiplyNoCopy(TestCase):
"""
Tests the cases that code can handle without copy-rearranging of any of
the input/output arguments.
"""
def test_matvec_multiply_c(self):
"""Multiply C layout matrix with vector"""
a = np.ascontiguousarray(np.random.randn(M, N))
b = np.random.randn(N)
res = gulinalg.matvec_multiply(a, b)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_matvec_multiply_f(self):
"""Multiply FORTRAN layout matrix with vector"""
a = np.asfortranarray(np.random.randn(M, N))
b = np.random.randn(N)
res = gulinalg.matvec_multiply(a, b)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_matvec_multiply_cv_c(self):
"""Test for explicit C array output for C layout input matrix"""
a = np.ascontiguousarray(np.random.randn(M, N))
b = np.ascontiguousarray(np.random.randn(N))
res = np.zeros(M, order='C')
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_matvec_multiply_fv_c(self):
"""Test for explicit C array output for FORTRAN layout input matrix"""
a = np.asfortranarray(np.random.randn(M, N))
b = np.ascontiguousarray(np.random.randn(N))
res = np.zeros(M, order='C')
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_matvec_multiply_cv_f(self):
"""Test for explicit FORTRAN array output for C layout input matrix"""
a = np.ascontiguousarray(np.random.randn(M, N))
b = np.ascontiguousarray(np.random.randn(N))
res = np.zeros(M, order='F')
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_matvec_multiply_fv_f(self):
"""Test for explicit FORTRAN array output for F layout input matrix"""
a = np.asfortranarray(np.random.randn(M, N))
b = np.ascontiguousarray(np.random.randn(N))
res = np.zeros(M, order='F')
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_matvec_multiply_for_complex_numbers(self):
"""Test for complex numbers input."""
a = np.array([[1 + 2j, 3 + 4j], [5 + 6j, 7 + -8j]])
b = np.array([1 - 2j, 4 + 5j])
res = gulinalg.matvec_multiply(a, b)
ref = np.dot(a, b)
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_matvec_size_zero_matrix(self):
"""Test matrix of size zero"""
a = np.random.randn(0, 2)
b = np.random.randn(2)
res = gulinalg.matvec_multiply(a, b)
ref = np.dot(a, b)
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_matvec_size_zero_vector(self):
"""Test vector of size zero"""
a = np.random.randn(2, 0)
b = np.random.randn(0)
res = gulinalg.matvec_multiply(a, b)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_matvec_size_one_vector(self):
"""Test vector of size one"""
a = np.random.randn(1, 1)
b = np.random.randn(1)
res = gulinalg.matvec_multiply(a, b)
ref = np.dot(a, b)
assert_allclose(res, ref)
class TestMatvecMultiplyWithCopy(TestCase):
"""
Test the cases where there is at least one operand/output that requires
copy/rearranging.
"""
def test_input_non_contiguous_1(self):
"""First input not contiguous"""
a = np.ascontiguousarray(np.random.randn(M, N, 2))[:, :, 0]
b = np.ascontiguousarray(np.random.randn(N))
res = np.zeros(M, order='C')
assert not a.flags.c_contiguous and not a.flags.f_contiguous
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_input_non_contiguous_2(self):
"""Second input not contiguous"""
a = np.ascontiguousarray(np.random.randn(M, N))
b = np.ascontiguousarray(np.random.randn(N, 2))[:, 0]
res = np.zeros(M, order='C')
assert not b.flags.c_contiguous and not b.flags.f_contiguous
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_input_non_contiguous_3(self):
"""Neither input contiguous"""
a = np.ascontiguousarray(np.random.randn(M, N, 2))[:, :, 0]
b = np.ascontiguousarray(np.random.randn(N, 2))[:, 0]
res = np.zeros(M, order='C')
assert not a.flags.c_contiguous and not a.flags.f_contiguous
assert not b.flags.c_contiguous and not b.flags.f_contiguous
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_output_non_contiguous(self):
"""Output not contiguous"""
a = np.ascontiguousarray(np.random.randn(M, N))
b = np.ascontiguousarray(np.random.randn(N))
res = np.zeros((M, 2), order='C')[:, 0]
assert not res.flags.c_contiguous and not res.flags.f_contiguous
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_all_non_contiguous(self):
"""Neither input nor output contiguous"""
a = np.ascontiguousarray(np.random.randn(M, N, 2))[:, :, 0]
b = np.ascontiguousarray(np.random.randn(N, 2))[:, 0]
res = np.zeros((M, 2), order='C')[:, 0]
assert not a.flags.c_contiguous and not a.flags.f_contiguous
assert not b.flags.c_contiguous and not b.flags.f_contiguous
assert not res.flags.c_contiguous and not res.flags.f_contiguous
gulinalg.matvec_multiply(a, b, out=res)
ref = np.dot(a, b)
assert_allclose(res, ref)
def test_stride_tricks(self):
"""Test that matrices that are contiguous but have their dimension
overlapped *copy*, as BLAS does not support them"""
a = np.ascontiguousarray(np.random.randn(M + N))
a = np.lib.stride_tricks.as_strided(a,
shape=(M, N),
strides=(a.itemsize, a.itemsize))
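# With strides of (itemsize, itemsize), row i aliases elements i..i+N-1 of
# the base buffer, so consecutive rows overlap in memory and BLAS cannot
# operate on the array in place; a copy is expected.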
b = np.ascontiguousarray(np.random.randn(N))
res = gulinalg.matvec_multiply(a, b)
ref = np.dot(a, b)
assert_allclose(res, ref)
class TestMatvecMultiplyVector(TestCase):
"""Tests showing that the gufunc stuff works"""
def test_vector(self):
"""test vectorized matrix multiply"""
a = np.ascontiguousarray(np.random.randn(10, M, N))
b = np.ascontiguousarray(np.random.randn(10, N))
res = gulinalg.matvec_multiply(a, b)
assert res.shape == (10, M)
ref = np.stack([np.dot(a[i], b[i]) for i in range(len(a))])
assert_allclose(res, ref)
def test_broadcast(self):
"""test broadcast matrix multiply"""
a = np.ascontiguousarray(np.random.randn(M, N))
b = np.ascontiguousarray(np.random.randn(10, N))
res = gulinalg.matvec_multiply(a, b)
assert res.shape == (10, M)
ref = np.stack([np.dot(a, b[i]) for i in range(len(b))])
assert_allclose(res, ref)
def test_nan_handling(self):
"""NaN in one output shouldn't contaminate remaining outputs"""
a = np.eye(2)
b = np.array([[1.0, 2.0], [np.nan, 1.0]])
ref = np.array([[1., 2.], [np.nan, np.nan]])
res = gulinalg.matvec_multiply(a, b)
assert_allclose(res, ref)
def test_infinity_handling(self):
"""Infinity in one output shouldn't contaminate remaining outputs"""
a = np.eye(2)
b = np.array([[1.0, 2.0], [np.inf, 1.0]])
ref = np.array([[1., 2.], [np.inf, np.nan]])
res = gulinalg.matvec_multiply(a, b)
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_size_zero_vector(self):
"""Test broadcasting for vector of size zero"""
a = np.ascontiguousarray(np.random.randn(10, 2, 0))
b = np.ascontiguousarray(np.random.randn(10, 0))
res = gulinalg.matvec_multiply(a, b)
assert res.shape == (10, 2)
ref = np.stack([np.dot(a[i], b[i]) for i in range(len(a))])
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_size_zero_matrix(self):
"""Test broadcasting for matrix of size zero"""
a = np.ascontiguousarray(np.random.randn(10, 0, 2))
b = np.ascontiguousarray(np.random.randn(10, 2))
res = gulinalg.matvec_multiply(a, b)
assert res.shape == (10, 0)
ref = np.stack([np.dot(a[i], b[i]) for i in range(len(a))])
assert_allclose(res, ref)
def test_size_one_vector(self):
"""Test broadcasting for vector of size one"""
a = np.ascontiguousarray(np.random.randn(10, 1, 1))
b = np.ascontiguousarray(np.random.randn(10, 1))
res = gulinalg.matvec_multiply(a, b)
assert res.shape == (10, 1)
ref = np.stack([np.dot(a[i], b[i]) for i in range(len(a))])
assert_allclose(res, ref)
class TestUpdateRank1Copy(TestCase):
"""
Tests the cases that code can handle without copy-rearranging of any of
the input/output arguments.
"""
def test_update_rank1_c(self):
"""Rank update on C layout matrix"""
a = np.random.randn(M)
b = np.random.randn(N)
c = np.ascontiguousarray(np.random.randn(M, N))
res = gulinalg.update_rank1(a, b, c)
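# The reference below spells out the rank-1 update c + outer(a, b)
# (the BLAS-style ?ger operation) via reshape + dot.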
ref = np.dot(a.reshape(M, 1), b.reshape(1, N)) + c
assert_allclose(res, ref)
def test_update_rank1_f(self):
"""Rank update on F layout matrix"""
a = np.random.randn(M)
b = np.random.randn(N)
c = np.asfortranarray(np.random.randn(M, N))
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(a.reshape(M, 1), b.reshape(1, N)) + c
assert_allclose(res, ref)
def test_update_rank1_for_complex_numbers(self):
"""Test for complex numbers"""
a = np.array([1 + 3j, 3 - 4j])
b = np.array([1 - 2j, 4 + 5j])
c = np.array([[1 + 2j, 3 + 4j], [5 + 6j, 7 + -8j]])
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(a.reshape(2, 1), b.conj().reshape(1, 2)) + c
assert_allclose(res, ref)
def test_update_rank1_for_complex_numbers_no_conjugate_transpose(self):
"""Test for complex numbers but no conjuage transpose"""
a = np.array([1 + 3j, 3 - 4j])
b = np.array([1 - 2j, 4 + 5j])
c = np.array([[1 + 2j, 3 + 4j], [5 + 6j, 7 + -8j]])
res = gulinalg.update_rank1(a, b, c, conjugate=False)
ref = np.dot(a.reshape(2, 1), b.reshape(1, 2)) + c
assert_allclose(res, ref)
def test_update_rank1_c_c(self):
"""Rank1 update on C layout matrix, explicit C array output"""
a = np.array([2, 3, 4])
b = np.array([1, 3, 4, 5])
c = np.arange(1, 13).reshape(3, 4)
res = np.zeros((3, 4), order='C')
gulinalg.update_rank1(a, b, c, out=res)
ref = np.dot(a.reshape(3, 1), b.reshape(1, 4)) + c
assert_allclose(res, ref)
def test_update_rank1_f_c(self):
"""Rank1 update on F layout matrix, explicit C array output"""
a = np.array([2, 3, 4])
b = np.array([1, 3, 4, 5])
c = np.asfortranarray(np.arange(1, 13).reshape(3, 4))
res = np.zeros((3, 4), order='C')
gulinalg.update_rank1(a, b, c, out=res)
ref = np.dot(a.reshape(3, 1), b.reshape(1, 4)) + c
assert_allclose(res, ref)
def test_update_rank1_c_f(self):
"""Rank1 update on C layout matrix, explicit F array output"""
a = np.array([2, 3, 4])
b = np.array([1, 3, 4, 5])
c = np.arange(1, 13).reshape(3, 4)
res = np.zeros((3, 4), order='F')
gulinalg.update_rank1(a, b, c, out=res)
ref = np.dot(a.reshape(3, 1), b.reshape(1, 4)) + c
assert_allclose(res, ref)
def test_update_rank1_f_f(self):
"""Rank1 update on F layout matrix, explicit F array output"""
a = np.array([2, 3, 4])
b = np.array([1, 3, 4, 5])
c = np.asfortranarray(np.arange(1, 13).reshape(3, 4))
res = np.zeros((3, 4), order='F')
gulinalg.update_rank1(a, b, c, out=res)
ref = np.dot(a.reshape(3, 1), b.reshape(1, 4)) + c
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_size_zero_vector(self):
"""Test vector input of size zero"""
a = np.zeros(1)
b = np.zeros(0)
c = np.ascontiguousarray(np.random.randn(1, 0))
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(np.zeros((1, 0)), np.zeros((0, 0))) + c
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_size_zero_matrix(self):
"""Test matrix input of size zero"""
a = np.zeros(0)
b = np.zeros(2)
c = np.full((0, 2), np.nan)
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(np.zeros((0, 0)), np.zeros((0, 2))) + c
assert_allclose(res, ref)
def test_size_one_vector(self):
"""Test vector inputs of size one"""
a = np.random.randn(1)
b = np.random.randn(1)
c = np.ascontiguousarray(np.random.randn(1, 1))
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(a.reshape(1, 1), b.reshape(1, 1)) + c
assert_allclose(res, ref)
class TestUpdateRank1WithCopy(TestCase):
"""
Test the cases where there is at least one operand/output that requires
copy/rearranging.
"""
def test_input_non_contiguous_vectors(self):
"""Not contiguous vector inputs"""
a = np.ascontiguousarray(np.random.randn(M, N, 2))[:, 0, 0]
b = np.ascontiguousarray(np.random.randn(M, N, 2))[0, :, 0]
c = np.ascontiguousarray(np.random.randn(M, N))
assert not a.flags.c_contiguous and not a.flags.f_contiguous
assert not b.flags.c_contiguous and not b.flags.f_contiguous
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(a.reshape(M, 1), b.reshape(1, N)) + c
assert_allclose(res, ref)
def test_input_non_contiguous_matrix(self):
"""Non contiguous matrix input"""
a = np.random.randn(M)
b = np.random.randn(N)
c = np.ascontiguousarray(np.random.randn(M, N, 2))[:, :, 0]
assert not c.flags.c_contiguous and not c.flags.f_contiguous
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(a.reshape(M, 1), b.reshape(1, N)) + c
assert_allclose(res, ref)
def test_output_non_contiguous(self):
"""Output not contiguous"""
a = np.random.randn(M)
b = np.random.randn(N)
c = np.ascontiguousarray(np.random.randn(M, N))
res = np.zeros((M, N, 2), order='C')[:, :, 0]
gulinalg.update_rank1(a, b, c, out=res)
ref = np.dot(a.reshape(M, 1), b.reshape(1, N)) + c
assert_allclose(res, ref)
def test_stride_tricks(self):
"""test that matrices that are contiguous but have their dimension
overlapped *copy*, as BLAS does not support them"""
a = np.random.randn(M)
b = np.random.randn(N)
c = np.ascontiguousarray(np.random.randn(M + N))
c = np.lib.stride_tricks.as_strided(c,
shape=(M, N),
strides=(c.itemsize, c.itemsize))
res = gulinalg.update_rank1(a, b, c)
ref = np.dot(a.reshape(M, 1), b.reshape(1, N)) + c
assert_allclose(res, ref)
class TestUpdateRank1Vector(TestCase):
"""Tests showing that the gufunc stuff works"""
def test_vector(self):
"""test vectorized rank1 update"""
a = np.ascontiguousarray(np.random.randn(10, M))
b = np.ascontiguousarray(np.random.randn(10, N))
c = np.ascontiguousarray(np.random.randn(10, M, N))
res = gulinalg.update_rank1(a, b, c)
assert res.shape == (10, M, N)
ref = np.stack([np.dot(a[i].reshape(M, 1), b[i].reshape(1, N)) + c[i]
for i in range(len(c))])
assert_allclose(res, ref)
def test_broadcast(self):
"""test broadcast rank1 update"""
a = np.ascontiguousarray(np.random.randn(10, M))
b = np.ascontiguousarray(np.random.randn(10, N))
c = np.ascontiguousarray(np.random.randn(M, N))
res = gulinalg.update_rank1(a, b, c)
assert res.shape == (10, M, N)
ref = np.stack([np.dot(a[i].reshape(M, 1), b[i].reshape(1, N)) + c
for i in range(len(b))])
assert_allclose(res, ref)
def test_nan_handling(self):
"""NaN in one output shouldn't contaminate remaining outputs"""
a = np.array([[1, 2], [1, np.nan]])
b = np.array([3, 4])
c = np.array([[1, 2], [3, 4]])
ref = np.array([[[4, 6], [9, 12]],
[[4, 6], [np.nan, np.nan]]])
res = gulinalg.update_rank1(a, b, c)
assert_allclose(res, ref)
def test_infinity_handling(self):
"""Infinity in one output shouldn't contaminate remaining outputs"""
a = np.array([[1, 2], [1, np.inf]])
b = np.array([3, 4])
c = np.array([[1, 2], [3, 4]])
ref = np.array([[[4, 6], [9, 12]],
[[4, 6], [np.inf, np.inf]]])
res = gulinalg.update_rank1(a, b, c)
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_size_zero_vector(self):
"""Test broadcasting for matrix input of size zero"""
a = np.ascontiguousarray(np.random.randn(10, 1))
b = np.ascontiguousarray(np.random.randn(10, 0))
c = np.ascontiguousarray(np.random.randn(10, 1, 0))
res = gulinalg.update_rank1(a, b, c)
assert res.shape == (10, 1, 0)
ref = np.stack([np.dot(np.zeros((1, 0)), np.zeros((0, 0))) + c[i]
for i in range(len(c))])
assert_allclose(res, ref)
@skipIf(parse_version(np.__version__) < parse_version('1.13'),
"Prior to 1.13, numpy low level iterators didn't support removing "
"empty axis. So gufunc couldn't be called with empty inner loop")
def test_size_zero_matrix(self):
"""Test broadcasting for matrix input of size zero"""
a = np.ascontiguousarray(np.random.randn(10, 0))
b = np.ascontiguousarray(np.random.randn(10, 2))
c = np.ascontiguousarray(np.random.randn(10, 0, 2))
res = gulinalg.update_rank1(a, b, c)
assert res.shape == (10, 0, 2)
ref = np.stack([np.dot(np.zeros((0, 0)), np.zeros((0, 2))) + c[i]
for i in range(len(c))])
assert_allclose(res, ref)
def test_size_one_vector(self):
"""Test broadcasting for vector inputs of size one"""
a = np.ascontiguousarray(np.random.randn(10, 1))
b = np.ascontiguousarray(np.random.randn(10, 1))
c = np.ascontiguousarray(np.random.randn(10, 1, 1))
res = gulinalg.update_rank1(a, b, c)
assert res.shape == (10, 1, 1)
ref = np.stack([np.dot(a[i].reshape(1, 1), b[i].reshape(1, 1)) + c[i]
for i in range(len(c))])
assert_allclose(res, ref)
if __name__ == '__main__':
run_module_suite()
| 41.616601 | 79 | 0.590702 | 3,143 | 21,058 | 3.852689 | 0.063633 | 0.050871 | 0.082666 | 0.118177 | 0.926253 | 0.917499 | 0.902056 | 0.882154 | 0.829053 | 0.812536 | 0 | 0.029391 | 0.266455 | 21,058 | 505 | 80 | 41.69901 | 0.754515 | 0.121616 | 0 | 0.729659 | 0 | 0 | 0.058914 | 0 | 0 | 0 | 0 | 0 | 0.175853 | 1 | 0.11811 | false | 0 | 0.015748 | 0 | 0.149606 | 0.002625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0d68943546150bac5769a996c24b661e74fd8348 | 1,416 | py | Python | tests/test_1968.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_1968.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_1968.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import pytest

"""
Test 1968. Array With Elements Not Equal to Average of Neighbors
"""


@pytest.fixture(scope="session")
def init_variables_1968():
    from src.leetcode_1968_array_with_elements_not_equal_to_average_of_neighbors import (
        Solution,
    )

    solution = Solution()

    def _init_variables_1968():
        return solution

    yield _init_variables_1968


class TestClass1968:
    def test_solution_0(self, init_variables_1968):
        assert init_variables_1968().rearrangeArray([1, 2, 3, 4, 5]) == [1, 2, 4, 5, 3]

    def test_solution_1(self, init_variables_1968):
        assert init_variables_1968().rearrangeArray([6, 2, 0, 9, 7]) == [9, 7, 6, 2, 0]
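
    def test_solution_property(self, init_variables_1968):
        # Hedged addition (not in the original file): rather than pinning one
        # exact arrangement (several are valid), check the defining property
        # directly -- no element equals the average of its neighbors -- and
        # that the output is a permutation of the input.
        nums = [0, 1, 2, 3, 4, 5, 6]
        result = init_variables_1968().rearrangeArray(nums)
        assert sorted(result) == nums
        assert all(
            2 * result[i] != result[i - 1] + result[i + 1]
            for i in range(1, len(result) - 1)
        )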
| 23.213115 | 89 | 0.693503 | 200 | 1,416 | 4.61 | 0.215 | 0.197397 | 0.258134 | 0.091106 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0.108772 | 0.194915 | 1,416 | 60 | 90 | 23.6 | 0.7 | 0.028249 | 0 | 0.933333 | 0 | 0 | 0.011382 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 1 | 0.266667 | false | 0 | 0.133333 | 0.066667 | 0.533333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 10 |
0d881e28777b8731d25801de24e088b3be6a943b | 1,767 | py | Python | tests/unit_tests/test_nn/test_converters/test_tensorflow/test_BatchNormalization.py | samysweb/dnnv | 58fb95b7300914d9da28eed86c39eca473b1aaef | [
"MIT"
] | 5 | 2022-01-28T20:30:34.000Z | 2022-03-17T09:26:52.000Z | tests/unit_tests/test_nn/test_converters/test_tensorflow/test_BatchNormalization.py | samysweb/dnnv | 58fb95b7300914d9da28eed86c39eca473b1aaef | [
"MIT"
] | 9 | 2022-01-27T03:50:28.000Z | 2022-02-08T18:42:17.000Z | tests/unit_tests/test_nn/test_converters/test_tensorflow/test_BatchNormalization.py | samysweb/dnnv | 58fb95b7300914d9da28eed86c39eca473b1aaef | [
"MIT"
] | 2 | 2022-02-03T17:32:43.000Z | 2022-03-24T16:38:49.000Z | import numpy as np
from dnnv.nn.converters.tensorflow import *
from dnnv.nn.operations import *

TOL = 1e-6


def test_BatchNormalization_consts():
    x = np.arange(12).astype(np.float32).reshape((1, 3, 2, 2))
    scale = np.full(3, 2.0, dtype=np.float32)
    bias = np.full(3, 0.0, dtype=np.float32)
    mean = np.full(3, 5.5, dtype=np.float32)
    var = np.full(3, 11.9, dtype=np.float32)
    op = BatchNormalization(x, scale, bias, mean, var)
    tf_op = TensorflowConverter().visit(op)
    result = tf_op().numpy()
    y = np.array(
        [
            [
                [[-3.1887393, -2.6089685], [-2.0291977, -1.4494269]],
                [[-0.8696561, -0.28988528], [0.28988552, 0.8696561]],
                [[1.4494271, 2.0291982], [2.6089687, 3.1887393]],
            ]
        ],
        dtype=np.float32,
    )
    assert np.all(result >= (y - TOL))
    assert np.all(result <= (y + TOL))


def test_BatchNormalization_x_is_op():
    x = np.arange(12).astype(np.float32).reshape((1, 3, 2, 2))
    scale = np.full(3, 2.0, dtype=np.float32)
    bias = np.full(3, 0.0, dtype=np.float32)
    mean = np.full(3, 5.5, dtype=np.float32)
    var = np.full(3, 11.9, dtype=np.float32)
    input_op = Input((1, 3, 2, 2), np.dtype(np.float32))
    op = BatchNormalization(input_op, scale, bias, mean, var)
    tf_op = TensorflowConverter().visit(op)
    result = tf_op(x).numpy()
    y = np.array(
        [
            [
                [[-3.1887393, -2.6089685], [-2.0291977, -1.4494269]],
                [[-0.8696561, -0.28988528], [0.28988552, 0.8696561]],
                [[1.4494271, 2.0291982], [2.6089687, 3.1887393]],
            ]
        ],
        dtype=np.float32,
    )
    assert np.all(result >= (y - TOL))
    assert np.all(result <= (y + TOL))
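
def test_BatchNormalization_formula_reference():
    # Hedged sketch (not in the original suite): the hard-coded y values above
    # follow the standard batch-norm formula
    #     y = scale * (x - mean) / sqrt(var + eps) + bias
    # applied per channel. eps = 1e-5 is an assumption (the ONNX default), not
    # something the tests above confirm, hence the looser tolerance here.
    x = np.arange(12).astype(np.float32).reshape((1, 3, 2, 2))
    scale = np.full(3, 2.0, dtype=np.float32)
    bias = np.full(3, 0.0, dtype=np.float32)
    mean = np.full(3, 5.5, dtype=np.float32)
    var = np.full(3, 11.9, dtype=np.float32)
    eps = 1e-5  # assumed epsilon
    y = (
        scale[:, None, None] * (x - mean[:, None, None])
        / np.sqrt(var[:, None, None] + eps)
        + bias[:, None, None]
    )
    result = TensorflowConverter().visit(
        BatchNormalization(x, scale, bias, mean, var)
    )().numpy()
    assert np.all(np.abs(result - y) < 1e-4)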
| 31.553571 | 69 | 0.551783 | 253 | 1,767 | 3.806324 | 0.225296 | 0.121495 | 0.159917 | 0.062305 | 0.82243 | 0.766355 | 0.766355 | 0.766355 | 0.766355 | 0.766355 | 0 | 0.205089 | 0.265988 | 1,767 | 55 | 70 | 32.127273 | 0.537394 | 0 | 0 | 0.595745 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 1 | 0.042553 | false | 0 | 0.06383 | 0 | 0.106383 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0d99beb28367f0a1a3ee430483aa1b8c9b103052 | 286 | py | Python | lg_offliner/src/lg_offliner/__init__.py | carlosvquezada/lg_ros_nodes | 7560e99272d06ef5c80a5444131dad72c078a718 | [
"Apache-2.0"
] | null | null | null | lg_offliner/src/lg_offliner/__init__.py | carlosvquezada/lg_ros_nodes | 7560e99272d06ef5c80a5444131dad72c078a718 | [
"Apache-2.0"
] | null | null | null | lg_offliner/src/lg_offliner/__init__.py | carlosvquezada/lg_ros_nodes | 7560e99272d06ef5c80a5444131dad72c078a718 | [
"Apache-2.0"
] | null | null | null | from offliner import ROS_NODE_NAME
from offliner import LG_OFFLINER_DEBUG_TOPIC_DEFAULT
from offliner import LG_OFFLINER_OFFLINE_TOPIC_DEFAULT
from offliner import Checker
from offliner import ConnectivityResults
from offliner import process_custom_publishers
from offliner import main
| 35.75 | 54 | 0.902098 | 40 | 286 | 6.15 | 0.425 | 0.341463 | 0.512195 | 0.162602 | 0.398374 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097902 | 286 | 7 | 55 | 40.857143 | 0.953488 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0dbed31036c28b71afe691288c605207a25cb6c0 | 311 | py | Python | one liners/binary-converter.py | Computroniks/random-python-stuff | f976c59c38a91fbe5d019d0e8626f8a6cb6a2a4b | [
"MIT"
] | 1 | 2020-09-21T18:39:13.000Z | 2020-09-21T18:39:13.000Z | one liners/binary-converter.py | Computroniks/random-stuff | f976c59c38a91fbe5d019d0e8626f8a6cb6a2a4b | [
"MIT"
] | null | null | null | one liners/binary-converter.py | Computroniks/random-stuff | f976c59c38a91fbe5d019d0e8626f8a6cb6a2a4b | [
"MIT"
] | null | null | null | print(eval("str(bin(int((input('Please enter number to convert to binary: '))))).lstrip('0b')") if int(input('What do you want to convert?\n[1] Deanery to binary\n[2] Binary to deanery\nEnter your option [1/2]: ')) == 1 else eval("int(input('Please enter a binanary number to convert to decimal: '), base=2)"))
| 155.5 | 310 | 0.691318 | 55 | 311 | 3.909091 | 0.563636 | 0.111628 | 0.130233 | 0.176744 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025735 | 0.125402 | 311 | 1 | 311 | 311 | 0.764706 | 0 | 0 | 0 | 0 | 2 | 0.829582 | 0.083601 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
0dc2021b659d8c10e1bad74a948c0f923c8b66a1 | 51,403 | py | Python | data_steward/analytics/table_metrics/Table_Metrics_part_1.py | calbach/curation | 3d571a2bf9d236322e87a3113c2b3f3d0cb3d5b5 | [
"MIT"
] | null | null | null | data_steward/analytics/table_metrics/Table_Metrics_part_1.py | calbach/curation | 3d571a2bf9d236322e87a3113c2b3f3d0cb3d5b5 | [
"MIT"
] | null | null | null | data_steward/analytics/table_metrics/Table_Metrics_part_1.py | calbach/curation | 3d571a2bf9d236322e87a3113c2b3f3d0cb3d5b5 | [
"MIT"
] | null | null | null | # ---
# jupyter:
# jupytext:
# formats: ipynb,py:light
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.4'
# jupytext_version: 1.2.1
# kernelspec:
# display_name: Python 2
# language: python
# name: python2
# ---
# +
# #!pip install --upgrade google-cloud-bigquery[pandas]
# -
from google.cloud import bigquery
client = bigquery.Client()
# %load_ext google.cloud.bigquery
# %reload_ext google.cloud.bigquery
# +
#######################################
print('Setting everything up...')
#######################################
import warnings
warnings.filterwarnings('ignore')
import pandas as pd
import matplotlib.pyplot as plt
DATASET = ''  # value elided in the source; set to the BigQuery dataset to query before running
plt.style.use('ggplot')
pd.options.display.max_rows = 999
pd.options.display.max_columns = 999
pd.options.display.max_colwidth = 999
def cstr(s, color='black'):
    return "<text style=color:{}>{}</text>".format(color, s)
print('done.')
# +
dic = {'src_hpo_id': ["trans_am_essentia", "saou_ummc", "seec_miami", "seec_morehouse", "seec_emory", "uamc_banner",
"pitt", "nyc_cu", "ipmc_uic", "trans_am_spectrum", "tach_hfhs", "nec_bmc", "cpmc_uci", "nec_phs",
"nyc_cornell", "ipmc_nu", "nyc_hh", "ipmc_uchicago", "aouw_mcri", "syhc", "cpmc_ceders",
"seec_ufl", "saou_uab", "trans_am_baylor", "cpmc_ucsd", "ecchc", "chci", "aouw_uwh", "cpmc_usc",
"hrhc", "ipmc_northshore", "chs", "cpmc_ucsf", "jhchc", "aouw_mcw", "cpmc_ucd", "ipmc_rush"],
'HPO': ["Essentia Health Superior Clinic", "University of Mississippi", "SouthEast Enrollment Center Miami",
"SouthEast Enrollment Center Morehouse", "SouthEast Enrollment Center Emory", "Banner Health",
"University of Pittsburgh", "Columbia University Medical Center", "University of Illinois Chicago",
"Spectrum Health", "Henry Ford Health System", "Boston Medical Center", "UC Irvine",
"Partners HealthCare", "Weill Cornell Medical Center", "Northwestern Memorial Hospital",
"Harlem Hospital", "University of Chicago", "Marshfield Clinic", "San Ysidro Health Center",
"Cedars-Sinai", "University of Florida", "University of Alabama at Birmingham", "Baylor", "UC San Diego",
"Eau Claire Cooperative Health Center", "Community Health Center, Inc.",
"UW Health (University of Wisconsin Madison)", "University of Southern California", "HRHCare",
"NorthShore University Health System", "Cherokee Health Systems", "UC San Francisco",
"Jackson-Hinds CHC", "Medical College of Wisconsin", "UC Davis", "Rush University"]}
site_df = pd.DataFrame(data=dic)
site_df
# -
# # There should not be duplicate rows.
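
# +
# Hedged sketch (toy frame with illustrative names, not project data) of the
# duplicate check every query below uses: group by all columns and keep the
# groups with count > 1.
toy = pd.DataFrame({'src_hpo_id': ['a', 'a', 'b'], 'person_id': [1, 1, 2]})
toy_dupes = toy.groupby(list(toy.columns)).size().reset_index(name='cnt')
toy_dupes[toy_dupes['cnt'] > 1]
# -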
# ## visit_occurrence table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
src_hpo_id,
person_id, visit_concept_id, visit_start_date, visit_start_datetime, visit_end_date, visit_end_datetime,
visit_type_concept_id, provider_id, care_site_id, visit_source_value, visit_source_concept_id,
admitting_source_concept_id, admitting_source_value, discharge_to_concept_id,
discharge_to_source_value, preceding_visit_occurrence_id,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_visit_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_visit_occurrence`) AS t2
ON
t1.visit_occurrence_id=t2.visit_occurrence_id
WHERE
t1.visit_concept_id!=0 AND t1.visit_concept_id IS NOT NULL AND
t1.person_id!=0 and t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5,6,7,8,9
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
visit_occurrence = foreign_key_df.groupby(['src_hpo_id']).size().reset_index().rename(
columns={0: 'visit_occurrence'}).sort_values(["visit_occurrence"]).set_index("src_hpo_id")
visit_occurrence = visit_occurrence.reset_index()
visit_occurrence
# ## condition_occurrence table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
src_hpo_id,
person_id, condition_concept_id, condition_start_date, condition_start_datetime, condition_end_date,
condition_end_datetime, condition_type_concept_id, stop_reason, provider_id, visit_occurrence_id,
condition_source_value, condition_source_concept_id, condition_status_source_value, condition_status_concept_id,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
WHERE
t1.condition_concept_id!=0 AND t1.condition_concept_id IS NOT NULL AND
t1.person_id!=0 and t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
condition_occurrence = foreign_key_df.groupby(['src_hpo_id']).size().reset_index().rename(
columns={0: 'condition_occurrence'}).sort_values(["condition_occurrence"]).set_index("src_hpo_id")
condition_occurrence = condition_occurrence.reset_index()
condition_occurrence
# ## drug_exposure table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
src_hpo_id,
person_id, drug_concept_id, drug_exposure_start_date,drug_exposure_start_datetime,
drug_exposure_end_date,drug_exposure_end_datetime, verbatim_end_date, drug_type_concept_id,
stop_reason, refills, quantity,
days_supply, sig, route_concept_id, lot_number, provider_id, visit_occurrence_id, drug_source_value,
drug_source_concept_id, route_source_value, dose_unit_source_value,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_drug_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_drug_exposure`) AS t2
ON
t1.drug_exposure_id=t2.drug_exposure_id
WHERE
t1.drug_concept_id!=0 AND t1.drug_concept_id IS NOT NULL AND
t1.person_id!=0 and t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22
HAVING
COUNT(*) > 1
ORDER BY
1,2,3
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
drug_exposure = foreign_key_df.groupby(['src_hpo_id']).size().reset_index().rename(
columns={0: 'drug_exposure'}).sort_values(["drug_exposure"]).set_index("src_hpo_id")
drug_exposure = drug_exposure.reset_index()
drug_exposure
# ## measurement table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
src_hpo_id,
person_id, measurement_concept_id, measurement_date, measurement_datetime, measurement_type_concept_id,
operator_concept_id, value_as_number, value_as_concept_id, unit_concept_id, range_low,
range_high, provider_id, visit_occurrence_id,
measurement_source_value, measurement_source_concept_id, unit_source_value, value_source_value,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_measurement` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_measurement`) AS t2
ON
t1.measurement_id=t2.measurement_id
WHERE
t1.measurement_concept_id!=0 AND t1.measurement_concept_id IS NOT NULL AND
t1.person_id!=0 and t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18
HAVING
COUNT(*) > 1
ORDER BY
1,2,3
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
measurement = foreign_key_df.groupby(['src_hpo_id']).size().reset_index().rename(
columns={0: 'measurement'}).sort_values(["measurement"]).set_index("src_hpo_id")
measurement = measurement.reset_index()
measurement
# ## procedure_occurrence
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
src_hpo_id,
person_id, procedure_concept_id, procedure_date, procedure_datetime, procedure_type_concept_id, modifier_concept_id,
quantity, provider_id, visit_occurrence_id, procedure_source_value, procedure_source_concept_id, qualifier_source_value,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_procedure_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_procedure_occurrence`) AS t2
ON
t1.procedure_occurrence_id=t2.procedure_occurrence_id
WHERE
t1.procedure_concept_id!=0 AND t1.procedure_concept_id IS NOT NULL AND
t1.person_id!=0 and t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12,13
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
procedure_occurrence = foreign_key_df.groupby(['src_hpo_id']).size().reset_index().rename(
columns={0: 'procedure_occurrence'}).sort_values(["procedure_occurrence"]).set_index("src_hpo_id")
procedure_occurrence = procedure_occurrence.reset_index()
procedure_occurrence
# ## observation table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
src_hpo_id,
person_id, observation_concept_id, observation_date, observation_datetime, observation_type_concept_id, value_as_number,
value_as_string, value_as_concept_id, qualifier_concept_id, unit_concept_id, provider_id, visit_occurrence_id,
observation_source_value, observation_source_concept_id, unit_source_value, qualifier_source_value, value_source_concept_id,
value_source_value, questionnaire_response_id,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_observation` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_observation`) AS t2
ON
t1.observation_id=t2.observation_id
WHERE
t1.observation_concept_id!=0 AND t1.observation_concept_id IS NOT NULL AND
t1.person_id!=0 and t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
observation = foreign_key_df.groupby(['src_hpo_id']).size().reset_index().rename(
columns={0: 'observation'}).sort_values(["observation"]).set_index("src_hpo_id")
observation = observation.reset_index()
observation
# ## provider table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
provider_name, NPI, DEA, specialty_concept_id, care_site_id, year_of_birth,
gender_concept_id, provider_source_value, specialty_source_value,
specialty_source_concept_id, gender_source_value, gender_source_concept_id,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_provider` AS t1
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5,6,7,8,9,10,11,12
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
# ## device_exposure table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
src_hpo_id,
person_id, device_concept_id, device_exposure_start_date, device_exposure_start_datetime, device_exposure_end_date,
device_exposure_end_datetime, device_type_concept_id, unique_device_id, quantity, provider_id,
visit_occurrence_id, device_source_value, device_source_concept_id,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_device_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_device_exposure`) AS t2
ON
t1.device_exposure_id=t2.device_exposure_id
WHERE
t1.device_concept_id!=0 AND t1.device_concept_id IS NOT NULL AND
t1.person_id!=0 and t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5,6,7,8,9,10,11,12,13,14
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
device_exposure = foreign_key_df.groupby(['src_hpo_id']).size().reset_index().rename(
columns={0: 'device_exposure'}).sort_values(["device_exposure"]).set_index("src_hpo_id")
device_exposure = device_exposure.reset_index()
device_exposure
# ## death table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
death_date, death_datetime, death_type_concept_id, cause_concept_id, cause_source_value, cause_source_concept_id,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_death` AS t1
WHERE
t1.death_date IS NOT NULL AND t1.person_id IS NOT NULL
GROUP BY
1,2,3,4,5,6
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5,6
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
# ## care_site table
# +
######################################
print('Getting the data from the database...')
######################################
foreign_key_df = pd.io.gbq.read_gbq('''
SELECT
place_of_service_concept_id, location_id, place_of_service_source_value,
care_site_name, care_site_source_value,
COUNT(*) as cnt
FROM
`{}.unioned_ehr_care_site` AS t1
GROUP BY
1,2,3,4,5
HAVING
COUNT(*) > 1
ORDER BY
1,2,3,4,5
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard')
print(foreign_key_df.shape[0], 'records received.')
# -
foreign_key_df.head()
# ## Sites combined
sites_success = pd.merge(visit_occurrence, condition_occurrence, how='outer', on='src_hpo_id')
sites_success = pd.merge(sites_success, drug_exposure, how='outer', on='src_hpo_id')
sites_success = pd.merge(sites_success, measurement, how='outer', on='src_hpo_id')
sites_success = pd.merge(sites_success, procedure_occurrence, how='outer', on='src_hpo_id')
sites_success = pd.merge(sites_success, device_exposure, how='outer', on='src_hpo_id')
sites_success = pd.merge(sites_success, observation, how='outer', on='src_hpo_id')
sites_success = pd.merge(sites_success, site_df, how='outer', on='src_hpo_id')
sites_success = sites_success.fillna(0)
sites_success[["visit_occurrence", "condition_occurrence", "drug_exposure", "measurement", "procedure_occurrence",
"device_exposure", "observation"]] \
= sites_success[["visit_occurrence", "condition_occurrence", "drug_exposure", "measurement", "procedure_occurrence",
"device_exposure", "observation"]].astype(int)
sites_success
sites_success.to_csv("data\\duplicates.csv")
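# Note (hedged): the "data\\..." targets are Windows-style paths;
# os.path.join("data", ...) would make these CSV exports portable.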
# # 20.Dataframe (row for each hpo_id) Condition_occurrence table, condition_source_concept_id field
condition_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS condition_total_row
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS condition_well_defined_row
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.condition_concept_id
WHERE
t3.domain_id="Condition" and t3.standard_concept="S"
GROUP BY
1
),
data3 AS (
SELECT
src_hpo_id,
COUNT(*) AS condition_total_zero
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.condition_concept_id
WHERE
(t3.concept_id=0 or t3.concept_id is null)
GROUP BY
1
)
SELECT
data1.src_hpo_id,
condition_well_defined_row,
condition_total_row,
round(100*(condition_well_defined_row/condition_total_row),1) as condition_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
LEFT OUTER JOIN
data3
ON
data1.src_hpo_id=data3.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
condition_concept_df.shape
condition_concept_df = condition_concept_df.fillna(0)
condition_concept_df
# # 21.Dataframe (row for each hpo_id) Procedure_occurrence table, procedure_source_concept_id field
procedure_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS procedure_total_row
FROM
`{}.unioned_ehr_procedure_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_procedure_occurrence`) AS t2
ON
t1.procedure_occurrence_id=t2.procedure_occurrence_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS procedure_well_defined_row
FROM
`{}.unioned_ehr_procedure_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_procedure_occurrence`) AS t2
ON
t1.procedure_occurrence_id=t2.procedure_occurrence_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.procedure_source_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Procedure"
GROUP BY
1
)
SELECT
data1.src_hpo_id,
procedure_well_defined_row,
procedure_total_row,
round(100*(procedure_well_defined_row/procedure_total_row),1) as procedure_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
procedure_concept_df.shape
procedure_concept_df = procedure_concept_df.fillna(0)
procedure_concept_df
# # 22.Dataframe (row for each hpo_id) Drug_exposures table, drug_source_concept_id field
drug_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS drug_total_row
FROM
`{}.unioned_ehr_drug_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_drug_exposure`) AS t2
ON
t1.drug_exposure_id=t2.drug_exposure_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS drug_well_defined_row
FROM
`{}.unioned_ehr_drug_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_drug_exposure`) AS t2
ON
t1.drug_exposure_id=t2.drug_exposure_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.drug_source_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Drug"
GROUP BY
1
)
SELECT
data1.src_hpo_id,
drug_well_defined_row,
drug_total_row,
round(100*(drug_well_defined_row/drug_total_row),1) as drug_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
drug_concept_df.shape
drug_concept_df = drug_concept_df.fillna(0)
drug_concept_df
# # 23.Dataframe (row for each hpo_id) Observation table, Observation_source_concept_id field
#
#
observation_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS observation_total_row
FROM
`{}.unioned_ehr_observation` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_observation`) AS t2
ON
t1.observation_id=t2.observation_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS observation_well_defined_row
FROM
`{}.unioned_ehr_observation` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_observation`) AS t2
ON
t1.observation_id=t2.observation_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.observation_source_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Observation"
GROUP BY
1
)
SELECT
data1.src_hpo_id,
observation_well_defined_row,
observation_total_row,
round(100*(observation_well_defined_row/observation_total_row),1) as observation_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
observation_concept_df.shape
observation_concept_df = observation_concept_df.fillna(0)
observation_concept_df
# # 24.Dataframe (row for each hpo_id) Measurement table, measurement_source_concept_id field
measurement_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS measurement_total_row
FROM
`{}.unioned_ehr_measurement` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_measurement`) AS t2
ON
t1.measurement_id=t2.measurement_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS measurement_well_defined_row
FROM
`{}.unioned_ehr_measurement` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_measurement`) AS t2
ON
t1.measurement_id=t2.measurement_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.measurement_source_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Measurement"
GROUP BY
1
)
SELECT
data1.src_hpo_id,
measurement_well_defined_row,
measurement_total_row,
round(100*(measurement_well_defined_row/measurement_total_row),1) as measurement_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
measurement_concept_df.shape
measurement_concept_df = measurement_concept_df.fillna(0)
measurement_concept_df
# # 25.Dataframe (row for each hpo_id) visit_occurrence table, visit_source_concept_id field
visit_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS visit_total_row
FROM
`{}.unioned_ehr_visit_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_visit_occurrence`) AS t2
ON
t1.visit_occurrence_id=t2.visit_occurrence_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS visit_well_defined_row
FROM
`{}.unioned_ehr_visit_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_visit_occurrence`) AS t2
ON
t1.visit_occurrence_id=t2.visit_occurrence_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.visit_source_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Visit"
GROUP BY
1
)
SELECT
data1.src_hpo_id,
visit_well_defined_row,
visit_total_row,
round(100*(visit_well_defined_row/visit_total_row),1) as visit_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
visit_concept_df.shape
visit_concept_df = visit_concept_df.fillna(0)
visit_concept_df
datas = [
procedure_concept_df, drug_concept_df, observation_concept_df, measurement_concept_df, visit_concept_df
]
master_df = condition_concept_df
for filename in datas:
    master_df = pd.merge(master_df, filename, on='src_hpo_id', how='outer')
master_df
source = pd.merge(master_df, site_df, how='outer', on='src_hpo_id')
source = source.fillna("No Data")
source.to_csv("data\\source.csv")
# # 16.Dataframe (row for each hpo_id) Condition_occurrence table, condition_concept_id field
condition_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS condition_total_row
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS condition_well_defined_row
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.condition_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Condition"
GROUP BY
1
),
data3 AS (
SELECT
src_hpo_id,
COUNT(*) AS condition_total_zeros_or_null
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
WHERE
(t1.condition_concept_id=0 or t1.condition_concept_id IS NULL)
GROUP BY
1
),
data4 AS (
SELECT
src_hpo_id,
COUNT(*) AS condition_total_null
FROM
`{}.unioned_ehr_condition_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_condition_occurrence`) AS t2
ON
t1.condition_occurrence_id=t2.condition_occurrence_id
WHERE
t1.condition_concept_id IS NULL
GROUP BY
1
)
SELECT
data1.src_hpo_id,
condition_well_defined_row,
condition_total_row,
condition_total_zeros_or_null,
condition_total_null,
round(100*(condition_well_defined_row/condition_total_row),1) as condition_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
LEFT OUTER JOIN
data3
ON
data1.src_hpo_id=data3.src_hpo_id
LEFT OUTER JOIN
data4
ON
data1.src_hpo_id=data4.src_hpo_id
ORDER BY
4 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
condition_concept_df.shape
condition_concept_df = condition_concept_df.fillna(0)
condition_concept_df
# # 17.Dataframe (row for each hpo_id) Procedure_occurrence table, procedure_concept_id field
procedure_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS procedure_total_row
FROM
`{}.unioned_ehr_procedure_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_procedure_occurrence`) AS t2
ON
t1.procedure_occurrence_id=t2.procedure_occurrence_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS procedure_well_defined_row
FROM
`{}.unioned_ehr_procedure_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_procedure_occurrence`) AS t2
ON
t1.procedure_occurrence_id=t2.procedure_occurrence_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.procedure_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Procedure"
GROUP BY
1
),
data3 AS (
SELECT
src_hpo_id,
COUNT(*) AS procedure_total_zero_null
FROM
`{}.unioned_ehr_procedure_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_procedure_occurrence`) AS t2
ON
t1.procedure_occurrence_id=t2.procedure_occurrence_id
WHERE
(t1.procedure_concept_id=0 or t1.procedure_concept_id IS NULL)
GROUP BY
1
),
data4 AS (
SELECT
src_hpo_id,
COUNT(*) AS procedure_total_null
FROM
`{}.unioned_ehr_procedure_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_procedure_occurrence`) AS t2
ON
t1.procedure_occurrence_id=t2.procedure_occurrence_id
WHERE
t1.procedure_concept_id IS NULL
GROUP BY
1
)
SELECT
data1.src_hpo_id,
procedure_well_defined_row,
procedure_total_zero_null,
procedure_total_null,
procedure_total_row,
round(100*(procedure_well_defined_row/procedure_total_row),1) as procedure_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
LEFT OUTER JOIN
data3
ON
data1.src_hpo_id=data3.src_hpo_id
LEFT OUTER JOIN
data4
ON
data1.src_hpo_id=data4.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
procedure_concept_df.shape
procedure_concept_df = procedure_concept_df.fillna(0)
procedure_concept_df
# # 18.Dataframe (row for each hpo_id) Drug_exposures table, drug_concept_id field
drug_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS drug_total_row
FROM
`{}.unioned_ehr_drug_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_drug_exposure`) AS t2
ON
t1.drug_exposure_id=t2.drug_exposure_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS drug_well_defined_row
FROM
`{}.unioned_ehr_drug_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_drug_exposure`) AS t2
ON
t1.drug_exposure_id=t2.drug_exposure_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.drug_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Drug"
GROUP BY
1
),
data3 AS (
SELECT
src_hpo_id,
COUNT(*) AS drug_total_zero_null
FROM
`{}.unioned_ehr_drug_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_drug_exposure`) AS t2
ON
t1.drug_exposure_id=t2.drug_exposure_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.drug_concept_id
WHERE
(t1.drug_concept_id=0 OR t1.drug_concept_id IS NULL)
GROUP BY
1
),
data4 AS (
SELECT
src_hpo_id,
COUNT(*) AS drug_total_null
FROM
`{}.unioned_ehr_drug_exposure` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_drug_exposure`) AS t2
ON
t1.drug_exposure_id=t2.drug_exposure_id
WHERE
t1.drug_concept_id IS NULL
GROUP BY
1
)
SELECT
data1.src_hpo_id,
drug_well_defined_row,
drug_total_zero_null,
drug_total_null,
drug_total_row,
round(100*(drug_well_defined_row/drug_total_row),1) as drug_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
LEFT OUTER JOIN
data3
ON
data1.src_hpo_id=data3.src_hpo_id
LEFT OUTER JOIN
data4
ON
data1.src_hpo_id=data4.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard'
)
drug_concept_df.shape
# +
drug_concept_df = drug_concept_df.fillna(0)
drug_concept_df
# -
# # 19.Dataframe (row for each hpo_id) Observation table, Observation_concept_id field
#
observation_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS observation_total_row
FROM
`{}.unioned_ehr_observation` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_observation`) AS t2
ON
t1.observation_id=t2.observation_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS observation_well_defined_row
FROM
`{}.unioned_ehr_observation` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_observation`) AS t2
ON
t1.observation_id=t2.observation_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.observation_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Observation"
GROUP BY
1
),
data3 AS (
SELECT
src_hpo_id,
COUNT(*) AS observation_total_zero_missing
FROM
`{}.unioned_ehr_observation` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_observation`) AS t2
ON
t1.observation_id=t2.observation_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.observation_concept_id
WHERE
(t1.observation_concept_id=0 OR t1.observation_concept_id IS NULL)
GROUP BY
1
),
data4 AS (
SELECT
src_hpo_id,
COUNT(*) AS observation_total_missing
FROM
`{}.unioned_ehr_observation` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_observation`) AS t2
ON
t1.observation_id=t2.observation_id
WHERE
t1.observation_concept_id IS NULL
GROUP BY
1
)
SELECT
data1.src_hpo_id,
observation_total_zero_missing,
observation_total_missing,
observation_well_defined_row,
observation_total_row,
round(100*(observation_well_defined_row/observation_total_row),1) as observation_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
LEFT OUTER JOIN
data3
ON
data1.src_hpo_id=data3.src_hpo_id
LEFT OUTER JOIN
data4
ON
data1.src_hpo_id=data4.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard'
)
observation_concept_df.shape
observation_concept_df = observation_concept_df.fillna(0)
observation_concept_df
# # 20.Dataframe (row for each hpo_id) measurement table, measurement_concept_id field
#
measurement_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS measurement_total_row
FROM
`{}.unioned_ehr_measurement` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_measurement`) AS t2
ON
t1.measurement_id=t2.measurement_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS measurement_well_defined_row
FROM
`{}.unioned_ehr_measurement` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_measurement`) AS t2
ON
t1.measurement_id=t2.measurement_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.measurement_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Measurement"
GROUP BY
1
),
data3 AS (
SELECT
src_hpo_id,
COUNT(*) AS measurement_total_zero_missing
FROM
`{}.unioned_ehr_measurement` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_measurement`) AS t2
ON
t1.measurement_id=t2.measurement_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.measurement_concept_id
WHERE
(t1.measurement_concept_id=0 OR t1.measurement_concept_id IS NULL)
GROUP BY
1
),
data4 AS (
SELECT
src_hpo_id,
COUNT(*) AS measurement_total_missing
FROM
`{}.unioned_ehr_measurement` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_measurement`) AS t2
ON
t1.measurement_id=t2.measurement_id
WHERE
t1.measurement_concept_id IS NULL
GROUP BY
1
)
SELECT
data1.src_hpo_id,
measurement_total_zero_missing,
measurement_total_missing,
measurement_well_defined_row,
measurement_total_row,
round(100*(measurement_well_defined_row/measurement_total_row),1) as measurement_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
LEFT OUTER JOIN
data3
ON
data1.src_hpo_id=data3.src_hpo_id
LEFT OUTER JOIN
data4
ON
data1.src_hpo_id=data4.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET, DATASET, DATASET, DATASET, DATASET),
dialect='standard'
)
measurement_concept_df.shape
measurement_concept_df = measurement_concept_df.fillna(0)
measurement_concept_df
# # 21.Dataframe (row for each hpo_id) visit_occurrence table, visit_concept_id field
visit_concept_df = pd.io.gbq.read_gbq('''
WITH
data1 AS (
SELECT
src_hpo_id,
COUNT(*) AS visit_total_row
FROM
`{}.unioned_ehr_visit_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_visit_occurrence`) AS t2
ON
t1.visit_occurrence_id=t2.visit_occurrence_id
GROUP BY
1
),
data2 AS (
SELECT
src_hpo_id,
COUNT(*) AS visit_well_defined_row
FROM
`{}.unioned_ehr_visit_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_visit_occurrence`) AS t2
ON
t1.visit_occurrence_id=t2.visit_occurrence_id
INNER JOIN
`{}.concept` as t3
ON
t3.concept_id = t1.visit_concept_id
WHERE
t3.standard_concept="S" and t3.domain_id="Visit"
GROUP BY
1
),
data3 AS (
SELECT
src_hpo_id,
COUNT(*) AS visit_total_zero_null
FROM
`{}.unioned_ehr_visit_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_visit_occurrence`) AS t2
ON
t1.visit_occurrence_id=t2.visit_occurrence_id
WHERE
(t1.visit_concept_id=0 or t1.visit_concept_id IS NULL)
GROUP BY
1
),
data4 AS (
SELECT
src_hpo_id,
COUNT(*) AS visit_total_null
FROM
`{}.unioned_ehr_visit_occurrence` AS t1
INNER JOIN
(SELECT
DISTINCT *
FROM
`{}._mapping_visit_occurrence`) AS t2
ON
t1.visit_occurrence_id=t2.visit_occurrence_id
WHERE
t1.visit_concept_id IS NULL
GROUP BY
1
)
SELECT
data1.src_hpo_id,
visit_well_defined_row,
visit_total_zero_null,
visit_total_null,
visit_total_row,
round(100*(visit_well_defined_row/visit_total_row),1) as visit_success_rate
FROM
data1
LEFT OUTER JOIN
data2
ON
data1.src_hpo_id=data2.src_hpo_id
LEFT OUTER JOIN
data3
ON
data1.src_hpo_id=data3.src_hpo_id
LEFT OUTER JOIN
data4
ON
data1.src_hpo_id=data4.src_hpo_id
ORDER BY
1 DESC
'''.format(DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET, DATASET,
DATASET, DATASET),
dialect='standard'
)
visit_concept_df.shape
visit_concept_df = visit_concept_df.fillna(0)
visit_concept_df
# ## Sites combined
# +
datas = [
drug_concept_df,
procedure_concept_df,
condition_concept_df,
measurement_concept_df,
visit_concept_df
]
master_df = observation_concept_df
for filename in datas:
    master_df = pd.merge(master_df, filename, on='src_hpo_id', how='outer')
master_df
success_rate = pd.merge(master_df, site_df, how='outer', on='src_hpo_id')
success_rate
# +
success_rate = success_rate.fillna("No Data")
success_rate
success_rate.to_csv("data\\concept.csv")
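# Note (hedged): fillna("No Data") mixes strings into otherwise numeric
# columns, which is fine for this CSV export but would break any further
# numeric work on success_rate; fill with 0 instead if more math follows.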
# -
| 30.798682 | 132 | 0.547011 | 5,697 | 51,403 | 4.618922 | 0.060032 | 0.109067 | 0.146044 | 0.171316 | 0.818994 | 0.798054 | 0.790226 | 0.781485 | 0.775671 | 0.750361 | 0 | 0.029024 | 0.361224 | 51,403 | 1,668 | 133 | 30.817146 | 0.772377 | 0.034726 | 0 | 0.820641 | 0 | 0.009253 | 0.775855 | 0.227597 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.002847 | null | null | 0.015658 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
21d75e226d59dc23c0abd833b9ed0c454b26c841 | 13,387 | py | Python | old/ACO.py | Ellsom1945/Routing-problem--CVRP | 8f36b7d874d3009d3801d5d2645c56ac135f2aa9 | [
"MIT"
] | 3 | 2021-04-01T07:03:00.000Z | 2021-12-25T08:34:56.000Z | old/ACO.py | Ellsom1945/Routing-problem--CVRP | 8f36b7d874d3009d3801d5d2645c56ac135f2aa9 | [
"MIT"
] | 1 | 2021-12-25T08:39:04.000Z | 2021-12-25T08:39:04.000Z | old/ACO.py | Ellsom1945/Routing-problem--CVRP | 8f36b7d874d3009d3801d5d2645c56ac135f2aa9 | [
"MIT"
] | 1 | 2021-10-03T02:47:16.000Z | 2021-10-03T02:47:16.000Z | # 50 demand points and 5 supply points; each demand point is assigned to its nearest supply point, forming 5 routes
import numpy as np
import matplotlib.pyplot as plt
from numpy import random
# demand point coordinates
coordinates = np.array([[3979, 4854], [2965, 901], [1844, 1979], [2385, 537], [2156, 2169], [2582, 4561], [2920, 4481],\
[2746, 1749], [1116, 364], [736, 2568], [1611, 1313], [3674, 4814], [3556, 3696], [1673, 465], \
[1304, 510], [365, 3962], [2485, 2505], [967, 2414], [4771, 1303], [683, 564], [3876, 2460], \
[3319, 4193], [3449, 2322], [457, 3422], [2702, 3892], [1778, 3699], [2251, 2849], [2384, 1894],\
[917, 3749], [878, 835], [1841, 1616], [2538, 1560], [2582, 3891], [817, 1786], [3040, 2736], [1498, 706],\
[4851, 4512], [2139, 4515], [89, 1686], [4962, 4457], [1275, 5], [1836, 665], [988, 701], [965, 547], [3143, 3909],\
[1081, 3319], [640, 2566], [1694, 938], [4702, 1536], [2826, 4625]])
# supply point coordinates
coordinates2 = np.array([[3987, 2398], [1273, 3380],[4622, 766],[974, 207], [1377, 823]])
def getdistmat1(coordinates, coordinates2):
    num = coordinates.shape[0]  # number of rows in the matrix
    distmat1 = np.zeros((50, 5))  # all-zero distance matrix
    for i in range(50):
        for j in range(0, 5):  # Euclidean (2-norm) distance via the array norm
            distmat1[i][j] = np.linalg.norm(coordinates[i] - coordinates2[j])
    return distmat1
distmat1=getdistmat1(coordinates,coordinates2)
gather = []
for i in range(5):
    gather.append([])
for i in range(0, 50):
    gather[np.argwhere(distmat1 == min(distmat1[i]))[0][1]].append(coordinates[i])
sumpath=0
for z in range(0, len(gather)):
    l = z
    a = len(gather[l])
    gather[l].append(coordinates2[l])

    def getdistmat(coordinates):
        num = len(coordinates)  # number of rows in the matrix
        distmat = np.zeros((num, num))  # all-zero matrix
        for i in range(num):
            for j in range(i, num):  # Euclidean (2-norm) distance via the array norm
                distmat[i][j] = distmat[j][i] = \
                    np.linalg.norm(coordinates[i] - coordinates[j])
        return distmat

    distmat = np.array(getdistmat(gather[l]))  # distance matrix
    numant = 2 * a  # number of ants
    numplace = a + 1  # number of nodes (demand points plus the supply depot)
    alpha = 1  # pheromone importance factor
    beta = 5  # heuristic importance factor
    rho = 0.1  # pheromone evaporation rate
    Q = 1  # pheromone deposit constant
    iter = 0  # iteration counter
    itermax = 100  # total number of iterations
    # heuristic matrix; the diagonal is set to 1e10 so staying in place is never
    # desirable -- etatable[i][j] is the desirability of moving from i to j
    etatable = 1.0 / (distmat + np.diag([1e10] * numplace))
    # pheromone matrix
    pheromonetable = np.ones((numplace, numplace))  # all-ones matrix
    pathtable = np.zeros((numant, numplace)).astype(int)  # path record table
    distmat = np.array(getdistmat(gather[l]))
    lengthaver = np.zeros(itermax)  # mean path length per generation
    lengthbest = np.zeros(itermax)  # best path length found up to each generation
    pathbest = np.zeros((itermax, numplace))  # best path (node indices) per generation
    while iter < itermax:
        if numant <= numplace:
            # random permutation of starting nodes
            pathtable[:, 0] = np.random.permutation(range(0, numplace))[:numant]
        else:  # place the ants randomly on the demand points
            pathtable[:numplace, 0] = np.random.permutation(range(0, numplace))[:]
            pathtable[numplace:, 0] = \
                np.random.permutation(range(0, numplace))[:numant - numplace]
        length = np.zeros(numant)  # path length of each ant
        for i in range(numant):
            visiting = pathtable[i, 0]  # current location
            unvisited = set(range(numplace))  # nodes not yet visited
            unvisited.remove(visiting)  # drop the node we start from
            for j in range(1, numplace):  # roulette-wheel choice of the next node
                listunvisited = list(unvisited)
                probtrans = np.zeros(len(listunvisited))
                for k in range(len(listunvisited)):
                    probtrans[k] = \
                        np.power(pheromonetable[visiting][listunvisited[k]], alpha) \
                        * np.power(etatable[visiting][listunvisited[k]], beta)
                # transition probabilities of this ant to each remaining node
                cumsumprobtrans = (probtrans / sum(probtrans)).cumsum()
                cumsumprobtrans -= np.random.rand()
                k = listunvisited[list(cumsumprobtrans > 0).index(True)]  # next city
                pathtable[i, j] = k
                unvisited.remove(k)
                # add the distance to city k
                length[i] += distmat[visiting][k]
                visiting = k
            # close the ant's tour back to its starting node
            length[i] += distmat[visiting][pathtable[i, 0]]
        # average path length of this generation
        lengthaver[iter] = length.mean()
        # keep the best path found so far
        if iter == 0:
            lengthbest[iter] = length.min()
            pathbest[iter] = pathtable[length.argmin()].copy()
        else:
            if length.min() > lengthbest[iter - 1]:
                lengthbest[iter] = lengthbest[iter - 1]
                pathbest[iter] = pathbest[iter - 1].copy()
            else:
                lengthbest[iter] = length.min()
                pathbest[iter] = pathtable[length.argmin()].copy()
        # update the pheromone
        changepheromonetable = np.zeros((numplace, numplace))
        for i in range(numant):
            for j in range(numplace - 1):
                changepheromonetable[pathtable[i, j]][pathtable[i, j + 1]] += \
                    Q / distmat[pathtable[i, j]][pathtable[i, j + 1]]
            changepheromonetable[pathtable[i, j + 1]][pathtable[i, 0]] += \
                Q / distmat[pathtable[i, j + 1]][pathtable[i, 0]]
        # pheromone update rule: evaporation plus the new deposits
        pheromonetable = (1 - rho) * pheromonetable + changepheromonetable
        iter += 1
        print("this iteration end:", iter)
        if (iter - 1) % 20 == 0:
            print("schedule:", iter - 1)
    # plot the best path found
    bestpath = pathbest[-1]
    for i in range(0, a):
        plt.plot(gather[l][i][0], gather[l][i][1], 'r', marker=u'$\cdot$')
    plt.xlim([-100, 5000])
    plt.ylim([-100, 5000])
    for i in range(numplace - 1):
        m, n = int(bestpath[i]), int(bestpath[i + 1])
        print("best-path", m, n)
        plt.plot([gather[l][m][0], gather[l][n][0]],
                 [gather[l][m][1], gather[l][n][1]], 'k')
    plt.plot([gather[l][int(bestpath[numplace - 1])][0], gather[l][int(bestpath[0])][0]],
             [gather[l][int(bestpath[numplace - 1])][1], gather[l][int(bestpath[0])][1]], 'k')
    plt.plot(gather[l][a][0], gather[l][a][1], 'ob')
    # original indexed lengthbest[a - 1]; [-1] takes the final best length
    sumpath += lengthbest[-1]
    ax = plt.gca()
    ax.set_title("Best Path")
    ax.set_xlabel('X axis')
    ax.set_ylabel('Y_axis')
    plt.savefig('Best Path.png', dpi=500, bbox_inches='tight')
    plt.show()
print(sumpath)
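
# Hedged aside (illustration only, not part of the original script): the
# manual cumsum-minus-random roulette wheel above is equivalent to sampling
# an index with probability proportional to its weight, which numpy provides
# directly:
demo_weights = np.array([0.2, 0.5, 0.3])
demo_weights = demo_weights / demo_weights.sum()  # normalize like probtrans
demo_pick = np.random.choice([10, 20, 30], p=demo_weights)  # picks 20 about half the time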
# 50 demand points and 50 supply points; each demand point is assigned to its nearest supply point, forming routes
import numpy as np
import matplotlib.pyplot as plt
from numpy import random
# demand point coordinates
coordinates = np.array([[3979, 4854], [2965, 901], [1844, 1979], [2385, 537], [2156, 2169], [2582, 4561], [2920, 4481],\
[2746, 1749], [1116, 364], [736, 2568], [1611, 1313], [3674, 4814], [3556, 3696], [1673, 465], \
[1304, 510], [365, 3962], [2485, 2505], [967, 2414], [4771, 1303], [683, 564], [3876, 2460], \
[3319, 4193], [3449, 2322], [457, 3422], [2702, 3892], [1778, 3699], [2251, 2849], [2384, 1894],\
[917, 3749], [878, 835], [1841, 1616], [2538, 1560], [2582, 3891], [817, 1786], [3040, 2736], [1498, 706],\
[4851, 4512], [2139, 4515], [89, 1686], [4962, 4457], [1275, 5], [1836, 665], [988, 701], [965, 547], [3143, 3909],\
[1081, 3319], [640, 2566], [1694, 938], [4702, 1536], [2826, 4625]])
# supply point coordinates
coordinates2 = np.array([[3322, 58], [3987, 2398], [3144, 417], [1273, 3380], [2792, 526], [2759, 3258],\
[2390, 4410], [3368, 2957], [841, 4658], [4674, 3347], [2749, 2452], [2237, 3424], [3086, 1432], [2160, 2810],\
[4622, 766], [3330, 4004], [4150, 3170], [3429, 4197], [1991, 2780], [1656, 383], [974, 207], [4907, 1616],\
[1377, 823], [3214, 4037], [4159, 3570], [2296, 14], [3110, 1510], [2577, 2966], [4255, 2547], [2637, 1885],\
[1406, 4309], [2450, 3962], [4295, 1183], [4369, 2409], [939, 967], [3699, 2823], [1711, 2909], [1462, 3568],\
[793, 4057], [4240, 1848], [4410, 2969], [1803, 3053], [1141, 328], [225, 4181], [674, 4990], [3913, 328], [2708, 3970],\
[3199, 188], [3273, 526], [1531, 1774]])
def getdistmat1(coordinates, coordinates2):
    num = coordinates.shape[0]  # number of rows in the matrix
    distmat1 = np.zeros((50, 50))  # all-zero distance matrix
    for i in range(50):
        for j in range(50):  # Euclidean (2-norm) distance via the array norm
            distmat1[i][j] = np.linalg.norm(coordinates[i] - coordinates2[j])
    return distmat1
distmat1=getdistmat1(coordinates,coordinates2)
gather = []
for i in range(50):
    gather.append([])
for i in range(0, 50):
    gather[np.argwhere(distmat1 == min(distmat1[i]))[0][1]].append(coordinates[i])
sumpath=0
for z in range(0, len(gather)):
    l = z
    a = len(gather[l])
    if a == 0:
        continue
    gather[l].append(coordinates2[l])

    def getdistmat(coordinates):
        num = len(coordinates)  # number of rows in the matrix
        distmat = np.zeros((num, num))  # all-zero matrix
        for i in range(num):
            for j in range(i, num):  # Euclidean (2-norm) distance via the array norm
                distmat[i][j] = distmat[j][i] = \
                    np.linalg.norm(coordinates[i] - coordinates[j])
        return distmat

    distmat = np.array(getdistmat(gather[l]))  # distance matrix
    numant = 2 * a  # number of ants
    numplace = a + 1  # number of nodes (demand points plus the supply depot)
    alpha = 1  # pheromone importance factor
    beta = 5  # heuristic importance factor
    rho = 0.1  # pheromone evaporation rate
    Q = 1  # pheromone deposit constant
    iter = 0  # iteration counter
    itermax = 50  # total number of iterations
    # heuristic matrix; the diagonal is set to 1e10 so staying in place is never
    # desirable -- etatable[i][j] is the desirability of moving from i to j
    etatable = 1.0 / (distmat + np.diag([1e10] * numplace))
    # pheromone matrix
    pheromonetable = np.ones((numplace, numplace))  # all-ones matrix
    pathtable = np.zeros((numant, numplace)).astype(int)  # path record table
    distmat = np.array(getdistmat(gather[l]))
    lengthaver = np.zeros(itermax)  # mean path length per generation
    lengthbest = np.zeros(itermax)  # best path length found up to each generation
    pathbest = np.zeros((itermax, numplace))  # best path (node indices) per generation
    while iter < itermax:
        if numant <= numplace:
            # random permutation of starting nodes
            pathtable[:, 0] = np.random.permutation(range(0, numplace))[:numant]
        else:  # place the ants randomly on the demand points
            pathtable[:numplace, 0] = np.random.permutation(range(0, numplace))[:]
            pathtable[numplace:, 0] = \
                np.random.permutation(range(0, numplace))[:numant - numplace]
        length = np.zeros(numant)  # path length of each ant
        for i in range(numant):
            visiting = pathtable[i, 0]  # current location
            unvisited = set(range(numplace))  # nodes not yet visited
            unvisited.remove(visiting)  # drop the node we start from
            for j in range(1, numplace):  # roulette-wheel choice of the next node
                listunvisited = list(unvisited)
                probtrans = np.zeros(len(listunvisited))
                for k in range(len(listunvisited)):
                    probtrans[k] = \
                        np.power(pheromonetable[visiting][listunvisited[k]], alpha) \
                        * np.power(etatable[visiting][listunvisited[k]], beta)
                # transition probabilities of this ant to each remaining node
                cumsumprobtrans = (probtrans / sum(probtrans)).cumsum()
                cumsumprobtrans -= np.random.rand()
                k = listunvisited[list(cumsumprobtrans > 0).index(True)]  # next city
                pathtable[i, j] = k
                unvisited.remove(k)
                # add the distance to city k
                length[i] += distmat[visiting][k]
                visiting = k
            # close the ant's tour back to its starting node
            length[i] += distmat[visiting][pathtable[i, 0]]
        # average path length of this generation
        lengthaver[iter] = length.mean()
        # keep the best path found so far
        if iter == 0:
            lengthbest[iter] = length.min()
            pathbest[iter] = pathtable[length.argmin()].copy()
        else:
            if length.min() > lengthbest[iter - 1]:
                lengthbest[iter] = lengthbest[iter - 1]
                pathbest[iter] = pathbest[iter - 1].copy()
            else:
                lengthbest[iter] = length.min()
                pathbest[iter] = pathtable[length.argmin()].copy()
        # update the pheromone
        changepheromonetable = np.zeros((numplace, numplace))
        for i in range(numant):
            for j in range(numplace - 1):
                changepheromonetable[pathtable[i, j]][pathtable[i, j + 1]] += \
                    Q / distmat[pathtable[i, j]][pathtable[i, j + 1]]
            changepheromonetable[pathtable[i, j + 1]][pathtable[i, 0]] += \
                Q / distmat[pathtable[i, j + 1]][pathtable[i, 0]]
        # pheromone update rule: evaporation plus the new deposits
        pheromonetable = (1 - rho) * pheromonetable + changepheromonetable
        iter += 1
        print("this iteration end:", iter)
        if (iter - 1) % 20 == 0:
            print("schedule:", iter - 1)
    # plot the best path found
    bestpath = pathbest[-1]
    for i in range(0, a):
        plt.plot(gather[l][i][0], gather[l][i][1], 'r', marker=u'$\cdot$')
    plt.xlim([-100, 5000])
    plt.ylim([-100, 5000])
    for i in range(numplace - 1):
        m, n = int(bestpath[i]), int(bestpath[i + 1])
        print("best-path", m, n)
        plt.plot([gather[l][m][0], gather[l][n][0]],
                 [gather[l][m][1], gather[l][n][1]], 'k')
    plt.plot([gather[l][int(bestpath[numplace - 1])][0], gather[l][int(bestpath[0])][0]],
             [gather[l][int(bestpath[numplace - 1])][1], gather[l][int(bestpath[0])][1]], 'k')
    plt.plot(gather[l][a][0], gather[l][a][1], 'ob')
    # original indexed lengthbest[a - 1]; [-1] takes the final best length
    sumpath += lengthbest[-1]
    ax = plt.gca()
    ax.set_title("Best Path")
    ax.set_xlabel('X axis')
    ax.set_ylabel('Y_axis')
    plt.savefig('Best Path.png', dpi=500, bbox_inches='tight')
    plt.show()
    plt.close()
print(sumpath)
| 38.802899 | 146 | 0.543288 | 1,603 | 13,387 | 4.53088 | 0.223955 | 0.032769 | 0.013218 | 0.024232 | 0.92689 | 0.926614 | 0.926614 | 0.926614 | 0.926614 | 0.926614 | 0 | 0.146344 | 0.28692 | 13,387 | 344 | 147 | 38.915698 | 0.614498 | 0.052663 | 0 | 0.929961 | 0 | 0 | 0.013969 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015564 | false | 0 | 0.023346 | 0 | 0.054475 | 0.031128 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
33d3cfcbb96eaf088e49da00210c6009e2cf1d89 | 1,610 | py | Python | scripts/data-preparation/filewriter.py | Usprogis/LTEC | 8ab588e8b020fce9a34377823009481549228b4c | [
"MIT"
] | 6 | 2020-05-25T12:46:41.000Z | 2022-01-24T05:21:36.000Z | scripts/data-preparation/filewriter.py | Usprogis/LTEC | 8ab588e8b020fce9a34377823009481549228b4c | [
"MIT"
] | 1 | 2020-05-29T03:17:10.000Z | 2020-05-29T10:37:49.000Z | scripts/data-preparation/filewriter.py | Usprogis/LTEC | 8ab588e8b020fce9a34377823009481549228b4c | [
"MIT"
] | 2 | 2021-01-07T17:14:26.000Z | 2021-03-04T19:32:48.000Z | x = open("tweetsFinal.json", "w")
x.write("[\n")
# copy each yearly/quarterly dump into the combined file
filenames = [
    "tweets2011.json", "tweets2012.json",
    "tweets2013Q1.json", "tweets2013Q2.json", "tweets2013Q3.json", "tweets2013Q4.json",
    "tweets2014Q1.json", "tweets2014Q2.json", "tweets2014Q3.json", "tweets2014Q4.json",
    "tweets2015Q1.json", "tweets2015Q2.json",
    "tweets2016Q1.json", "tweets2016Q2.json",
    "tweets2017.json", "tweets2018.json", "tweets2019.json", "tweets2020.json",
]
for filename in filenames:
    read = open(filename, "r")
    for line in read:
        x.write(line)
    read.close()
x.write("]")
x.close() | 14 | 37 | 0.654658 | 264 | 1,610 | 3.992424 | 0.117424 | 0.113852 | 0.136622 | 0.204934 | 0.760911 | 0.760911 | 0.760911 | 0.760911 | 0.760911 | 0.760911 | 0 | 0.061448 | 0.150932 | 1,610 | 115 | 38 | 14 | 0.709583 | 0 | 0 | 0.710526 | 0 | 0 | 0.013035 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
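Note that wrapping the concatenated files in `[` and `]` only yields valid JSON if the inputs already carry the separating commas. A hedged sketch of a merge that parses and re-serializes instead, reusing the filenames list from the script above and assuming each input file holds a JSON array:

import json

merged = []
for filename in filenames:
    with open(filename, "r") as handle:
        merged.extend(json.load(handle))  # assumes each file is a JSON array
with open("tweetsFinal.json", "w") as out:
    json.dump(merged, out)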
33e33b18bde27a71d600a21631be5002636e47d5 | 3,989 | py | Python | app/test/test_cloudformation.py | troydieter/aws-auto-cleanup | 523bae5cc57b81d3a2f0d43c87b9f1ef5390e3a4 | [
"MIT"
] | 322 | 2019-04-15T01:59:57.000Z | 2022-03-09T00:06:55.000Z | app/test/test_cloudformation.py | troydieter/aws-auto-cleanup | 523bae5cc57b81d3a2f0d43c87b9f1ef5390e3a4 | [
"MIT"
] | 70 | 2019-04-15T01:27:21.000Z | 2022-03-02T00:39:29.000Z | app/test/test_cloudformation.py | troydieter/aws-auto-cleanup | 523bae5cc57b81d3a2f0d43c87b9f1ef5390e3a4 | [
"MIT"
] | 49 | 2019-04-15T06:36:42.000Z | 2022-01-17T11:37:32.000Z | import datetime
import logging
import moto
import pytest
from .. import cloudformation_cleanup
class TestStacksMoreThanTTL:
    @pytest.fixture
    def test_class(self):
        with moto.mock_cloudformation():
            whitelist = {}
            settings = {
                "general": {"dry_run": False},
                "services": {"cloudformation": {"stacks": {"clean": True, "ttl": -1}}},
            }
            execution_log = {"AWS": {}}
            test_class = cloudformation_cleanup.CloudFormationCleanup(
                logging, whitelist, settings, execution_log, "ap-southeast-2"
            )
            yield test_class

    def test(self, test_class):
        # create test stack
        test_class.client_cloudformation.create_stack(
            StackName="sample-sqs",
            TemplateBody='{"Resources":{"SQSQueue":{"Type":"AWS::SQS::Queue","Properties":{"QueueName":"test_queue"}}}}',
        )

        # validate stack creation
        response = test_class.client_cloudformation.list_stacks()
        assert response["StackSummaries"][0]["StackName"] == "sample-sqs"

        # test stacks functions
        test_class.stacks()

        # validate stack deletion
        response = test_class.client_cloudformation.list_stacks()
        assert response["StackSummaries"][0]["StackStatus"] == "DELETE_COMPLETE"


class TestStacksLessThanTTL:
    @pytest.fixture
    def test_class(self):
        with moto.mock_cloudformation():
            whitelist = {}
            settings = {
                "general": {"dry_run": False},
                "services": {
                    "cloudformation": {"stacks": {"clean": True, "ttl": 5000}}
                },
            }
            execution_log = {"AWS": {}}
            test_class = cloudformation_cleanup.CloudFormationCleanup(
                logging, whitelist, settings, execution_log, "ap-southeast-2"
            )
            yield test_class

    def test(self, test_class):
        # create test stack
        test_class.client_cloudformation.create_stack(
            StackName="sample-sqs",
            TemplateBody='{"Resources":{"SQSQueue":{"Type":"AWS::SQS::Queue","Properties":{"QueueName":"test_queue"}}}}',
        )

        # validate stack creation
        response = test_class.client_cloudformation.list_stacks()
        assert response["StackSummaries"][0]["StackName"] == "sample-sqs"

        # test stacks functions
        test_class.stacks()

        # validate stack not deleted
        response = test_class.client_cloudformation.list_stacks()
        assert response["StackSummaries"][0]["StackStatus"] == "CREATE_COMPLETE"


class TestStacksWhitelist:
    @pytest.fixture
    def test_class(self):
        with moto.mock_cloudformation():
            whitelist = {"cloudformation": {"stack": ["sample-sqs"]}}
            settings = {
                "general": {"dry_run": False},
                "services": {
                    "cloudformation": {"stacks": {"clean": True, "ttl": 5000}}
                },
            }
            execution_log = {"AWS": {}}
            test_class = cloudformation_cleanup.CloudFormationCleanup(
                logging, whitelist, settings, execution_log, "ap-southeast-2"
            )
            yield test_class

    def test(self, test_class):
        # create test stack
        test_class.client_cloudformation.create_stack(
            StackName="sample-sqs",
            TemplateBody='{"Resources":{"SQSQueue":{"Type":"AWS::SQS::Queue","Properties":{"QueueName":"test_queue"}}}}',
        )

        # validate stack creation
        response = test_class.client_cloudformation.list_stacks()
        assert response["StackSummaries"][0]["StackName"] == "sample-sqs"

        # test stacks functions
        test_class.stacks()

        # validate stack not deleted
        response = test_class.client_cloudformation.list_stacks()
        assert response["StackSummaries"][0]["StackStatus"] == "CREATE_COMPLETE"
| 33.241667 | 121 | 0.587365 | 354 | 3,989 | 6.437853 | 0.186441 | 0.094778 | 0.059237 | 0.114524 | 0.909171 | 0.909171 | 0.909171 | 0.909171 | 0.909171 | 0.909171 | 0 | 0.00632 | 0.286037 | 3,989 | 119 | 122 | 33.521008 | 0.79389 | 0.067435 | 0 | 0.728395 | 0 | 0 | 0.204478 | 0.075263 | 0 | 0 | 0 | 0 | 0.074074 | 1 | 0.074074 | false | 0 | 0.061728 | 0 | 0.17284 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d50eeee2cf8ff9e689b8108db7112711a624f31b | 12,322 | py | Python | tests/test_profile_creator.py | fhightower-tc/threatconnect-doublecheck | 39caefb2d292c4a1080188e39598e094233882b1 | [
"MIT"
] | null | null | null | tests/test_profile_creator.py | fhightower-tc/threatconnect-doublecheck | 39caefb2d292c4a1080188e39598e094233882b1 | [
"MIT"
] | null | null | null | tests/test_profile_creator.py | fhightower-tc/threatconnect-doublecheck | 39caefb2d292c4a1080188e39598e094233882b1 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from tc_dc import profile_creator
from .test_profile_validation import data_1_a, data_1_b
DATA = [
    {'address': 'guangluma@hotmail.com',
     'associations': {
         'groups': [
             {'dateAdded': '2018-06-29T12:33:55Z',
              'id': 1132777716,
              'name': 'GrьЯe',
              'ownerName': 'Technical Blogs and Reports',
              'type': 'Incident',
              'webLink': 'https://app.threatconnect.com/auth/incident/incident.xhtml?incident=1132777716'}],
         'indicators': [
             {'confidence': 63,
              'dateAdded': '2018-06-29T12:33:59Z',
              'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
              'id': 1132778073,
              'lastModified': '2018-09-27T13:40:56Z',
              'ownerName': 'Technical Blogs and Reports',
              'rating': 2.0,
              'summary': 'guanglum70@gmail.com',
              'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guanglum70%40gmail.com&owner=Technical+Blogs+and+Reports'}]},
     'attribute': [
         {'dateAdded': '2018-06-29T12:34:03Z',
          'displayed': True,
          'id': 1132778206,
          'lastModified': '2018-06-29T12:34:03Z',
          'type': 'Source',
          'value': 'http://spam.tamagothi.de/2018/06/29/gre-2/'},
         {'dateAdded': '2018-06-29T12:34:03Z',
          'displayed': True,
          'id': 1132778193,
          'lastModified': '2018-06-29T12:34:03Z',
          'type': 'Description',
          'value': 'This indicator appears in a post from Tamagothi Daily Spam.'}],
     'confidence': 63,
     'dateAdded': '2018-06-29T12:34:03Z',
     'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
     'id': 1132778184,
     'lastModified': '2018-09-27T13:40:56Z',
     'name': 'guangluma@hotmail.com',
     'owner': {'id': 10666,
               'name': 'Technical Blogs and Reports',
               'type': 'Source'},
     'rating': 2.0,
     'source': 'http://spam.tamagothi.de/2018/06/29/gre-2/',
     'tag': [
         {'name': 'Mail',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Mail&owner=Technical+Blogs+and+Reports'},
         {'name': 'BLOG: Tamagothi Daily Spam',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=BLOG%3A+Tamagothi+Daily+Spam&owner=Technical+Blogs+and+Reports'},
         {'name': '419',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=419&owner=Technical+Blogs+and+Reports'},
         {'name': 'gmail.com',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=gmail.com&owner=Technical+Blogs+and+Reports'},
         {'name': 'Geschäftsvorschlag',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Gesch%C3%A4ftsvorschlag&owner=Technical+Blogs+and+Reports'}],
     'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guangluma%40hotmail.com&owner=Technical+Blogs+and+Reports'},
    {'address': 'guanglum70@gmail.com',
     'associations': {
         'groups': [
             {'dateAdded': '2018-06-29T12:33:55Z',
              'id': 1132777716,
              'name': 'GrьЯe',
              'ownerName': 'Technical Blogs and Reports',
              'type': 'Incident',
              'webLink': 'https://app.threatconnect.com/auth/incident/incident.xhtml?incident=1132777716'}],
         'indicators': [
             {'confidence': 63,
              'dateAdded': '2018-06-29T12:34:03Z',
              'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
              'id': 1132778184,
              'lastModified': '2018-09-27T13:40:56Z',
              'ownerName': 'Technical Blogs and Reports',
              'rating': 2.0,
              'summary': 'guangluma@hotmail.com',
              'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guangluma%40hotmail.com&owner=Technical+Blogs+and+Reports'}]},
     'attribute': [
         {'dateAdded': '2018-06-29T12:34:00Z',
          'displayed': True,
          'id': 1132778099,
          'lastModified': '2018-06-29T12:34:00Z',
          'type': 'Source',
          'value': 'http://spam.tamagothi.de/2018/06/29/gre-2/'},
         {'dateAdded': '2018-06-29T12:34:00Z',
          'displayed': True,
          'id': 1132778085,
          'lastModified': '2018-06-29T12:34:00Z',
          'type': 'Description',
          'value': 'This indicator appears in a post from Tamagothi Daily Spam.'}],
     'confidence': 63,
     'dateAdded': '2018-06-29T12:33:59Z',
     'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
     'id': 1132778073,
     'lastModified': '2018-09-27T13:40:56Z',
     'name': 'guanglum70@gmail.com',
     'owner': {'id': 10666,
               'name': 'Technical Blogs and Reports',
               'type': 'Source'},
     'rating': 2.0,
     'source': 'http://spam.tamagothi.de/2018/06/29/gre-2/',
     'tag': [
         {'name': 'Mail',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Mail&owner=Technical+Blogs+and+Reports'},
         {'name': 'BLOG: Tamagothi Daily Spam',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=BLOG%3A+Tamagothi+Daily+Spam&owner=Technical+Blogs+and+Reports'},
         {'name': '419',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=419&owner=Technical+Blogs+and+Reports'},
         {'name': 'gmail.com',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=gmail.com&owner=Technical+Blogs+and+Reports'},
         {'name': 'Geschäftsvorschlag',
          'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Gesch%C3%A4ftsvorschlag&owner=Technical+Blogs+and+Reports'}],
     'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guanglum70%40gmail.com&owner=Technical+Blogs+and+Reports'}]


def test_profile_creator_1():
    profile = profile_creator.create_profile(DATA)
    assert profile == {'settings': {'all': {'attributes': {'required': [{'type': 'Source', 'value': ''}, {'type': 'Description', 'value': ''}], 'desired': []}, 'associations': {'required': [{'type': 'Incident'}], 'desired': []}, 'tags': {'required': ['Mail', 'BLOG: Tamagothi Daily Spam', '419', 'gmail.com', 'Geschäftsvorschlag'], 'desired': []}}}}
def test_profile_creator_differentiate_required_and_desired():
    data = [
        {'address': 'guangluma@hotmail.com',
         'associations': {
             'groups': [
                 {'dateAdded': '2018-06-29T12:33:55Z',
                  'id': 1132777716,
                  'name': 'GrьЯe',
                  'ownerName': 'Technical Blogs and Reports',
                  'type': 'Incident',
                  'webLink': 'https://app.threatconnect.com/auth/incident/incident.xhtml?incident=1132777716'}],
             'indicators': [
                 {'confidence': 63,
                  'dateAdded': '2018-06-29T12:33:59Z',
                  'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
                  'id': 1132778073,
                  'lastModified': '2018-09-27T13:40:56Z',
                  'ownerName': 'Technical Blogs and Reports',
                  'rating': 2.0,
                  'summary': 'guanglum70@gmail.com',
                  'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guanglum70%40gmail.com&owner=Technical+Blogs+and+Reports'}]},
         'attribute': [
             {'dateAdded': '2018-06-29T12:34:03Z',
              'displayed': True,
              'id': 1132778193,
              'lastModified': '2018-06-29T12:34:03Z',
              'type': 'Description',
              'value': 'This indicator appears in a post from Tamagothi Daily Spam.'}],
         'confidence': 63,
         'dateAdded': '2018-06-29T12:34:03Z',
         'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
         'id': 1132778184,
         'lastModified': '2018-09-27T13:40:56Z',
         'name': 'guangluma@hotmail.com',
         'owner': {'id': 10666,
                   'name': 'Technical Blogs and Reports',
                   'type': 'Source'},
         'rating': 2.0,
         'source': 'http://spam.tamagothi.de/2018/06/29/gre-2/',
         'tag': [
             {'name': 'Mail',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Mail&owner=Technical+Blogs+and+Reports'},
             {'name': 'BLOG: Tamagothi Daily Spam',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=BLOG%3A+Tamagothi+Daily+Spam&owner=Technical+Blogs+and+Reports'},
             {'name': '419',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=419&owner=Technical+Blogs+and+Reports'},
             {'name': 'gmail.com',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=gmail.com&owner=Technical+Blogs+and+Reports'},
             {'name': 'Geschäftsvorschlag',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Gesch%C3%A4ftsvorschlag&owner=Technical+Blogs+and+Reports'}],
         'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guangluma%40hotmail.com&owner=Technical+Blogs+and+Reports'},
        {'address': 'guanglum70@gmail.com',
         'associations': {
             'groups': [
                 {'dateAdded': '2018-06-29T12:33:55Z',
                  'id': 1132777716,
                  'name': 'GrьЯe',
                  'ownerName': 'Technical Blogs and Reports',
                  'type': 'Incident',
                  'webLink': 'https://app.threatconnect.com/auth/incident/incident.xhtml?incident=1132777716'}],
             'indicators': [
                 {'confidence': 63,
                  'dateAdded': '2018-06-29T12:34:03Z',
                  'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
                  'id': 1132778184,
                  'lastModified': '2018-09-27T13:40:56Z',
                  'ownerName': 'Technical Blogs and Reports',
                  'rating': 2.0,
                  'summary': 'guangluma@hotmail.com',
                  'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guangluma%40hotmail.com&owner=Technical+Blogs+and+Reports'}]},
         'attribute': [
             {'dateAdded': '2018-06-29T12:34:00Z',
              'displayed': True,
              'id': 1132778099,
              'lastModified': '2018-06-29T12:34:00Z',
              'type': 'Source',
              'value': 'http://spam.tamagothi.de/2018/06/29/gre-2/'},
             {'dateAdded': '2018-06-29T12:34:00Z',
              'displayed': True,
              'id': 1132778085,
              'lastModified': '2018-06-29T12:34:00Z',
              'type': 'Description',
              'value': 'This indicator appears in a post from Tamagothi Daily Spam.'}],
         'confidence': 63,
         'dateAdded': '2018-06-29T12:33:59Z',
         'description': 'This indicator appears in a post from Tamagothi Daily Spam.',
         'id': 1132778073,
         'lastModified': '2018-09-27T13:40:56Z',
         'name': 'guanglum70@gmail.com',
         'owner': {'id': 10666,
                   'name': 'Technical Blogs and Reports',
                   'type': 'Source'},
         'rating': 2.0,
         'source': 'http://spam.tamagothi.de/2018/06/29/gre-2/',
         'tag': [
             {'name': 'Mail',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Mail&owner=Technical+Blogs+and+Reports'},
             {'name': 'BLOG: Tamagothi Daily Spam',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=BLOG%3A+Tamagothi+Daily+Spam&owner=Technical+Blogs+and+Reports'},
             {'name': '419',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=419&owner=Technical+Blogs+and+Reports'},
             {'name': 'Geschäftsvorschlag',
              'webLink': 'https://app.threatconnect.com/auth/tags/tag.xhtml?tag=Gesch%C3%A4ftsvorschlag&owner=Technical+Blogs+and+Reports'}],
         'webLink': 'https://app.threatconnect.com/auth/indicators/details/emailaddress.xhtml?emailaddress=guanglum70%40gmail.com&owner=Technical+Blogs+and+Reports'}]
    profile = profile_creator.create_profile(data)
    assert profile == {'settings': {'all': {'attributes': {'required': [{'type': 'Description', 'value': ''}], 'desired': [{'type': 'Source', 'value': ''}]}, 'associations': {'required': [{'type': 'Incident'}], 'desired': []}, 'tags': {'required': ['Mail', 'BLOG: Tamagothi Daily Spam', '419', 'Geschäftsvorschlag'], 'desired': ['gmail.com']}}}}
# TODO: get the functions below working - the current problem is that the associations in the data from data_1_a and data_1_b are not in the same format as the one returned by democritus (which can be seen in the tests above)
def test_profile_creator_a():
    data = data_1_a()
    profile = profile_creator.create_profile(data)
    assert len(profile) == 1
    assert profile == {'settings': {'all': {'attributes': {'required': [{'type': 'Description', 'value': ''}, {'type': 'Source', 'value': ''}, {'type': 'Additional Analysis and Context', 'value': ''}], 'desired': []}, 'associations': {'required': [{'type': 'Document'}, {'type': 'Adversary'}], 'desired': []}, 'tags': {'required': ['Ugly'], 'desired': []}}}}


def test_profile_creator_b():
    data = data_1_b()
    profile = profile_creator.create_profile(data)
    assert len(profile) == 1
    assert profile == {'settings': {'all': {'attributes': {'required': [{'type': 'Description', 'value': ''}, {'type': 'Source', 'value': ''}, {'type': 'Additional Analysis and Context', 'value': ''}], 'desired': []}, 'associations': {'required': [{'type': 'Document'}], 'desired': [{'type': 'Adversary'}]}, 'tags': {'required': [], 'desired': ['Ugly']}}}}
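# The required/desired split asserted above reduces to set logic over the
# observed instances: values present on every instance are "required", values
# present on only some are "desired". A minimal sketch of that rule (the
# helper name is ours, not part of profile_creator):
def split_required_desired(tag_sets):
    required = set.intersection(*tag_sets)
    desired = set.union(*tag_sets) - required
    return sorted(required), sorted(desired)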
| 54.764444 | 358 | 0.665801 | 1,508 | 12,322 | 5.414456 | 0.09748 | 0.066871 | 0.0812 | 0.114636 | 0.931415 | 0.923086 | 0.923086 | 0.919657 | 0.919657 | 0.917697 | 0 | 0.084361 | 0.124574 | 12,322 | 224 | 359 | 55.008929 | 0.672569 | 0.021263 | 0 | 0.919048 | 0 | 0.128571 | 0.679937 | 0.01045 | 0 | 0 | 0 | 0.004464 | 0.028571 | 1 | 0.019048 | false | 0 | 0.009524 | 0 | 0.028571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1d15a80cff0e3cc0ea7d175bb2be0dff265734ad | 324 | py | Python | app/blueprints/auth/routes/__init__.py | neurothrone/project-dot | 20889075611bed645689a76a30257f96e4b55988 | [
"MIT"
] | null | null | null | app/blueprints/auth/routes/__init__.py | neurothrone/project-dot | 20889075611bed645689a76a30257f96e4b55988 | [
"MIT"
] | null | null | null | app/blueprints/auth/routes/__init__.py | neurothrone/project-dot | 20889075611bed645689a76a30257f96e4b55988 | [
"MIT"
] | null | null | null | from app.blueprints.auth.routes import account
from app.blueprints.auth.routes import confirm
from app.blueprints.auth.routes import email
from app.blueprints.auth.routes import login
from app.blueprints.auth.routes import password
from app.blueprints.auth.routes import join
from app.blueprints.auth.routes import username
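# Importing these modules is what registers their view functions: each one is
# expected to attach its routes to the auth blueprint as an import side
# effect. A hedged sketch of what one such module typically looks like in a
# Flask-style app (the blueprint name and route are illustrative, not taken
# from this package):
#
#     from app.blueprints.auth import auth_blueprint
#
#     @auth_blueprint.route("/login", methods=["GET", "POST"])
#     def login():
#         ...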
| 40.5 | 47 | 0.848765 | 49 | 324 | 5.612245 | 0.265306 | 0.178182 | 0.432727 | 0.534545 | 0.84 | 0.84 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08642 | 324 | 7 | 48 | 46.285714 | 0.929054 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.142857 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 10 |
1d7d0c0e9d1411036c076924b3d77d4754903bbf | 846 | py | Python | experiments/examples/common/stan_code.py | DominicBroadbentCompass/bayesian-coresets-optimization | 3657f2ebfc4f0e6b36f5c651b0651f06d7e3d6b1 | [
"MIT"
] | 5 | 2021-05-21T02:34:17.000Z | 2022-03-29T15:17:26.000Z | experiments/examples/common/stan_code.py | DominicBroadbentCompass/bayesian-coresets-optimization | 3657f2ebfc4f0e6b36f5c651b0651f06d7e3d6b1 | [
"MIT"
] | 2 | 2021-03-12T04:07:52.000Z | 2021-03-15T12:56:05.000Z | examples/common/stan_code.py | dionman/beta-cores | d8b09a8f9ee2daf56aa5b7e7dc1ed3baf845117a | [
"MIT"
] | 4 | 2020-06-23T04:51:43.000Z | 2021-02-04T15:07:41.000Z | logistic_code = """
data {
  int<lower=0> n; // number of observations
  int<lower=0> d; // number of predictors
  int<lower=0,upper=1> y[n]; // outputs
  matrix[n,d] x; // inputs
}
parameters {
  real theta0; // intercept
  vector[d] theta; // auxiliary parameter
}
transformed parameters {
  vector[n] f;
  f = theta0 + x*theta;
}
model {
  theta0 ~ normal(0, 1);
  theta ~ normal(0, 1);
  y ~ bernoulli_logit(f);
}
"""
poisson_code = """
data {
  int<lower=0> n; // number of observations
  int<lower=0> d; // number of predictors
  int<lower=0> y[n]; // outputs
  matrix[n,d] x; // inputs
}
parameters {
  real theta0; // intercept
  vector[d] theta; // auxiliary parameter
}
transformed parameters {
  vector[n] f;
  f = -log_inv_logit(-(theta0 + x*theta));
}
model {
  theta0 ~ normal(0, 1);
  theta ~ normal(0, 1);
  y ~ poisson(f);
}
"""
| 19.674419 | 43 | 0.621749 | 122 | 846 | 4.270492 | 0.295082 | 0.092131 | 0.103647 | 0.06142 | 0.894434 | 0.894434 | 0.894434 | 0.894434 | 0.894434 | 0.894434 | 0 | 0.031157 | 0.20331 | 846 | 42 | 44 | 20.142857 | 0.74184 | 0 | 0 | 0.619048 | 0 | 0 | 0.946809 | 0.027187 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1d8e4ed6dea6dd00e16e43e26624000552257b53 | 109 | py | Python | src/fluentdemo/lib/healthchecks.py | edoburu/demo.django-fluent.org | 10556eb383849fb20b8c6958d87c4b9f94085af2 | [
"CC-BY-3.0"
] | 24 | 2016-09-09T02:54:18.000Z | 2021-02-28T05:35:01.000Z | src/fluentdemo/lib/healthchecks.py | edoburu/demo.django-fluent.org | 10556eb383849fb20b8c6958d87c4b9f94085af2 | [
"CC-BY-3.0"
] | 288 | 2017-04-13T16:00:23.000Z | 2022-01-06T13:48:02.000Z | src/fluentdemo/lib/healthchecks.py | edoburu/demo.django-fluent.org | 10556eb383849fb20b8c6958d87c4b9f94085af2 | [
"CC-BY-3.0"
] | 5 | 2017-03-20T10:37:59.000Z | 2020-07-28T15:44:08.000Z | from django.conf import settings
def git_version():
    return getattr(settings, 'GIT_VERSION', 'Unknown')
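# A hedged sketch of wiring this into a health endpoint (the view below is an
# illustration, not part of this module):
def healthcheck(request):
    from django.http import JsonResponse
    return JsonResponse({"status": "ok", "version": git_version()})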
| 18.166667 | 54 | 0.743119 | 14 | 109 | 5.642857 | 0.785714 | 0.253165 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146789 | 109 | 5 | 55 | 21.8 | 0.849462 | 0 | 0 | 0 | 0 | 0 | 0.165138 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
1d9d391596ef65b839b1e3991beaa4db8bcd35d0 | 178 | py | Python | src/resource_locker/reporter/__init__.py | ARMmbed/resource_locker | 256ed000c350e16986e10fb52b2a9b59423c4477 | [
"Apache-2.0"
] | null | null | null | src/resource_locker/reporter/__init__.py | ARMmbed/resource_locker | 256ed000c350e16986e10fb52b2a9b59423c4477 | [
"Apache-2.0"
] | null | null | null | src/resource_locker/reporter/__init__.py | ARMmbed/resource_locker | 256ed000c350e16986e10fb52b2a9b59423c4477 | [
"Apache-2.0"
] | 1 | 2021-09-10T13:59:31.000Z | 2021-09-10T13:59:31.000Z | from .aspects import Aspects
from .reporter import RedisReporter
from .reporter import DummyReporter
from .reporter import safe
from .timer import Timer
from .query import Query
| 25.428571 | 35 | 0.831461 | 24 | 178 | 6.166667 | 0.375 | 0.243243 | 0.364865 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.134831 | 178 | 6 | 36 | 29.666667 | 0.961039 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d5ab76fbbce10599c9b7845a669387cc021e4250 | 11,058 | py | Python | openapi_server/controllers/addresses_controller.py | havardhuns/graphsense-REST | e2b2c851fc6fd7bba06de66a7abdb82cb76ad1d0 | [
"MIT"
] | null | null | null | openapi_server/controllers/addresses_controller.py | havardhuns/graphsense-REST | e2b2c851fc6fd7bba06de66a7abdb82cb76ad1d0 | [
"MIT"
] | null | null | null | openapi_server/controllers/addresses_controller.py | havardhuns/graphsense-REST | e2b2c851fc6fd7bba06de66a7abdb82cb76ad1d0 | [
"MIT"
] | null | null | null | from typing import List, Dict
from aiohttp import web
import traceback
import json
from openapi_server.models.address import Address
from openapi_server.models.address_tags import AddressTags
from openapi_server.models.address_txs import AddressTxs
from openapi_server.models.entity import Entity
from openapi_server.models.links import Links
from openapi_server.models.neighbors import Neighbors
import gsrest.service.addresses_service as service
from openapi_server import util
async def get_address(request: web.Request, currency, address, include_tags=None) -> web.Response:
    """Get an address, optionally with tags

    :param currency: The cryptocurrency code (e.g., btc)
    :type currency: str
    :param address: The cryptocurrency address
    :type address: str
    :param include_tags: Whether to include the first page of tags. Use the respective /tags endpoint to retrieve more if needed.
    :type include_tags: bool

    """
    try:
        if 'currency' in ['','currency','address','include_tags']:
            if currency is not None:
                currency = currency.lower()
        result = service.get_address(request
            ,currency=currency,address=address,include_tags=include_tags)
        result = await result
        if isinstance(result, list):
            result = [d.to_dict() for d in result]
        else:
            result = result.to_dict()
        result = web.Response(
            status=200,
            text=json.dumps(result),
            headers={'Content-type': 'application/json'})
        return result
    except RuntimeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPNotFound(text=str(e))
    except ValueError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except TypeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except Exception as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPInternalServerError()


async def get_address_entity(request: web.Request, currency, address, include_tags=None) -> web.Response:
    """Get the entity of an address

    :param currency: The cryptocurrency code (e.g., btc)
    :type currency: str
    :param address: The cryptocurrency address
    :type address: str
    :param include_tags: Whether to include the first page of tags. Use the respective /tags endpoint to retrieve more if needed.
    :type include_tags: bool

    """
    try:
        if 'currency' in ['','currency','address','include_tags']:
            if currency is not None:
                currency = currency.lower()
        result = service.get_address_entity(request
            ,currency=currency,address=address,include_tags=include_tags)
        result = await result
        if isinstance(result, list):
            result = [d.to_dict() for d in result]
        else:
            result = result.to_dict()
        result = web.Response(
            status=200,
            text=json.dumps(result),
            headers={'Content-type': 'application/json'})
        return result
    except RuntimeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPNotFound(text=str(e))
    except ValueError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except TypeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except Exception as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPInternalServerError()


async def list_address_links(request: web.Request, currency, address, neighbor, page=None, pagesize=None) -> web.Response:
    """Get outgoing transactions between two addresses

    :param currency: The cryptocurrency code (e.g., btc)
    :type currency: str
    :param address: The cryptocurrency address
    :type address: str
    :param neighbor: Neighbor address
    :type neighbor: str
    :param page: Resumption token for retrieving the next page
    :type page: str
    :param pagesize: Number of items returned in a single page
    :type pagesize: int

    """
    try:
        if 'currency' in ['','currency','address','neighbor','page','pagesize']:
            if currency is not None:
                currency = currency.lower()
        result = service.list_address_links(request
            ,currency=currency,address=address,neighbor=neighbor,page=page,pagesize=pagesize)
        result = await result
        if isinstance(result, list):
            result = [d.to_dict() for d in result]
        else:
            result = result.to_dict()
        result = web.Response(
            status=200,
            text=json.dumps(result),
            headers={'Content-type': 'application/json'})
        return result
    except RuntimeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPNotFound(text=str(e))
    except ValueError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except TypeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except Exception as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPInternalServerError()


async def list_address_neighbors(request: web.Request, currency, address, direction, include_labels=None, page=None, pagesize=None) -> web.Response:
    """Get an address's neighbors in the address graph

    :param currency: The cryptocurrency code (e.g., btc)
    :type currency: str
    :param address: The cryptocurrency address
    :type address: str
    :param direction: Incoming or outgoing neighbors
    :type direction: str
    :param include_labels: Whether to include labels of first page of tags
    :type include_labels: bool
    :param page: Resumption token for retrieving the next page
    :type page: str
    :param pagesize: Number of items returned in a single page
    :type pagesize: int

    """
    try:
        if 'currency' in ['','currency','address','direction','include_labels','page','pagesize']:
            if currency is not None:
                currency = currency.lower()
        result = service.list_address_neighbors(request
            ,currency=currency,address=address,direction=direction,include_labels=include_labels,page=page,pagesize=pagesize)
        result = await result
        if isinstance(result, list):
            result = [d.to_dict() for d in result]
        else:
            result = result.to_dict()
        result = web.Response(
            status=200,
            text=json.dumps(result),
            headers={'Content-type': 'application/json'})
        return result
    except RuntimeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPNotFound(text=str(e))
    except ValueError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except TypeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except Exception as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPInternalServerError()


async def list_address_txs(request: web.Request, currency, address, page=None, pagesize=None) -> web.Response:
    """Get all transactions an address has been involved in

    :param currency: The cryptocurrency code (e.g., btc)
    :type currency: str
    :param address: The cryptocurrency address
    :type address: str
    :param page: Resumption token for retrieving the next page
    :type page: str
    :param pagesize: Number of items returned in a single page
    :type pagesize: int

    """
    try:
        if 'currency' in ['','currency','address','page','pagesize']:
            if currency is not None:
                currency = currency.lower()
        result = service.list_address_txs(request
            ,currency=currency,address=address,page=page,pagesize=pagesize)
        result = await result
        if isinstance(result, list):
            result = [d.to_dict() for d in result]
        else:
            result = result.to_dict()
        result = web.Response(
            status=200,
            text=json.dumps(result),
            headers={'Content-type': 'application/json'})
        return result
    except RuntimeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPNotFound(text=str(e))
    except ValueError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except TypeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except Exception as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPInternalServerError()


async def list_tags_by_address(request: web.Request, currency, address, page=None, pagesize=None) -> web.Response:
    """Get attribution tags for a given address

    :param currency: The cryptocurrency code (e.g., btc)
    :type currency: str
    :param address: The cryptocurrency address
    :type address: str
    :param page: Resumption token for retrieving the next page
    :type page: str
    :param pagesize: Number of items returned in a single page
    :type pagesize: int

    """
    try:
        if 'currency' in ['','currency','address','page','pagesize']:
            if currency is not None:
                currency = currency.lower()
        result = service.list_tags_by_address(request
            ,currency=currency,address=address,page=page,pagesize=pagesize)
        result = await result
        if isinstance(result, list):
            result = [d.to_dict() for d in result]
        else:
            result = result.to_dict()
        result = web.Response(
            status=200,
            text=json.dumps(result),
            headers={'Content-type': 'application/json'})
        return result
    except RuntimeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPNotFound(text=str(e))
    except ValueError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except TypeError as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPBadRequest(text=str(e))
    except Exception as e:
        traceback.print_exception(type(e), e, e.__traceback__)
        raise web.HTTPInternalServerError()
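# The six handlers above repeat the same dispatch/serialize/error-mapping
# boilerplate. A hedged sketch of a decorator that could factor it out (the
# decorator name is ours, not part of this module):
import functools

def json_endpoint(handler):
    @functools.wraps(handler)
    async def wrapper(*args, **kwargs):
        try:
            result = await handler(*args, **kwargs)
            if isinstance(result, list):
                result = [d.to_dict() for d in result]
            else:
                result = result.to_dict()
            return web.Response(
                status=200,
                text=json.dumps(result),
                headers={'Content-type': 'application/json'})
        except RuntimeError as e:
            raise web.HTTPNotFound(text=str(e))
        except (ValueError, TypeError) as e:
            raise web.HTTPBadRequest(text=str(e))
        except Exception:
            raise web.HTTPInternalServerError()
    return wrapper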
| 38.664336 | 148 | 0.646681 | 1,329 | 11,058 | 5.242287 | 0.087284 | 0.068896 | 0.041338 | 0.058562 | 0.883594 | 0.842256 | 0.839242 | 0.829482 | 0.829482 | 0.829482 | 0 | 0.002434 | 0.256918 | 11,058 | 285 | 149 | 38.8 | 0.845442 | 0 | 0 | 0.849462 | 0 | 0 | 0.047898 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.064516 | 0 | 0.096774 | 0.129032 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
63661ae5fcf248acf1ea42a6e63610f77586b690 | 2,763 | py | Python | tests/test_00009_html2txt_12.py | renesugar/html2txt | 069ff7048417737f9072dea86dd6a33b31049b2a | [
"MIT"
] | null | null | null | tests/test_00009_html2txt_12.py | renesugar/html2txt | 069ff7048417737f9072dea86dd6a33b31049b2a | [
"MIT"
] | null | null | null | tests/test_00009_html2txt_12.py | renesugar/html2txt | 069ff7048417737f9072dea86dd6a33b31049b2a | [
"MIT"
] | 2 | 2021-09-20T21:47:22.000Z | 2021-12-10T03:59:58.000Z | import pytest
from html2txt import converters
# example: 9
# section: html2txt
def test_12_00009():
html = """<pre class="brush: js">var n = 123; // allocates memory for a number
var s = "azerty"; // allocates memory for a string
var o = {
a: 1,
b: null
}; // allocates memory for an object and contained values
// (like object) allocates memory for the array and
// contained values
var a = [1, null, "abra"];
function f(a){
return a + 2;
} // allocates a function (which is a callable object)
// function expressions also allocate an object
someElement.addEventListener('click', function(){
someElement.style.backgroundColor = 'blue';
}, false);
</pre>
<h4 id="Allocation_via_function_calls">Allocation via function calls</h4>
<p>Some function calls result in object allocation.</p>
<pre class="brush: js">var d = new Date(); // allocates a Date object
var e = document.createElement('div'); // allocates a DOM element</pre>
<p>Some methods allocate new values or objects:</p>
<pre class="brush: js">var s = "azerty";
var s2 = s.substr(0, 3); // s2 is a new string
// Since strings are immutable value,
// JavaScript may decide to not allocate memory,
// but just store the [0, 3] range.
var a = ["ouais ouais", "nan nan"];
var a2 = ["generation", "nan nan"];
var a3 = a.concat(a2);
// new array with 4 elements being
// the concatenation of a and a2 elements
</pre>
"""
expected_markdown = """
```js
var n = 123; // allocates memory for a number
var s = "azerty"; // allocates memory for a string
var o = {
a: 1,
b: null
}; // allocates memory for an object and contained values
// (like object) allocates memory for the array and
// contained values
var a = [1, null, "abra"];
function f(a){
return a + 2;
} // allocates a function (which is a callable object)
// function expressions also allocate an object
someElement.addEventListener('click', function(){
someElement.style.backgroundColor = 'blue';
}, false);
```
#### Allocation via function calls
Some function calls result in object allocation.
```js
var d = new Date(); // allocates a Date object
var e = document.createElement('div'); // allocates a DOM element
```
Some methods allocate new values or objects:
```js
var s = "azerty";
var s2 = s.substr(0, 3); // s2 is a new string
// Since strings are immutable value,
// JavaScript may decide to not allocate memory,
// but just store the [0, 3] range.
var a = ["ouais ouais", "nan nan"];
var a2 = ["generation", "nan nan"];
var a3 = a.concat(a2);
// new array with 4 elements being
// the concatenation of a and a2 elements
```
"""
    markdown = converters.Html2Markdown().convert(html)
    assert markdown == expected_markdown
| 27.909091 | 87 | 0.669562 | 397 | 2,763 | 4.642317 | 0.287154 | 0.065111 | 0.078133 | 0.041237 | 0.856755 | 0.849702 | 0.834509 | 0.749864 | 0.749864 | 0.749864 | 0 | 0.021277 | 0.200507 | 2,763 | 98 | 88 | 28.193878 | 0.813038 | 0.010134 | 0 | 0.74359 | 0 | 0 | 0.923865 | 0.089678 | 0 | 0 | 0 | 0 | 0.012821 | 1 | 0.012821 | false | 0 | 0.025641 | 0 | 0.064103 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
63722b6268e3b6c8eaa681c48c19cc74d48880a6 | 41,822 | py | Python | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/classDefs_solverWrapper/methodDefs_KratosSolverWrapper/mpi_solve.py | clazaro/Kratos | b947b82c90dfcbf13d60511427f85990d36b90be | [
"BSD-4-Clause"
] | 2 | 2020-12-22T11:50:11.000Z | 2021-09-15T11:36:30.000Z | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/classDefs_solverWrapper/methodDefs_KratosSolverWrapper/mpi_solve.py | clazaro/Kratos | b947b82c90dfcbf13d60511427f85990d36b90be | [
"BSD-4-Clause"
] | 3 | 2021-08-18T16:12:20.000Z | 2021-09-02T07:36:15.000Z | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/classDefs_solverWrapper/methodDefs_KratosSolverWrapper/mpi_solve.py | clazaro/Kratos | b947b82c90dfcbf13d60511427f85990d36b90be | [
"BSD-4-Clause"
] | 1 | 2017-05-02T00:52:44.000Z | 2017-05-02T00:52:44.000Z | # Import Python libraries
import time
import pickle
import os
try:
    from threadpoolctl import *
except:
    pass
# Import Kratos, XMC, distributed environment
from KratosMultiphysics import IsDistributedRun, DataCommunicator
from xmc.classDefs_solverWrapper.methodDefs_KratosSolverWrapper.solve import ExecuteInstanceDeterministicAdaptiveRefinementAux_Functionality,ExecuteInstanceReadingFromFileAux_Functionality,ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality
from exaqute import *
computing_units_mlmc_execute_0 = int(os.getenv("computing_units_mlmc_execute_0", 1))
computing_units_mlmc_execute_1 = int(os.getenv("computing_units_mlmc_execute_1", 1))
computing_units_mlmc_execute_2 = int(os.getenv("computing_units_mlmc_execute_2", 1))
computing_procs_mlmc_execute_0 = int(os.getenv("computing_procs_mlmc_execute_0", 1))
computing_procs_mlmc_execute_1 = int(os.getenv("computing_procs_mlmc_execute_1", 1))
computing_procs_mlmc_execute_2 = int(os.getenv("computing_procs_mlmc_execute_2", 1))
ppn_mlmc_execute_0 = int(os.getenv("ppn_mlmc_execute_0", 1))
ppn_mlmc_execute_1 = int(os.getenv("ppn_mlmc_execute_1", 1))
ppn_mlmc_execute_2 = int(os.getenv("ppn_mlmc_execute_2", 1))
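# A minimal sketch that would collapse the nine os.getenv casts above into one
# helper (the function name is ours, not part of this module):
def _env_int(name, default=1):
    return int(os.getenv(name, default))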
####################################################################################################
############################################ WRAPPERS ##############################################
####################################################################################################
def SerializeMPIModel_Wrapper(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis, current_index):
    if current_index == 0:
        pickled_model = SerializeMPIModelAuxLev0_Task(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis)
    elif current_index == 1:
        pickled_model = SerializeMPIModelAuxLev1_Task(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis)
    elif current_index == 2:
        pickled_model = SerializeMPIModelAuxLev2_Task(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis)
    else:
        raise Exception("Level not supported")
    return pickled_model


def SerializeDeterministicAdaptiveRefinementMPIModel_Wrapper(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,adaptive_refinement_jump_to_finest_level):
    if current_index == 0:
        pickled_model = SerializeDeterministicAdaptiveRefinementMPIModelAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,adaptive_refinement_jump_to_finest_level)
    elif current_index == 1:
        pickled_model = SerializeDeterministicAdaptiveRefinementMPIModelAuxLev1_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,adaptive_refinement_jump_to_finest_level)
    elif current_index == 2:
        pickled_model = SerializeDeterministicAdaptiveRefinementMPIModelAuxLev2_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,adaptive_refinement_jump_to_finest_level)
    else:
        raise Exception("Level not supported")
    return pickled_model


def executeInstanceStochasticAdaptiveRefinementAllAtOnce_Wrapper(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,current_contribution):
    if (current_index == 0):
        qoi_and_time_list = ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    elif (current_index == 1):
        qoi_and_time_list = ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev1_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    elif (current_index == 2):
        qoi_and_time_list = ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev2_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    else:
        raise Exception("Level not supported")
    if IsDistributedRun():
        # running the whole xmc algorithm with mpirun
        qoi, time_for_qoi = UnfoldQT(qoi_and_time_list)
    else:
        # running with a distributed environment framework; only Kratos tasks are run with MPI
        qoi, time_for_qoi = UnfoldFutureQT(qoi_and_time_list)
    return qoi, time_for_qoi
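# Every wrapper here repeats the same if/elif dispatch on current_index; the
# per-level task variants exist because the @constraint/@mpi decorators read
# their computing-unit constants at import time. A hedged sketch of the lookup
# the chain encodes (the table name is ours, not part of this module):
#
#     _ALL_AT_ONCE_TASKS = {
#         0: ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev0_Task,
#         1: ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev1_Task,
#         2: ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev2_Task,
#     }
#     task = _ALL_AT_ONCE_TASKS.get(current_index)
#     if task is None:
#         raise Exception("Level not supported")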
def executeInstanceStochasticAdaptiveRefinementMultipleTasks_Wrapper(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,mapping_flag,print_to_file,current_contribution,pickled_mapping_reference_model=None):
    if (current_index == 0):
        qoi_pickled_current_model_time_for_qoi_list = ExecuteInstanceStochasticAdaptiveRefinementMultipleTasksAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    else:
        # We cannot run with multiple tasks, since tasks of different levels are normally run with different numbers of processors,
        # and when running with MPI the model should be pickled with the number of processes of the task.
        # For example, to run with MPI and 4 processes, the model needs to be serialized within an MPI task of 4 processes.
        raise Exception("Level not supported. You should set \"taskAllAtOnce\" to \"true\" to run multi-level algorithms with \"stochastic_adaptive_refinement\" as \"refinement_strategy\".")
    if IsDistributedRun():
        # running the whole xmc algorithm with mpirun
        qoi, pickled_current_model, time_for_qoi = UnfoldQMT(qoi_pickled_current_model_time_for_qoi_list)
    else:
        # running with a distributed environment framework; only Kratos tasks are run with MPI
        qoi, pickled_current_model, time_for_qoi = UnfoldFutureQMT(qoi_pickled_current_model_time_for_qoi_list)
    return qoi, pickled_current_model, time_for_qoi


def executeInstanceDeterministicAdaptiveRefinement_Wrapper(current_index,pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,current_contribution):
    if (current_index == 0):
        qoi_and_time_list = executeInstanceDeterministicAdaptiveRefinementAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    elif (current_index == 1):
        qoi_and_time_list = executeInstanceDeterministicAdaptiveRefinementAuxLev1_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    elif (current_index == 2):
        qoi_and_time_list = executeInstanceDeterministicAdaptiveRefinementAuxLev2_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    else:
        raise Exception("Level not supported")
    if IsDistributedRun():
        # running the whole xmc algorithm with mpirun
        qoi, time_for_qoi = UnfoldQT(qoi_and_time_list)
    else:
        # running with a distributed environment framework; only Kratos tasks are run with MPI
        qoi, time_for_qoi = UnfoldFutureQT(qoi_and_time_list)
    return qoi, time_for_qoi


def executeInstanceReadingFromFile_Wrapper(current_index,pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,current_contribution):
    if (current_index == 0):
        qoi_and_time_list = executeInstanceReadingFromFileAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    elif (current_index == 1):
        qoi_and_time_list = executeInstanceReadingFromFileAuxLev1_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    elif (current_index == 2):
        qoi_and_time_list = executeInstanceReadingFromFileAuxLev2_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
    else:
        raise Exception("Level not supported")
    if IsDistributedRun():
        # running the whole xmc algorithm with mpirun
        qoi, time_for_qoi = UnfoldQT(qoi_and_time_list)
    else:
        # running with a distributed environment framework; only Kratos tasks are run with MPI
        qoi, time_for_qoi = UnfoldFutureQT(qoi_and_time_list)
    return qoi, time_for_qoi
####################################################################################################
############################################## TASKS ###############################################
####################################################################################################
@task(keep=True, returns=2)
def UnfoldQT(qoi_and_time_list):
    communicator = DataCommunicator.GetDefault()
    qoi = qoi_and_time_list[0]
    time_for_qoi = communicator.SumAll(qoi_and_time_list[-1])
    return qoi, time_for_qoi


@task(keep=True, returns=3)
def UnfoldQMT(qoi_pickled_current_model_time_for_qoi_list):
    communicator = DataCommunicator.GetDefault()
    qoi = qoi_pickled_current_model_time_for_qoi_list[0]
    pickled_current_model = qoi_pickled_current_model_time_for_qoi_list[1]
    time_for_qoi = communicator.SumAll(qoi_pickled_current_model_time_for_qoi_list[-1])
    return qoi, pickled_current_model, time_for_qoi


@task(keep=True, qoi_and_time_list={Type: COLLECTION_IN, Depth: 2}, returns=2)
def UnfoldFutureQT(qoi_and_time_list):
    qoi = qoi_and_time_list[0][0]  # get first qoi element (all are equal since they are synchronized)
    time_for_qoi = 0.0
    for qoi_and_time in qoi_and_time_list:
        time_for_qoi += qoi_and_time[1]  # sum all times
    return qoi, time_for_qoi


@task(keep=True, qoi_pickled_current_model_time_for_qoi_list={Type: COLLECTION_IN, Depth: 2}, returns=3)
def UnfoldFutureQMT(qoi_pickled_current_model_time_for_qoi_list):
    qoi = qoi_pickled_current_model_time_for_qoi_list[0][0]  # get first qoi element (all are equal since they are synchronized)
    pickled_current_model = qoi_pickled_current_model_time_for_qoi_list[1]
    time_for_qoi = 0.0
    for qoi_pickled_current_model_time_for_qoi in qoi_pickled_current_model_time_for_qoi_list:
        time_for_qoi += qoi_pickled_current_model_time_for_qoi[-1]  # sum all times
    return qoi, pickled_current_model, time_for_qoi
########################################## Serialization ##########################################
@constraint(computing_units=computing_units_mlmc_execute_0)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, processes_per_node=ppn_mlmc_execute_0)
@task(keep=True, returns=computing_procs_mlmc_execute_0)
def SerializeMPIModelAuxLev0_Task(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis):
    import KratosMultiphysics
    import KratosMultiphysics.mpi as KratosMPI
    serialized_parameters = pickle.loads(pickled_parameters)
    del pickled_parameters
    deserialized_parameters = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization", deserialized_parameters)
    # prepare the model to serialize
    model = KratosMultiphysics.Model()
    fake_sample = fake_sample_to_serialize
    deserialized_parameters["solver_settings"]["model_import_settings"]["input_type"].SetString("mdpa")
    # initialize analysis stage
    simulation = analysis(model, deserialized_parameters, fake_sample)
    simulation.Initialize()
    # reset general flags
    simulation.model.GetModelPart(main_model_part_name).ProcessInfo.SetValue(KratosMultiphysics.IS_RESTARTED, True)
    # serialize model
    serialized_model = KratosMultiphysics.MpiSerializer()
    serialized_model.Save("ModelSerialization", simulation.model)
    # pickle the serialized data
    pickled_model = pickle.dumps(serialized_model, 2)  # second argument is the protocol and is NECESSARY (according to pybind11 docs)
    return pickled_model


@constraint(computing_units=computing_units_mlmc_execute_1)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_1, processes_per_node=ppn_mlmc_execute_1)
@task(keep=True, returns=computing_procs_mlmc_execute_1)
def SerializeMPIModelAuxLev1_Task(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis):
    import KratosMultiphysics
    import KratosMultiphysics.mpi as KratosMPI
    serialized_parameters = pickle.loads(pickled_parameters)
    del pickled_parameters
    deserialized_parameters = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization", deserialized_parameters)
    # prepare the model to serialize
    model = KratosMultiphysics.Model()
    fake_sample = fake_sample_to_serialize
    deserialized_parameters["solver_settings"]["model_import_settings"]["input_type"].SetString("mdpa")
    # initialize analysis stage
    simulation = analysis(model, deserialized_parameters, fake_sample)
    simulation.Initialize()
    # reset general flags
    simulation.model.GetModelPart(main_model_part_name).ProcessInfo.SetValue(KratosMultiphysics.IS_RESTARTED, True)
    # serialize model
    serialized_model = KratosMultiphysics.MpiSerializer()
    serialized_model.Save("ModelSerialization", simulation.model)
    # pickle the serialized data
    pickled_model = pickle.dumps(serialized_model, 2)  # second argument is the protocol and is NECESSARY (according to pybind11 docs)
    return pickled_model


@constraint(computing_units=computing_units_mlmc_execute_2)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_2, processes_per_node=ppn_mlmc_execute_2)
@task(keep=True, returns=computing_procs_mlmc_execute_2)
def SerializeMPIModelAuxLev2_Task(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis):
    import KratosMultiphysics
    import KratosMultiphysics.mpi as KratosMPI
    serialized_parameters = pickle.loads(pickled_parameters)
    del pickled_parameters
    deserialized_parameters = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization", deserialized_parameters)
    # prepare the model to serialize
    model = KratosMultiphysics.Model()
    fake_sample = fake_sample_to_serialize
    deserialized_parameters["solver_settings"]["model_import_settings"]["input_type"].SetString("mdpa")
    # initialize analysis stage
    simulation = analysis(model, deserialized_parameters, fake_sample)
    simulation.Initialize()
    # reset general flags
    simulation.model.GetModelPart(main_model_part_name).ProcessInfo.SetValue(KratosMultiphysics.IS_RESTARTED, True)
    # serialize model
    serialized_model = KratosMultiphysics.MpiSerializer()
    serialized_model.Save("ModelSerialization", simulation.model)
    # pickle the serialized data
    pickled_model = pickle.dumps(serialized_model, 2)  # second argument is the protocol and is NECESSARY (according to pybind11 docs)
    return pickled_model
########################################## Serialization DAR ##########################################
@constraint(computing_units=computing_units_mlmc_execute_0)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, processes_per_node=ppn_mlmc_execute_0, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@task(keep=True, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def SerializeDeterministicAdaptiveRefinementMPIModelAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,adaptive_refinement_jump_to_finest_level):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_0
threadpool_limits(limits=open_mp_threads)
    except Exception:  # threadpool_limits unavailable or failed; fall back to a single thread
open_mp_threads = 1
mapping_flag = False
print_to_file = False
filename = ""
pickled_coarsest_model = pickled_coarse_model
for current_local_index in range(current_index+1):
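        # When adaptive_refinement_jump_to_finest_level is True, the intermediate levels
        # are skipped and only the coarsest (0) and finest (current_index) instances run.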
if ((adaptive_refinement_jump_to_finest_level is False) or (adaptive_refinement_jump_to_finest_level is True and (current_local_index == 0 or current_local_index == current_index))):
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_coarsest_model,print_to_file,filename)
            del pickled_coarse_model
            pickled_coarse_model = pickled_current_model
            del pickled_current_model
return pickled_coarse_model
@constraint(computing_units=computing_units_mlmc_execute_1)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_1, processes_per_node=ppn_mlmc_execute_1, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_1, block_length: 1, stride: 1})
@task(keep=True, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_1)
def SerializeDeterministicAdaptiveRefinementMPIModelAuxLev1_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,adaptive_refinement_jump_to_finest_level):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_1
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
mapping_flag = False
print_to_file = False
filename = ""
pickled_coarsest_model = pickled_coarse_model
for current_local_index in range(current_index+1):
if ((adaptive_refinement_jump_to_finest_level is False) or (adaptive_refinement_jump_to_finest_level is True and (current_local_index == 0 or current_local_index == current_index))):
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_coarsest_model,print_to_file,filename)
            del pickled_coarse_model
            pickled_coarse_model = pickled_current_model
            del pickled_current_model
return pickled_coarse_model
@constraint(computing_units=computing_units_mlmc_execute_2)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_2, processes_per_node=ppn_mlmc_execute_2, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_2, block_length: 1, stride: 1})
@task(keep=True, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_2)
def SerializeDeterministicAdaptiveRefinementMPIModelAuxLev2_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,adaptive_refinement_jump_to_finest_level):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_2
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
mapping_flag = False
print_to_file = False
filename = ""
pickled_coarsest_model = pickled_coarse_model
for current_local_index in range(current_index+1):
if ((adaptive_refinement_jump_to_finest_level is False) or (adaptive_refinement_jump_to_finest_level is True and (current_local_index == 0 or current_local_index == current_index))):
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_coarsest_model,print_to_file,filename)
            del pickled_coarse_model
            pickled_coarse_model = pickled_current_model
            del pickled_current_model
return pickled_coarse_model
############################### StochasticAdaptiveRefinementAllAtOnce ##############################
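# Each task below executes the full chain of refinement levels (0 .. current_index)
# inside a single MPI task, carrying the pickled model from one level to the next,
# and returns only the quantity of interest and the accumulated time.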
# @task(keep=True, filename=FILE_OUT, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units=computing_units_mlmc_execute_0)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, processes_per_node=ppn_mlmc_execute_0, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@task(keep=True, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_0
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
pickled_coarsest_model = pickled_coarse_model
for current_local_index in range(current_index+1):
if ((adaptive_refinement_jump_to_finest_level is False) or (adaptive_refinement_jump_to_finest_level is True and (current_local_index == 0 or current_local_index == current_index))):
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_coarsest_model,print_to_file,filename)
            del pickled_coarse_model
            pickled_coarse_model = pickled_current_model
            del pickled_current_model
return qoi,time_for_qoi
# @task(keep=True, filename=FILE_OUT, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_1)
@constraint(computing_units=computing_units_mlmc_execute_1)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_1, processes_per_node=ppn_mlmc_execute_1, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_1, block_length: 1, stride: 1})
@task(keep=True, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_1)
def ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev1_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_1
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
pickled_coarsest_model = pickled_coarse_model
for current_local_index in range(current_index+1):
if ((adaptive_refinement_jump_to_finest_level is False) or (adaptive_refinement_jump_to_finest_level is True and (current_local_index == 0 or current_local_index == current_index))):
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_coarsest_model,print_to_file,filename)
            del pickled_coarse_model
            pickled_coarse_model = pickled_current_model
            del pickled_current_model
return qoi,time_for_qoi
# @task(keep=True, filename=FILE_OUT, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_2)
@constraint(computing_units=computing_units_mlmc_execute_2)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_2, processes_per_node=ppn_mlmc_execute_2, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_2, block_length: 1, stride: 1})
@task(keep=True, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_2)
def ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev2_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_2
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
pickled_coarsest_model = pickled_coarse_model
for current_local_index in range(current_index+1):
if ((adaptive_refinement_jump_to_finest_level is False) or (adaptive_refinement_jump_to_finest_level is True and (current_local_index == 0 or current_local_index == current_index))):
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_coarsest_model,print_to_file,filename)
            del pickled_coarse_model
            pickled_coarse_model = pickled_current_model
            del pickled_current_model
return qoi,time_for_qoi
############################# StochasticAdaptiveRefinementMultipleTasks ############################
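# Unlike the all-at-once variant above, each call here performs a single refinement
# step (current_local_index) and returns the refined pickled model, so the caller
# chains successive calls to walk through the levels.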
# @task(keep=True, filename=FILE_OUT,pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units=computing_units_mlmc_execute_0)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, processes_per_node=ppn_mlmc_execute_0, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@task(keep=True, pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def ExecuteInstanceStochasticAdaptiveRefinementMultipleTasksAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_0
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_mapping_reference_model,print_to_file,filename)
return qoi,pickled_current_model,time_for_qoi
########################################## DeterministicAdaptiveRefinement ########################################
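# No refinement loop in this section: each task delegates one solve on an
# already-refined model to ExecuteInstanceDeterministicAdaptiveRefinementAux_Functionality.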
# @task(keep=True, filename=FILE_OUT,pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units=computing_units_mlmc_execute_0)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, processes_per_node=ppn_mlmc_execute_0, pickled_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1}, pickled_mapping_reference_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@task(keep=True, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def executeInstanceDeterministicAdaptiveRefinementAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_0
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceDeterministicAdaptiveRefinementAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
# @task(keep=True, filename=FILE_OUT,pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_1)
@constraint(computing_units=computing_units_mlmc_execute_1)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_1, processes_per_node=ppn_mlmc_execute_1, pickled_model_layout={block_count: computing_procs_mlmc_execute_1, block_length: 1, stride: 1}, pickled_mapping_reference_model_layout={block_count: computing_procs_mlmc_execute_1, block_length: 1, stride: 1})
@task(keep=True, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_1)
def executeInstanceDeterministicAdaptiveRefinementAuxLev1_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_1
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceDeterministicAdaptiveRefinementAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
# @task(keep=True, filename=FILE_OUT,pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_2)
@constraint(computing_units=computing_units_mlmc_execute_2)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_2, processes_per_node=ppn_mlmc_execute_2, pickled_model_layout={block_count: computing_procs_mlmc_execute_2, block_length: 1, stride: 1}, pickled_mapping_reference_model_layout={block_count: computing_procs_mlmc_execute_2, block_length: 1, stride: 1})
@task(keep=True, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_2)
def executeInstanceDeterministicAdaptiveRefinementAuxLev2_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_2
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceDeterministicAdaptiveRefinementAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
########################################## ReadingFromFile #########################################
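# Identical structure to the deterministic tasks above, but the instance is run by
# ExecuteInstanceReadingFromFileAux_Functionality, which (as the section name
# suggests) works with a model read from file.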
# @task(keep=True, filename=FILE_OUT, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units=computing_units_mlmc_execute_0)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, processes_per_node=ppn_mlmc_execute_0, pickled_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1}, pickled_mapping_reference_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@task(keep=True, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def executeInstanceReadingFromFileAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_0
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceReadingFromFileAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
# @task(keep=True, filename=FILE_OUT, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_1)
@constraint(computing_units=computing_units_mlmc_execute_1)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_1, processes_per_node=ppn_mlmc_execute_1, pickled_model_layout={block_count: computing_procs_mlmc_execute_1, block_length: 1, stride: 1}, pickled_mapping_reference_model_layout={block_count: computing_procs_mlmc_execute_1, block_length: 1, stride: 1})
@task(keep=True, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_1)
def executeInstanceReadingFromFileAuxLev1_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_1
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceReadingFromFileAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
# @task(keep=True, filename=FILE_OUT, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_2)
@constraint(computing_units=computing_units_mlmc_execute_2)
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_2, processes_per_node=ppn_mlmc_execute_2, pickled_model_layout={block_count: computing_procs_mlmc_execute_2, block_length: 1, stride: 1}, pickled_mapping_reference_model_layout={block_count: computing_procs_mlmc_execute_2, block_length: 1, stride: 1})
@task(keep=True, pickled_model=COLLECTION_IN, pickled_mapping_reference_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_2)
def executeInstanceReadingFromFileAuxLev2_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = computing_units_mlmc_execute_2
threadpool_limits(limits=open_mp_threads)
    except Exception:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceReadingFromFileAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
| 75.084381 | 536 | 0.822701 | 5,077 | 41,822 | 6.276541 | 0.048257 | 0.042804 | 0.032323 | 0.052564 | 0.956568 | 0.949005 | 0.936421 | 0.921044 | 0.905699 | 0.898638 | 0 | 0.007567 | 0.093085 | 41,822 | 556 | 537 | 75.219424 | 0.832582 | 0.084142 | 0 | 0.801909 | 0 | 0 | 0.034483 | 0.008386 | 0 | 0 | 0 | 0 | 0 | 1 | 0.062053 | false | 0.002387 | 0.131265 | 0 | 0.25537 | 0.095465 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
637d0d1675c7c8be26d86ec4f4fe0889580a09db | 101 | py | Python | src/autoks/distance/__init__.py | lschlessinger1/MS-project | e1c02d1d1a7a2480ff6f14f30625dc42ee3417e3 | [
"MIT"
] | 2 | 2019-04-29T15:18:11.000Z | 2019-12-13T18:58:40.000Z | src/autoks/distance/__init__.py | lschlessinger1/MS-project | e1c02d1d1a7a2480ff6f14f30625dc42ee3417e3 | [
"MIT"
] | 275 | 2019-02-19T22:59:39.000Z | 2020-10-03T08:56:08.000Z | src/autoks/distance/__init__.py | lschlessinger1/MS-project | e1c02d1d1a7a2480ff6f14f30625dc42ee3417e3 | [
"MIT"
] | null | null | null | from .distance import HellingerDistanceBuilder, FrobeniusDistanceBuilder, CorrelationDistanceBuilder
| 50.5 | 100 | 0.910891 | 6 | 101 | 15.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.059406 | 101 | 1 | 101 | 101 | 0.968421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
891f39e7696074dd2b85e0931ba1bd52c4e7f177 | 16,166 | py | Python | tests/library/series/fileparser/group/test_group_###.py | stampedeboss/DadVision2 | 572d377086f7f356d24f60493cdbb655f5729e8d | [
"Apache-2.0"
] | 1 | 2021-02-26T19:43:06.000Z | 2021-02-26T19:43:06.000Z | tests/library/series/fileparser/group/test_group_###.py | stampedeboss/DadVision2 | 572d377086f7f356d24f60493cdbb655f5729e8d | [
"Apache-2.0"
] | null | null | null | tests/library/series/fileparser/group/test_group_###.py | stampedeboss/DadVision2 | 572d377086f7f356d24f60493cdbb655f5729e8d | [
"Apache-2.0"
] | null | null | null | import unittest
from logging import INFO
import logger
from series import FileParser
class KnownValues(unittest.TestCase):
File_SxxExx = {}
    File_SxxExx['FileName'] = "/srv/DadVision/Series/Covert Affairs/Season 1/E01 Pilot.mkv"
File_SxxExx['SeriesName'] = 'Covert Affairs'
File_SxxExx['SeasonNum'] = 1
File_SxxExx['EpisodeNums'] = [1]
# File_SxxExx['type'] = 'episode'
File_SxxExx['Ext'] = 'mkv'
class fileParserGroup_1(unittest.TestCase):
def setUp(self):
TRACE = 5
VERBOSE = 15
logger.initialize(unit_test=True, level=INFO)
self.library = FileParser()
        args = self.library.options.parser.parse_args(['--error'])  # argparse expects a list of argument strings
'''
Test Cases:
01 {Group Name}Covert Affairs ...
02 {Group.Name}Covert.Affairs. ...
03 {Group.Name}Covert_Affairs_ ...
04 {Group Name} Covert Affairs ...
05 {Group.Name}.Covert.Affairs. ...
06 {Group.Name}_Covert_Affairs_ ...
07 [Group Name] Covert Affairs ...
08 [Group.Name].Covert.Affairs.
09 [Group.Name]_Covert_Affairs.
10 [Group Name] - Covert Affairs ...
11 [Group Name].-.Covert.Affairs ...
12 [Group Name]_-_Covert_Affairs ...
'''
# 01 {Group Name}Covert Affairs ...
def test_fileparser_group_1_011(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name}Covert Affairs 101 Case 011.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_012(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name}Covert Affairs 0101 Case 012.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_013(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name}Covert Affairs 1001 Case 013.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_014(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name}Covert Affairs 01001 Case 014.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 02 {Group.Name}Covert.Affairs. ...
def test_fileparser_group_1_021(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}Covert.Affairs.101 Case 021.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_022(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}Covert.Affairs.0101 Case 022.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_023(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}Covert.Affairs.1001 Case 023.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_024(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}Covert.Affairs.01001 Case 024.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 03 {Group.Name}Covert_Affairs_ ...
def test_fileparser_group_1_031(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}Covert_Affairs_101 Case 031.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_032(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}Covert_Affairs_0101 Case 032.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_033(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}Covert_Affairs_1001 Case 033.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_034(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}Covert_Affairs_01001 Case 034.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 04 {Group Name} Covert Affairs ...
def test_fileparser_group_1_041(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name} Covert Affairs 101 Case 041.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_042(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name} Covert Affairs 0101 Case 042.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_043(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name} Covert Affairs 1001 Case 043.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_044(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group Name} Covert Affairs 01001 Case 044.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 05 {Group.Name}.Covert.Affairs. ...
def test_fileparser_group_1_051(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}.Covert.Affairs.101 Case 051.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_052(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}.Covert.Affairs.0101 Case 052.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_053(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}.Covert.Affairs.1001 Case 053.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_054(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group.Name}.Covert.Affairs.01001 Case 054.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 06 {Group.Name}_Covert_Affairs_ ...
def test_fileparser_group_1_061(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}_Covert_Affairs_101 Case 061.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_062(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}_Covert_Affairs_0101 Case 062.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_063(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}_Covert_Affairs_1001 Case 063.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_064(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/{Group_Name}_Covert_Affairs_01001 Case 064.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 07 [Group Name] Covert Affairs ...
def test_fileparser_group_1_071(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] Covert Affairs 101 Case 071.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_072(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] Covert Affairs 0101 Case 072.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_073(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] Covert Affairs 1001 Case 073.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_074(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] Covert Affairs 01001 Case 074.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 08 [Group.Name].Covert.Affairs.
def test_fileparser_group_1_081(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].Covert.Affairs.101 Case 081.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_082(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].Covert.Affairs.0101 Case 082.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_083(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].Covert.Affairs.1001 Case 083.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_084(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].Covert.Affairs.01001 Case 084.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 09 [Group.Name]_Covert_Affairs.
def test_fileparser_group_1_091(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_Covert_Affairs_101 Case 091.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_092(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_Covert_Affairs_0101 Case 092.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_093(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_Covert_Affairs_1001 Case 093.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_094(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_Covert_Affairs_01001 Case 094.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 10 [Group Name] - Covert Affairs ...
def test_fileparser_group_1_101(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] - Covert Affairs 101 Case 101.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_102(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] - Covert Affairs 0101 Case 102.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_103(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] - Covert Affairs 1001 Case 103.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_104(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group Name] - Covert Affairs 01001 Case 104.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 11 [Group Name].-.Covert.Affairs ...
def test_fileparser_group_1_111(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].-.Covert.Affairs.101 Case 111.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_112(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].-.Covert.Affairs.0101 Case 112.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_113(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].-.Covert.Affairs.1001 Case 113.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_114(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group.Name].-.Covert.Affairs.01001 Case 114.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
# 12 [Group Name]_-_Covert_Affairs ...
def test_fileparser_group_1_121(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_-_Covert_Affairs_101 Case 121.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
def test_fileparser_group_1_122(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_-_Covert_Affairs_0101 Case 122.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_123(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_-_Covert_Affairs_1001 Case 0123.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
@unittest.expectedFailure
def test_fileparser_group_1_124(self):
KnownValues.File_SxxExx["FileName"] = "/srv/Download/Bittorrent/[Group_Name]_-_Covert_Affairs_01001 Case 124.mkv"
self.assertEqual(self.library.getFileDetails(KnownValues.File_SxxExx["FileName"]), KnownValues.File_SxxExx)
    @classmethod
    def theSuite(cls):
        suite = unittest.TestLoader().loadTestsFromTestCase(cls)
        return suite
if __name__ == '__main__':
suite = fileParserGroup_1.theSuite()
unittest.TextTestRunner(verbosity=2).run(suite)
| 57.942652 | 121 | 0.747 | 1,907 | 16,166 | 6.102255 | 0.069743 | 0.129759 | 0.259861 | 0.239237 | 0.922145 | 0.895334 | 0.895334 | 0.895334 | 0.895334 | 0.84893 | 0 | 0.041952 | 0.132995 | 16,166 | 278 | 122 | 58.151079 | 0.788313 | 0.030187 | 0 | 0.375 | 0 | 0 | 0.284317 | 0.156764 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.260417 | false | 0 | 0.020833 | 0 | 0.302083 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
89284751e8857a651487d3dccc1439ed72aa324c | 5,165 | py | Python | test/test_jre_auditor.py | mikefeneley/stig-jre | cb889e794bdf9569302e74292c0a60cc6018e077 | [
"MIT"
] | null | null | null | test/test_jre_auditor.py | mikefeneley/stig-jre | cb889e794bdf9569302e74292c0a60cc6018e077 | [
"MIT"
] | null | null | null | test/test_jre_auditor.py | mikefeneley/stig-jre | cb889e794bdf9569302e74292c0a60cc6018e077 | [
"MIT"
] | null | null | null |
import sys
sys.path.append("../src/")
import unittest
from jre_auditor import JREAuditor
class TestJREAuditor(unittest.TestCase):
def setUp(self):
self.auditor = JREAuditor()
def test_get_deployment_path(self):
result = self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.assertEqual(result, 1)
result = self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.assertEqual(result, 1)
result = self.auditor.get_deployment_path(direc="./", filename="deployment3.config")
self.assertEqual(result, 0)
def test_get_properties_path(self):
result = self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
self.assertEqual(result, 1)
result = self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
self.assertEqual(result, 1)
result = self.auditor.get_properties_path(direc="./", filename="deployment3.properties")
self.assertEqual(result, 0)
def test_permission_dialog_disabled(self):
self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
result = self.auditor.permission_dialog_disabled()
self.assertTrue(result)
self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
result = self.auditor.permission_dialog_disabled()
self.assertFalse(result)
def test_permission_dialog_locked(self):
self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
result = self.auditor.permission_dialog_locked()
self.assertTrue(result)
self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
result = self.auditor.permission_dialog_locked()
self.assertFalse(result)
def test_publisher_revocation_enabled(self):
self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
result = self.auditor.publisher_revocation_enabled()
self.assertTrue(result)
self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
result = self.auditor.publisher_revocation_enabled()
self.assertFalse(result)
def test_publisher_revocation_locked(self):
self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
result = self.auditor.publisher_revocation_locked()
self.assertTrue(result)
self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
result = self.auditor.publisher_revocation_locked()
self.assertFalse(result)
def test_certificate_validation_enabled(self):
self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
result = self.auditor.certificate_validation_enabled()
self.assertTrue(result)
self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
result = self.auditor.certificate_validation_enabled()
self.assertFalse(result)
def test_certificate_validation_locked(self):
self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
result = self.auditor.certificate_validation_locked()
self.assertTrue(result)
self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
result = self.auditor.certificate_validation_locked()
self.assertFalse(result)
def test_config_keys_set(self):
self.auditor.get_deployment_path(direc="./", filename="deployment1.config")
self.auditor.get_properties_path(direc="./", filename="deployment1.properties")
result = self.auditor.config_keys_set()
self.assertTrue(result)
self.auditor.get_deployment_path(direc="./", filename="deployment2.config")
self.auditor.get_properties_path(direc="./", filename="deployment2.properties")
result = self.auditor.config_keys_set()
self.assertFalse(result)
if __name__ == "__main__":
print(sys.path)
unittest.main()
| 47.824074 | 96 | 0.70939 | 547 | 5,165 | 6.457038 | 0.085923 | 0.152605 | 0.134768 | 0.115515 | 0.911665 | 0.90487 | 0.877123 | 0.827293 | 0.795017 | 0.77718 | 0 | 0.009214 | 0.159535 | 5,165 | 107 | 97 | 48.271028 | 0.804423 | 0 | 0 | 0.712644 | 0 | 0 | 0.14784 | 0.072467 | 0 | 0 | 0 | 0 | 0.229885 | 1 | 0.114943 | false | 0 | 0.034483 | 0 | 0.16092 | 0.011494 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
89533a8b1f3601b065db1ffe63a2cf4c83bad1b6 | 214 | py | Python | src/__init__.py | disiji/fc_mondrian | 97420ff311242afe103c45130ada509e1e60a0ac | [
"MIT"
] | 1 | 2020-12-28T16:41:33.000Z | 2020-12-28T16:41:33.000Z | src/__init__.py | disiji/fc_mondrian | 97420ff311242afe103c45130ada509e1e60a0ac | [
"MIT"
] | 1 | 2019-10-07T19:17:58.000Z | 2019-10-08T06:55:16.000Z | src/__init__.py | disiji/fc_mondrian | 97420ff311242afe103c45130ada509e1e60a0ac | [
"MIT"
] | null | null | null | from .flowMP_sample import *
from .flowMP_sample_RE import *
from .flowMP_compute import *
from .flowMP_classify import *
from .flowMP_visualize import *
from .flowMP_helper import *
from .flowMP_diagnosis import * | 30.571429 | 31 | 0.808411 | 29 | 214 | 5.689655 | 0.344828 | 0.424242 | 0.581818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126168 | 214 | 7 | 32 | 30.571429 | 0.882353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
98659b05de0a76a06b4e5a3a6adf015f3c4f89d7 | 2,156 | py | Python | tests/core/test_connect.py | vaporydev/lahja | 10fb6276d2312629cdbc7367fa3a0057656b540b | [
"MIT"
] | null | null | null | tests/core/test_connect.py | vaporydev/lahja | 10fb6276d2312629cdbc7367fa3a0057656b540b | [
"MIT"
] | null | null | null | tests/core/test_connect.py | vaporydev/lahja | 10fb6276d2312629cdbc7367fa3a0057656b540b | [
"MIT"
] | null | null | null | import pytest
from conftest import (
generate_unique_name,
)
from lahja import (
ConnectionAttemptRejected,
ConnectionConfig,
Endpoint,
)
@pytest.mark.asyncio
async def test_can_not_connect_conflicting_names_blocking() -> None:
own = ConnectionConfig.from_name(generate_unique_name())
endpoint = Endpoint()
await endpoint.start_serving(own)
# We connect to our own Endpoint because for this test, it doesn't matter
# if we use a foreign one or our own
endpoint.connect_to_endpoints_blocking(own)
# Can't connect a second time
with pytest.raises(ConnectionAttemptRejected):
endpoint.connect_to_endpoints_blocking(own)
@pytest.mark.asyncio
async def test_can_not_connect_conflicting_names() -> None:
own = ConnectionConfig.from_name(generate_unique_name())
endpoint = Endpoint()
await endpoint.start_serving(own)
# We connect to our own Endpoint because for this test, it doesn't matter
# if we use a foreign one or our own
await endpoint.connect_to_endpoints(own)
# Can't connect a second time
with pytest.raises(ConnectionAttemptRejected):
await endpoint.connect_to_endpoints(own)
@pytest.mark.asyncio
async def test_rejects_duplicates_when_connecting_blocking() -> None:
own = ConnectionConfig.from_name(generate_unique_name())
endpoint = Endpoint()
await endpoint.start_serving(own)
with pytest.raises(ConnectionAttemptRejected):
endpoint.connect_to_endpoints_blocking(own, own)
@pytest.mark.asyncio
async def test_rejects_duplicates_when_connecting() -> None:
own = ConnectionConfig.from_name(generate_unique_name())
endpoint = Endpoint()
await endpoint.start_serving(own)
with pytest.raises(ConnectionAttemptRejected):
await endpoint.connect_to_endpoints(own, own)
@pytest.mark.asyncio
async def test_rejects_duplicates_when_connecting_nowait() -> None:
own = ConnectionConfig.from_name(generate_unique_name())
endpoint = Endpoint()
await endpoint.start_serving(own)
with pytest.raises(ConnectionAttemptRejected):
endpoint.connect_to_endpoints_nowait(own, own)
| 28.368421 | 77 | 0.756957 | 267 | 2,156 | 5.868914 | 0.198502 | 0.051691 | 0.075941 | 0.116146 | 0.917039 | 0.917039 | 0.875558 | 0.875558 | 0.875558 | 0.875558 | 0 | 0 | 0.166976 | 2,156 | 75 | 78 | 28.746667 | 0.872494 | 0.124768 | 0 | 0.630435 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.065217 | 0 | 0.065217 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
988d52bba89e9f7543d1c0f0cf284b92feb2dbe8 | 2,712 | py | Python | soda/core/tests/data_source/test_schema_required_columns.py | sodadata/soda-core | d9b98d4f6f3364c5eb8210e8288c4c861bcf8f8a | [
"Apache-2.0"
] | 4 | 2022-03-23T02:43:42.000Z | 2022-03-31T15:20:54.000Z | soda/core/tests/data_source/test_schema_required_columns.py | sodadata/soda-core | d9b98d4f6f3364c5eb8210e8288c4c861bcf8f8a | [
"Apache-2.0"
] | 543 | 2022-03-22T09:02:17.000Z | 2022-03-31T16:29:41.000Z | soda/core/tests/data_source/test_schema_required_columns.py | sodadata/soda-core | d9b98d4f6f3364c5eb8210e8288c4c861bcf8f8a | [
"Apache-2.0"
] | 1 | 2022-03-27T03:37:55.000Z | 2022-03-27T03:37:55.000Z | from soda.execution.schema_check import SchemaCheck
from tests.helpers.common_test_tables import customers_test_table
from tests.helpers.data_source_fixture import DataSourceFixture
from tests.helpers.utils import format_checks
def test_required_columns_pass(data_source_fixture: DataSourceFixture):
table_name = data_source_fixture.ensure_test_table(customers_test_table)
default_casify_column_name = data_source_fixture.data_source.default_casify_column_name
scan = data_source_fixture.create_test_scan()
scan.add_sodacl_yaml_str(
f"""
checks for {table_name}:
- schema:
fail:
when required column missing: [{default_casify_column_name('id')}, {default_casify_column_name('sizeTxt')}, {default_casify_column_name('distance')}]
"""
)
scan.execute()
scan.assert_all_checks_pass()
def test_required_columns_fail(data_source_fixture: DataSourceFixture):
table_name = data_source_fixture.ensure_test_table(customers_test_table)
default_casify_column_name = data_source_fixture.data_source.default_casify_column_name
scan = data_source_fixture.create_test_scan()
checks_str = format_checks(
["id", "sizeTxt", "non_existing_column", "name"],
indent=15,
prefix="-",
data_source=data_source_fixture.data_source,
)
scan.add_sodacl_yaml_str(
f"""
checks for {table_name}:
- schema:
fail:
when required column missing:
{checks_str}
"""
)
scan.execute()
scan.assert_all_checks_fail()
check: SchemaCheck = scan._checks[0]
assert sorted(check.schema_missing_column_names) == sorted(
[default_casify_column_name("non_existing_column"), default_casify_column_name("name")]
)
def test_required_columns_warn(data_source_fixture: DataSourceFixture):
table_name = data_source_fixture.ensure_test_table(customers_test_table)
default_casify_column_name = data_source_fixture.data_source.default_casify_column_name
scan = data_source_fixture.create_test_scan()
checks_str = format_checks(
["id", "sizeTxt", "non_existing_column", "name"],
indent=15,
prefix="-",
data_source=data_source_fixture.data_source,
)
scan.add_sodacl_yaml_str(
f"""
checks for {table_name}:
- schema:
warn:
when required column missing:
{checks_str}
"""
)
scan.execute()
scan.assert_all_checks_warn()
check: SchemaCheck = scan._checks[0]
assert sorted(check.schema_missing_column_names) == sorted(
[default_casify_column_name("non_existing_column"), default_casify_column_name("name")]
)
| 33.481481 | 163 | 0.717552 | 330 | 2,712 | 5.427273 | 0.169697 | 0.122836 | 0.142379 | 0.166946 | 0.790061 | 0.790061 | 0.773311 | 0.773311 | 0.773311 | 0.773311 | 0 | 0.002738 | 0.192109 | 2,712 | 80 | 164 | 33.9 | 0.814696 | 0 | 0 | 0.720588 | 0 | 0 | 0.223451 | 0.043142 | 0 | 0 | 0 | 0 | 0.073529 | 1 | 0.044118 | false | 0.029412 | 0.058824 | 0 | 0.102941 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
98996b5d0cedb4768deb8dc36b92b6b969a54632 | 29,502 | py | Python | nnMorpho/operations.py | Manza12/nnMorpho | a7952b3c81e3f4df690c5c28763d3ec6a8a82ef1 | [
"MIT"
] | 9 | 2021-05-13T08:11:18.000Z | 2022-02-25T08:04:18.000Z | nnMorpho/operations.py | Manza12/nnMorpho | a7952b3c81e3f4df690c5c28763d3ec6a8a82ef1 | [
"MIT"
] | 7 | 2021-04-21T06:30:30.000Z | 2021-11-23T17:28:51.000Z | nnMorpho/operations.py | Manza12/nnMorpho | a7952b3c81e3f4df690c5c28763d3ec6a8a82ef1 | [
"MIT"
] | 2 | 2021-04-21T06:47:27.000Z | 2021-05-03T02:32:18.000Z | from nnMorpho.parameters import *
from nnMorpho.utils import pad_tensor, fill_border, convert_float
from nnMorpho.checks import check_parameters, check_parameters_partial, check_parameters_dependent
def erosion(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Erosion is one of the basic operations of Mathematical Morphology. This function computes the grayscale
erosion of an input tensor by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to erode. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be eroded are determined by the structuring element.
:param structuring_element: torch.Tensor
        The structuring element used to erode the input. It should be a PyTorch tensor of arbitrary dimension;
        its number of dimensions determines how many trailing dimensions of the input_tensor are eroded.
:param origin: None, tuple, List[int]
        The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
        The value used to pad the image at the border. Two options are allowed when a string is passed as a parameter:
- 'geodesic': only points within the input are considered when taking the minimum.
- 'euclidean': extends naturally the image setting minus infinite value to the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
        The erosion as a PyTorch tensor of the same shape as the original input.
"""
# Check parameters
check_parameters(input_tensor, structuring_element, origin, border_value)
# Adapt origin
if not origin:
        origin = tuple(size // 2 for size in structuring_element.shape)  # center of the structuring element, in every dimension
# Fill border value if needed
border_value = fill_border(border_value, 'erosion')
# Convert tensor to float if needed
input_tensor = convert_float(input_tensor)
# Compute erosion
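    # Grayscale erosion: result(x) = min_y [ input(x + y - origin) - strel(y) ],
    # with the minimum taken over the support of the structuring element.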
if str(input_tensor.device) == 'cpu':
# Pad input
input_pad = pad_tensor(input_tensor, origin, structuring_element, border_value)
# Unfold the input
input_unfolded = input_pad
dim_shift = input_tensor.ndim - structuring_element.ndim
for dim in range(structuring_element.ndim):
input_unfolded = input_unfolded.unfold(dim_shift + dim, structuring_element.shape[dim], 1)
# Differences
result = input_unfolded - structuring_element
# Take the minimum
for dim in range(structuring_element.ndim):
result, _ = torch.min(result, dim=-1)
else:
if structuring_element.ndim == 2:
# Pad input
pad_list = [origin[1], structuring_element.shape[1] - origin[1] - 1,
origin[0], structuring_element.shape[0] - origin[0] - 1]
input_pad = f.pad(input_tensor, pad_list, mode='constant', value=border_value)
if input_tensor.ndim - structuring_element.ndim == 0:
result = morphology_cuda.erosion(input_pad, structuring_element, BLOCK_SHAPE)
elif input_tensor.ndim - structuring_element.ndim == 1:
result = morphology_cuda.erosion_batched(input_pad, structuring_element, BLOCK_SHAPE)
elif input_tensor.ndim - structuring_element.ndim == 2:
batch_channel_dim = input_pad.shape[0] * input_pad.shape[1]
input_height = input_pad.shape[2]
input_width = input_pad.shape[3]
input_view = input_pad.view(batch_channel_dim, input_height, input_width)
result = morphology_cuda.erosion_batched(input_view, structuring_element, BLOCK_SHAPE)
result = result.view(*input_tensor.shape)
else:
raise NotImplementedError("Currently, nnMorpho only supports as input:\n"
"- 2D tensors of the form (H, W)\n"
"- 3D tensors of the form (B, H, W)"
"- 4D tensors of the form (B, C, H, W)")
else:
raise NotImplementedError("Currently nnMorpho only supports 2D erosion.")
return result
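# Illustrative usage (a minimal sketch under assumed shapes, not part of the original module):
#   image = torch.rand(64, 64)       # 2D input of the form (H, W)
#   strel = torch.zeros(3, 3)        # flat 3x3 structuring element
#   eroded = erosion(image, strel)   # origin defaults to the center, (1, 1)
#   assert eroded.shape == image.shape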
def dilation(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Dilation is one of the basic operations of Mathematical Morphology. This function computes the grayscale
dilation of an input tensor by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to dilate. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be dilated are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the dilation. It should be a PyTorch tensor of arbitrary dimension.
Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The dilation as a PyTorch tensor of the same shape as the original input.
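Example
-------
A minimal sketch on CPU: with a flat structuring element of zeros, the
grayscale dilation reduces to a sliding-window maximum, so it is extensive.
>>> import torch
>>> image = torch.rand(64, 64)
>>> flat_se = torch.zeros(3, 3)
>>> dilated = dilation(image, flat_se)
>>> bool((dilated >= image).all())
True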
"""
# Check parameters
check_parameters(input_tensor, structuring_element, origin, border_value)
# Adapt origin
if not origin:
origin = tuple(size // 2 for size in structuring_element.shape)
# Fill border value if needed
border_value = fill_border(border_value, 'dilation')
# Convert tensor to float if needed
input_tensor = convert_float(input_tensor)
# Compute the dilation
if str(input_tensor.device) == 'cpu':
# Pad input
input_pad = pad_tensor(input_tensor, origin, structuring_element, border_value)
# Unfold the input
input_unfolded = input_pad
dim_shift = input_tensor.ndim - structuring_element.ndim
for dim in range(structuring_element.ndim):
input_unfolded = input_unfolded.unfold(dim + dim_shift, structuring_element.shape[dim], 1)
# Sums
result = input_unfolded + torch.flip(structuring_element, list(range(structuring_element.ndim)))
# Take the maximum
for dim in range(structuring_element.ndim):
result, _ = torch.max(result, dim=-1)
else:
if structuring_element.ndim == 2:
# Pad input
pad_list = [origin[1], structuring_element.shape[1] - origin[1] - 1,
origin[0], structuring_element.shape[0] - origin[0] - 1]
input_pad = f.pad(input_tensor, pad_list, mode='constant', value=border_value)
if input_tensor.ndim - structuring_element.ndim == 0:
result = morphology_cuda.dilation(input_pad, structuring_element, BLOCK_SHAPE)
elif input_tensor.ndim - structuring_element.ndim == 1:
result = morphology_cuda.dilation_batched(input_pad, structuring_element, BLOCK_SHAPE)
elif input_tensor.ndim - structuring_element.ndim == 2:
batch_channel_dim = input_pad.shape[0] * input_pad.shape[1]
input_height = input_pad.shape[2]
input_width = input_pad.shape[3]
input_view = input_pad.view(batch_channel_dim, input_height, input_width)
result = morphology_cuda.dilation_batched(input_view, structuring_element, BLOCK_SHAPE)
result = result.view(*input_tensor.shape)
else:
raise NotImplementedError("Currently, nnMorpho only supports as input:\n"
"- 2D tensors of the form (H, W)\n"
"- 3D tensors of the form (B, H, W)"
"- 4D tensors of the form (B, C, H, W)")
else:
raise NotImplementedError("Currently nnMorpho only supports 2D erosion.")
return result
def opening(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Opening is one of the derived operations of Mathematical Morphology: it consists on eroding an image and then
dilating it. This function computes the grayscale opening of an image by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to open. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be opened are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the opening. It should be a PyTorch tensor of arbitrary dimension.
Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the minimum and the maximum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The opening as a PyTorch tensor of the same shape as the original input.
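Example
-------
A minimal sketch on CPU: the opening removes bright structures smaller than
the structuring element.
>>> import torch
>>> image = torch.zeros(32, 32)
>>> image[10, 10] = 1.0  # an isolated bright pixel
>>> opened = opening(image, torch.zeros(3, 3))
>>> float(opened[10, 10])
0.0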
"""
# Compute the opening
return dilation(erosion(input_tensor, structuring_element, origin, border_value),
structuring_element, origin, border_value)
def closing(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Closing is one of the derived operations of Mathematical Morphology: it consists on dilating an image and then
eroding it. This function computes the grayscale closing of an image by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to close. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be closed are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the closing. It should be a PyTorch tensor of arbitrary dimension.
Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum and the minimum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The closing as a PyTorch tensor of the same shape as the original input.
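Example
-------
A minimal sketch on CPU: the closing fills dark structures smaller than the
structuring element.
>>> import torch
>>> image = torch.ones(32, 32)
>>> image[10, 10] = 0.0  # an isolated dark pixel
>>> closed = closing(image, torch.zeros(3, 3))
>>> float(closed[10, 10])
1.0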
"""
# Compute the closing
return erosion(dilation(input_tensor, structuring_element, origin, border_value),
structuring_element, origin, border_value)
def top_hat(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Top-hat transform is one of the differential operations of Mathematical Morphology:
it consists of subtracting the opening of an image from the image itself.
This function computes the grayscale top-hat of an image by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to transform. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be transformed are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the transformation. It should be a PyTorch tensor of arbitrary
dimension. Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum and the minimum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The top-hat as a PyTorch tensor of the same shape as the original input.
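Example
-------
A minimal sketch on CPU: the top-hat keeps bright details smaller than the
structuring element and suppresses the background.
>>> import torch
>>> image = torch.zeros(32, 32)
>>> image[10, 10] = 1.0
>>> th = top_hat(image, torch.zeros(3, 3))
>>> float(th[10, 10])
1.0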
"""
# Compute the top-hat transform
return input_tensor - opening(input_tensor, structuring_element, origin, border_value)
def bottom_hat(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Black Top-hat transform is one of the differential operations of Mathematical Morphology:
it consists of subtracting an image from its closing.
This function computes the grayscale black top-hat of an image by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to transform. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be transformed are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the transformation. It should be a PyTorch tensor of arbitrary
dimension. Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum and the minimum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The black top-hat as a PyTorch tensor of the same shape as the original input.
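Example
-------
A minimal sketch on CPU: the black top-hat keeps dark details smaller than
the structuring element.
>>> import torch
>>> image = torch.ones(32, 32)
>>> image[10, 10] = 0.0
>>> bh = bottom_hat(image, torch.zeros(3, 3))
>>> float(bh[10, 10])
1.0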
"""
# Compute the black top-hat transform
return closing(input_tensor, structuring_element, origin, border_value) - input_tensor
white_top_hat = top_hat
black_top_hat = bottom_hat
def internal_gradient(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Internal gradient is one of the differential operations of Mathematical Morphology:
it consists of subtracting the erosion of an image from the image itself.
This function computes the internal gradient of an image by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to transform. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be transformed are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the transformation. It should be a PyTorch tensor of arbitrary
dimension. Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum and the minimum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The internal gradient as a PyTorch tensor of the same shape as the original input.
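Example
-------
A minimal sketch on CPU: the internal gradient responds on the inner side
of an edge.
>>> import torch
>>> image = torch.zeros(32, 32)
>>> image[:, 16:] = 1.0  # a vertical step edge
>>> ig = internal_gradient(image, torch.zeros(3, 3))
>>> float(ig[10, 16]), float(ig[10, 17])
(1.0, 0.0)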
"""
# Compute the internal gradient
return input_tensor - erosion(input_tensor, structuring_element, origin, border_value)
def external_gradient(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" External gradient is one of the differential operations of Mathematical Morphology:
it consists of subtracting an image from its dilation.
This function computes the external gradient of an image by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to transform. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be transformed are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the transformation. It should be a PyTorch tensor of arbitrary
dimension. Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum and the minimum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The external gradient as a PyTorch tensor of the same shape as the original input.
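Example
-------
A minimal sketch on CPU: the external gradient responds on the outer side
of an edge.
>>> import torch
>>> image = torch.zeros(32, 32)
>>> image[:, 16:] = 1.0  # a vertical step edge
>>> eg = external_gradient(image, torch.zeros(3, 3))
>>> float(eg[10, 15]), float(eg[10, 14])
(1.0, 0.0)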
"""
# Compute the external gradient
return dilation(input_tensor, structuring_element, origin, border_value) - input_tensor
def gradient(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" Gradient is one of the differential operations of Mathematical Morphology:
it consists of subtracting the erosion of an image from its dilation.
This function computes the gradient of an image by a structuring element.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to transform. It should be a PyTorch tensor of arbitrary dimension. The
dimensions that will be transformed are determined by the structuring element.
:param structuring_element: torch.Tensor
The structuring element used for the transformation. It should be a PyTorch tensor of arbitrary
dimension. Its shape should coincide with the shape of the last dimensions of the input_tensor.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum and the minimum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The gradient as a PyTorch tensor of the same shape as the original input.
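Example
-------
A minimal sketch on CPU: the morphological gradient responds on both sides
of an edge.
>>> import torch
>>> image = torch.zeros(32, 32)
>>> image[:, 16:] = 1.0  # a vertical step edge
>>> g = gradient(image, torch.zeros(3, 3))
>>> float(g[10, 15]), float(g[10, 16])
(1.0, 1.0)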
"""
# Compute the gradient
return dilation(input_tensor, structuring_element, origin,
border_value) - erosion(input_tensor, structuring_element, origin, border_value)
def erosion_dependent(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" This type of erosion is needed when you want a structuring element to vary along one axis.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to erode. It should be a PyTorch tensor of 2 dimensions.
:param structuring_element: torch.Tensor
The structuring elements used for the erosion. They should be given as a PyTorch tensor of 3 dimensions;
its first dimension should coincide with the first dimension of input_tensor, and the two other
dimensions are the shape of the structuring element.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed. The origin will be the same for all the structuring elements.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the minimum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The erosion, varying along the first axis, as a PyTorch tensor of the same shape as the original input.
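Example
-------
A minimal sketch (assuming a CUDA device, since only the GPU path is
implemented): one 3x3 structuring element per row of a 2D input.
>>> import torch
>>> image = torch.rand(32, 64, device='cuda')
>>> ses = torch.zeros(32, 3, 3, device='cuda')
>>> eroded = erosion_dependent(image, ses)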
"""
# Check parameters
check_parameters_dependent(input_tensor, structuring_element, origin, border_value)
# Adapt origin
if not origin:
origin = (structuring_element.shape[1] // 2, structuring_element.shape[2] // 2)
# Fill border value if needed
border_value = fill_border(border_value, 'erosion')
# Convert tensor to float if needed
input_tensor = convert_float(input_tensor)
# Pad input
pad_list = [origin[1], structuring_element.shape[2] - origin[1] - 1,
origin[0], structuring_element.shape[1] - origin[0] - 1]
input_pad = f.pad(input_tensor, pad_list, mode='constant', value=border_value)
# Compute erosion
if str(input_tensor.device) == 'cpu':
raise NotImplementedError('Operation currently only implemented for GPU.')
else:
result = morphology_cuda.erosion_dependent(input_pad, structuring_element, BLOCK_SHAPE)
return result
def dilation_dependent(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
""" This type of dilation is needed when you want a structuring element to vary along one axis.
Parameters
----------
:param input_tensor: torch.Tensor
The input tensor that you want to dilate. It should be a PyTorch tensor of 2 dimensions.
:param structuring_element: torch.Tensor
The structuring elements used for the dilation. They should be given as a PyTorch tensor of 3 dimensions;
its first dimension should coincide with the first dimension of input_tensor, and the two other
dimensions are the shape of the structuring element.
:param origin: None, tuple, List[int]
The origin of the structuring element. Defaults to the center of the structuring element.
Negative indexes are allowed. The origin will be the same for all the structuring elements.
:param border_value: int, float, str
The value used to pad the image at the border. Two options are allowed when a string is passed:
- 'geodesic': only points within the input are considered when taking the maximum.
- 'euclidean': naturally extends the image by setting a value of minus infinity at the border.
Default value is 'geodesic'.
Outputs
-------
:return: torch.Tensor
The dilation, varying along the first axis, as a PyTorch tensor of the same shape as the original input.
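Example
-------
A minimal sketch (assuming a CUDA device, since only the GPU path is
implemented): one 3x3 structuring element per row of a 2D input.
>>> import torch
>>> image = torch.rand(32, 64, device='cuda')
>>> ses = torch.zeros(32, 3, 3, device='cuda')
>>> dilated = dilation_dependent(image, ses)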
"""
# Check parameters
check_parameters_dependent(input_tensor, structuring_element, origin, border_value)
# Adapt origin
if not origin:
origin = (structuring_element.shape[1] // 2, structuring_element.shape[2] // 2)
# Fill border value if needed
border_value = fill_border(border_value, 'dilation')
# Convert tensor to float if needed
input_tensor = convert_float(input_tensor)
# Pad input
pad_list = [origin[1], structuring_element.shape[2] - origin[1] - 1,
origin[0], structuring_element.shape[1] - origin[0] - 1]
input_pad = f.pad(input_tensor, pad_list, mode='constant', value=border_value)
# Compute dilation
if str(input_tensor.device) == 'cpu':
raise NotImplementedError('Operation currently only implemented for GPU.')
else:
result = morphology_cuda.dilation_dependent(input_pad, structuring_element, BLOCK_SHAPE)
return result
def partial_erosion(input_tensor: torch.Tensor,
structuring_element: torch.Tensor,
origin: Optional[Union[tuple, List[int]]] = None,
border_value: Union[int, float, str] = 'geodesic'):
# ToDo: Improve the documentation
""" Partial erosion is a new operation that does a one-dimension-long erosion.
Parameters
----------
:param input_tensor: torch.Tensor
:param structuring_element: torch.Tensor
:param origin: tuple, List[int]
:param border_value: int, float, str
Outputs
-------
:return: torch.Tensor
"""
# Check parameters
check_parameters_partial(input_tensor, structuring_element, origin, border_value)
# Adapt origin
if not origin:
origin = (structuring_element.shape[0] // 2, structuring_element.shape[1] // 2)
# Fill border value if needed
border_value = fill_border(border_value, 'erosion')
# Convert tensor to float if needed
input_tensor = convert_float(input_tensor)
# Pad input
pad_list = [origin[1], structuring_element.shape[1] - origin[1] - 1]
input_pad = f.pad(input_tensor, pad_list, mode='constant', value=border_value)
# Compute erosion
if str(input_tensor.device) == 'cpu':
raise NotImplementedError("CPU computation is not implemented yet for partial erosion.")
else:
result = morphology_cuda.partial_erosion(input_pad, structuring_element, BLOCK_SHAPE)
return result
| 50.430769 | 120 | 0.66243 | 3,688 | 29,502 | 5.20038 | 0.054772 | 0.144533 | 0.060222 | 0.02753 | 0.943845 | 0.93446 | 0.926013 | 0.922728 | 0.916523 | 0.902341 | 0 | 0.004097 | 0.272015 | 29,502 | 584 | 121 | 50.517123 | 0.888904 | 0.540879 | 0 | 0.757062 | 0 | 0 | 0.063382 | 0 | 0 | 0 | 0 | 0.001712 | 0 | 1 | 0.067797 | false | 0 | 0.016949 | 0 | 0.152542 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7f2997ba9fbd593cfc8aeafed03ad9c7a568caed | 1,827 | py | Python | vyapp/plugins/pane_resize.py | iogf/vy | 4ba0d379e21744fd79a740e8aeaba3a0a779973c | [
"MIT"
] | 927 | 2015-02-22T17:34:21.000Z | 2018-03-23T07:26:17.000Z | vyapp/plugins/pane_resize.py | iogf/vy | 4ba0d379e21744fd79a740e8aeaba3a0a779973c | [
"MIT"
] | 22 | 2015-09-02T19:20:22.000Z | 2018-02-13T16:41:02.000Z | vyapp/plugins/pane_resize.py | iogf/vy | 4ba0d379e21744fd79a740e8aeaba3a0a779973c | [
"MIT"
] | 53 | 2015-09-02T12:26:32.000Z | 2018-01-18T09:11:30.000Z | """
Overview
========
This plugin implements key commands to resize vy panes by moving the
sashes that separate them.
Commands
========
In EXTRA mode:
<Control-h>: Move the vertical sash 15 pixels to the left.
<Control-l>: Move the vertical sash 15 pixels to the right.
<Control-k>: Move the horizontal sash 15 pixels up.
<Control-j>: Move the horizontal sash 15 pixels down.
"""
class PaneResize:
def __init__(self, area):
self.area = area
area.install('pane-resize',
('EXTRA', '<Control-h>', self.dec_vsash),
('EXTRA', '<Control-l>', self.inc_vsash),
('EXTRA', '<Control-k>', self.dec_hsash),
('EXTRA', '<Control-j>', self.inc_hsash))
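# Each handler below looks up the index of the current pane among its
# parent's panes, targets the sash on its left/top side (index - 1,
# clamped to 0), and moves that sash by 15 pixels with sash_place.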
def dec_vsash(self, event):
wids = self.area.master.master.panes()
wids = [str(item) for item in wids]
count = wids.index(str(self.area.master))
count = count - 1 if count > 0 else 0
pos = self.area.master.master.sash_coord(count)
self.area.master.master.sash_place(count, pos[0] - 15, 0)
def inc_vsash(self, event):
wids = self.area.master.master.panes()
wids = [str(item) for item in wids]
count = wids.index(str(self.area.master))
count = count - 1 if count > 0 else 0
pos = self.area.master.master.sash_coord(count)
self.area.master.master.sash_place(count, pos[0] + 15, 0)
def dec_hsash(self, event):
wids = self.area.master.master.master.panes()
wids = [str(item) for item in wids]
count = wids.index(str(self.area.master.master))
count = count - 1 if count > 0 else 0
pos = self.area.master.master.master.sash_coord(count)
self.area.master.master.master.sash_place(count, 0, pos[1] - 15)
def inc_hsash(self, event):
wids = self.area.master.master.master.panes()
wids = [str(item) for item in wids]
count = wids.index(str(self.area.master.master))
count = count - 1 if count > 0 else 0
pos = self.area.master.master.master.sash_coord(count)
self.area.master.master.master.sash_place(count, 0, pos[1] + 15)
install = PaneResize | 30.45 | 72 | 0.595512 | 257 | 1,827 | 4.155642 | 0.155642 | 0.224719 | 0.209738 | 0.262172 | 0.799625 | 0.799625 | 0.799625 | 0.799625 | 0.799625 | 0.799625 | 0 | 0.020528 | 0.253421 | 1,827 | 60 | 73 | 30.45 | 0.762463 | 0.020252 | 0 | 0.540541 | 0 | 0 | 0.042111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.135135 | false | 0 | 0 | 0 | 0.162162 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7f5836a16e0a0a3ba584c5ca181da1c5c5568cd6 | 152 | py | Python | src/067.py | mackorone/euler | 1b0c2271690d1598d2643e75b7e1f693b4155f49 | [
"MIT"
] | null | null | null | src/067.py | mackorone/euler | 1b0c2271690d1598d2643e75b7e1f693b4155f49 | [
"MIT"
] | null | null | null | src/067.py | mackorone/euler | 1b0c2271690d1598d2643e75b7e1f693b4155f49 | [
"MIT"
] | null | null | null | from path import max_sum_through_triangle
def ans():
return max_sum_through_triangle('067.txt')
if __name__ == '__main__':
print(ans())
| 15.2 | 46 | 0.697368 | 21 | 152 | 4.380952 | 0.761905 | 0.130435 | 0.282609 | 0.456522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02439 | 0.190789 | 152 | 9 | 47 | 16.888889 | 0.723577 | 0 | 0 | 0 | 0 | 0 | 0.098684 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0 | 0.2 | 0.2 | 0.6 | 0.2 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
7fb09eb0b595488ad31875975fbfcce0e30855c3 | 10,126 | py | Python | hazma/single_channel.py | LoganAMorrison/Hazma | e9612729767ff48d5ce50633393f81ee021242d2 | [
"MIT"
] | 6 | 2019-07-30T18:14:43.000Z | 2020-10-25T04:58:44.000Z | hazma/single_channel.py | LoganAMorrison/Hazma | e9612729767ff48d5ce50633393f81ee021242d2 | [
"MIT"
] | 8 | 2017-12-19T08:06:59.000Z | 2021-04-22T02:15:26.000Z | hazma/single_channel.py | LoganAMorrison/Hazma | e9612729767ff48d5ce50633393f81ee021242d2 | [
"MIT"
] | 1 | 2020-04-01T11:08:49.000Z | 2020-04-01T11:08:49.000Z | import numpy as np
from hazma.theory import TheoryAnn, TheoryDec
from hazma.parameters import (
neutral_pion_mass as m_pi0,
charged_pion_mass as m_pi,
alpha_em,
electron_mass as m_e,
muon_mass as m_mu,
)
from hazma.decay import (
muon as dnde_g_mu,
neutral_pion as dnde_g_pi0,
charged_pion as dnde_g_pi,
)
from hazma.positron_spectra import charged_pion as dnde_p_pi, muon as dnde_p_mu
class SingleChannelAnn(TheoryAnn):
def __init__(self, mx, fs, sigma):
self._mx = mx
self._fs = fs
self.sigma = sigma
self.setup()
def __repr__(self):
return f"SingleChannelAnn(mx={self._mx} MeV, final state='{self._fs}', sigma={self.sigma} MeV^-1)"
@property
def fs(self):
return self._fs
@fs.setter
def fs(self, fs):
self._fs = fs
self.setup()
@property
def mx(self):
return self._mx
@mx.setter
def mx(self, mx):
self._mx = mx
self.setup()
def annihilation_cross_section_funcs(self):
def xsec(e_cm):
if e_cm < 2 * self.mx or e_cm < self.fs_mass:
return 0.0
else:
return self.sigma
return {self.fs: xsec}
def list_annihilation_final_states(self):
return [self.fs]
def setup(self):
self.set_fs_mass()
self.set_spectrum_funcs()
self.set_gamma_ray_line_energies()
self.set_positron_spectrum_funcs()
self.set_positron_line_energies()
def set_fs_mass(self):
# Sets kinematic threshold for DM annihilations/decays
if self.fs == "g g":
self.fs_mass = 0.0
elif self.fs == "e e":
self.fs_mass = 2 * m_e
elif self.fs == "mu mu":
self.fs_mass = 2 * m_mu
elif self.fs == "pi pi":
self.fs_mass = 2 * m_pi
elif self.fs == "pi0 pi0":
self.fs_mass = 2 * m_pi0
elif self.fs == "pi0 g":
self.fs_mass = m_pi0
def set_spectrum_funcs(self):
"""
Sets gamma ray spectrum functions.
"""
if self.fs == "e e":
def dnde_g(e_g, e_cm):
return self._dnde_ap_fermion(e_g, e_cm, m_e)
elif self.fs == "mu mu":
def dnde_g(e_g, e_cm):
return 2 * dnde_g_mu(e_g, e_cm / 2) + self._dnde_ap_fermion(
e_g, e_cm, m_mu
)
elif self.fs == "pi0 pi0":
def dnde_g(e_g, e_cm):
return 2 * dnde_g_pi0(e_g, e_cm / 2)
elif self.fs == "pi0 g":
def dnde_g(e_g, e_cm):
return dnde_g_pi0(e_g, (e_cm ** 2 + m_pi0 ** 2) / (2.0 * e_cm))
elif self.fs == "pi pi":
def dnde_g(e_g, e_cm):
return 2 * dnde_g_pi(e_g, e_cm / 2) + self._dnde_ap_scalar(
e_g, e_cm, m_pi
)
else:
# Final state produces no photons
self._spectrum_funcs = lambda: {}
return
self._spectrum_funcs = lambda: {self.fs: dnde_g}
def set_gamma_ray_line_energies(self):
if self.fs == "g g":
self._gamma_ray_line_energies = lambda e_cm: {"g g": e_cm / 2}
elif self.fs == "pi0 g":
self._gamma_ray_line_energies = lambda e_cm: {
"pi0 g": (e_cm ** 2 - m_pi0 ** 2) / (2.0 * e_cm)
}
else:
self._gamma_ray_line_energies = lambda e_cm: {}
def set_positron_spectrum_funcs(self):
if self.fs == "mu mu":
def dnde_p(e_p, e_cm):
if e_cm < self.fs_mass:
return 0.0
return dnde_p_mu(e_p, e_cm / 2.0)
elif self.fs == "pi pi":
def dnde_p(e_p, e_cm):
if e_cm < self.fs_mass:
return 0.0
return dnde_p_pi(e_p, e_cm / 2.0)
else:
# Final state produces no positrons
self._positron_spectrum_funcs = lambda: {}
return
self._positron_spectrum_funcs = lambda: {self.fs: dnde_p}
def set_positron_line_energies(self):
if self.fs == "e e":
self._positron_line_energies = lambda e_cm: {"e e": e_cm / 2.0}
else:
self._positron_line_energies = lambda e_cm: {}
def _dnde_ap_scalar(self, e_g, e_cm, m_scalar):
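# Altarelli-Parisi (final-state radiation) approximation for a charged
# scalar: dN/dE_g = 2 * alpha_em / (pi * e_cm) * P(x) * (log((1 - x) / mu^2) - 1),
# where x = 2 * e_g / e_cm, mu = m_scalar / e_cm, and the splitting
# function is P(x) = 2 * (1 - x) / x.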
def fn(e_g):
mu = m_scalar / e_cm
x = 2 * e_g / e_cm
P_g_scalar = 2 * (1 - x) / x
res = (
2
* alpha_em
/ (np.pi * e_cm)
* P_g_scalar
* (np.log((1 - x) / mu ** 2) - 1)
)
if not np.isnan(res) and res >= 0:
return res
else:
return 0
return np.vectorize(fn)(e_g)
def _dnde_ap_fermion(self, e_g, e_cm, m_fermion):
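# Same Altarelli-Parisi approximation as in _dnde_ap_scalar, but with the
# fermion splitting function P(x) = (1 + (1 - x)^2) / x.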
def fn(e_g):
mu = m_fermion / e_cm
x = 2 * e_g / e_cm
P_g_fermion = (1 + (1 - x) ** 2) / x
res = (
2
* alpha_em
/ (np.pi * e_cm)
* P_g_fermion
* (np.log((1 - x) / mu ** 2) - 1)
)
if not np.isnan(res) and res >= 0:
return res
else:
return 0
return np.vectorize(fn)(e_g)
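# Usage sketch (parameter values below are purely illustrative; the private
# _spectrum_funcs hook set up above is used here only for demonstration):
#
#     model = SingleChannelAnn(mx=200.0, fs="mu mu", sigma=1e-30)
#     dnde_mumu = model._spectrum_funcs()["mu mu"]
#     dnde = dnde_mumu(100.0, 450.0)  # dN/dE_g at E_g = 100 MeV, e_cm = 450 MeV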
class SingleChannelDec(TheoryDec):
def __init__(self, mx, fs, width):
self._mx = mx
self._fs = fs
self.width = width
self.setup()
def __repr__(self):
return f"SingleChannelDec(mx={self._mx} MeV, final state='{self._fs}', width={self.width} MeV)"
@property
def fs(self):
return self._fs
@fs.setter
def fs(self, fs):
self._fs = fs
self.setup()
@property
def mx(self):
return self._mx
@mx.setter
def mx(self, mx):
self._mx = mx
self.setup()
def list_decay_final_states(self):
return [self.fs]
def _decay_widths(self):
return {self.fs: self.width}
def setup(self):
self.set_fs_mass()
self.set_spectrum_funcs()
self.set_gamma_ray_line_energies()
self.set_positron_spectrum_funcs()
self.set_positron_line_energies()
def set_fs_mass(self):
# Sets kinematic threshold for DM annihilations/decays
if self.fs == "g g":
self.fs_mass = 0.0
elif self.fs == "e e":
self.fs_mass = 2 * m_e
elif self.fs == "mu mu":
self.fs_mass = 2 * m_mu
elif self.fs == "pi pi":
self.fs_mass = 2 * m_pi
elif self.fs == "pi0 pi0":
self.fs_mass = 2 * m_pi0
elif self.fs == "pi0 g":
self.fs_mass = m_pi0
def set_spectrum_funcs(self):
"""
Sets gamma ray spectrum functions.
"""
if self.fs == "e e":
def dnde_g(e_g):
return self._dnde_ap_fermion(e_g, m_e)
elif self.fs == "mu mu":
def dnde_g(e_g):
return 2 * dnde_g_mu(e_g, self.mx / 2) + self._dnde_ap_fermion(
e_g, m_mu
)
elif self.fs == "pi0 pi0":
def dnde_g(e_g):
return 2 * dnde_g_pi0(e_g, self.mx / 2)
elif self.fs == "pi0 g":
def dnde_g(e_g):
return dnde_g_pi0(e_g, (self.mx ** 2 + m_pi0 ** 2) / (2.0 * self.mx))
elif self.fs == "pi pi":
def dnde_g(e_g):
return 2 * dnde_g_pi(e_g, self.mx / 2) + self._dnde_ap_scalar(e_g, m_pi)
else:
# Final state produces no photons
self._spectrum_funcs = lambda: {}
return
self._spectrum_funcs = lambda: {self.fs: dnde_g}
def set_gamma_ray_line_energies(self):
if self.fs == "g g":
self._gamma_ray_line_energies = lambda: {"g g": self.mx / 2}
elif self.fs == "pi0 g":
self._gamma_ray_line_energies = lambda: {
"pi0 g": (self.mx ** 2 - m_pi0 ** 2) / (2.0 * self.mx)
}
else:
self._gamma_ray_line_energies = lambda: {}
def set_positron_spectrum_funcs(self):
if self.fs == "mu mu":
def dnde_p(e_p):
if self.mx < self.fs_mass:
return 0.0
return dnde_p_mu(e_p, self.mx / 2.0)
elif self.fs == "pi pi":
def dnde_p(e_p):
if self.mx < self.fs_mass:
return 0.0
return dnde_p_pi(e_p, self.mx / 2.0)
else:
# Final state produces no positrons
self._positron_spectrum_funcs = lambda: {}
return
self._positron_spectrum_funcs = lambda: {self.fs: dnde_p}
def set_positron_line_energies(self):
if self.fs == "e e":
self._positron_line_energies = lambda: {"e e": self.mx / 2.0}
else:
self._positron_line_energies = lambda: {}
def _dnde_ap_scalar(self, e_g, m_scalar):
def fn(e_g):
mu = m_scalar / self.mx
x = 2 * e_g / self.mx
P_g_scalar = 2 * (1 - x) / x
res = (
2
* alpha_em
/ (np.pi * self.mx)
* P_g_scalar
* (np.log((1 - x) / mu ** 2) - 1)
)
if not np.isnan(res) and res >= 0:
return res
else:
return 0
return np.vectorize(fn)(e_g)
def _dnde_ap_fermion(self, e_g, m_fermion):
def fn(e_g):
mu = m_fermion / self.mx
x = 2 * e_g / self.mx
P_g_fermion = (1 + (1 - x) ** 2) / x
res = (
2
* alpha_em
/ (np.pi * self.mx)
* P_g_fermion
* (np.log((1 - x) / mu ** 2) - 1)
)
if not np.isnan(res) and res >= 0:
return res
else:
return 0
return np.vectorize(fn)(e_g)
| 27.591281 | 106 | 0.492198 | 1,430 | 10,126 | 3.202797 | 0.065734 | 0.087773 | 0.048035 | 0.017467 | 0.867031 | 0.856114 | 0.849563 | 0.785371 | 0.723362 | 0.678821 | 0 | 0.022047 | 0.399763 | 10,126 | 366 | 107 | 27.666667 | 0.731491 | 0.030417 | 0 | 0.70922 | 0 | 0.007092 | 0.035122 | 0.006144 | 0 | 0 | 0 | 0 | 0 | 1 | 0.180851 | false | 0 | 0.017731 | 0.067376 | 0.368794 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |